From efc6d8d7c01e8bc5990b85625537b3a0cfc82c48 Mon Sep 17 00:00:00 2001
From: Igor Velikorossov
Date: Tue, 16 Feb 2021 10:39:02 +1100
Subject: [PATCH 001/413] Update Analyzer Samples.md

---
 docs/analyzers/Analyzer Samples.md | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/docs/analyzers/Analyzer Samples.md b/docs/analyzers/Analyzer Samples.md
index 6c6e9dc41689c..a81e15b93f452 100644
--- a/docs/analyzers/Analyzer Samples.md
+++ b/docs/analyzers/Analyzer Samples.md
@@ -18,17 +18,17 @@ Analyzers have been broadly categorized into the following three buckets based o
 **Contents:** Following sample analyzers, with simple unit tests, are provided:
 
- 1. Stateless analyzers:
+ 1. [Stateless analyzers](https://github.com/dotnet/roslyn-sdk/tree/master/samples/CSharp/Analyzers/Analyzers.Implementation/StatelessAnalyzers):
     1. SymbolAnalyzer: Analyzer for reporting symbol diagnostics.
     2. SyntaxNodeAnalyzer: Analyzer for reporting syntax node diagnostics.
     3. CodeBlockAnalyzer: Analyzer for reporting code block diagnostics.
     4. CompilationAnalyzer: Analyzer for reporting compilation diagnostics.
     5. SyntaxTreeAnalyzer: Analyzer for reporting syntax tree diagnostics.
     6. SemanticModelAnalyzer: Analyzer for reporting syntax tree diagnostics, that require some semantic analysis.
- 2. Stateful analyzers:
+ 2. [Stateful analyzers](https://github.com/dotnet/roslyn-sdk/tree/master/samples/CSharp/Analyzers/Analyzers.Implementation/StatefulAnalyzers):
     1. CodeBlockStartedAnalyzer: Analyzer to demonstrate code block wide analysis.
     2. CompilationStartedAnalyzer: Analyzer to demonstrate analysis within a compilation, for example analysis that depends on certain well-known symbol(s).
     3. CompilationStartedAnalyzerWithCompilationWideAnalysis: Analyzer to demonstrate compilation-wide analysis.
- 3. Additional File analyzers:
+ 3. [Additional File analyzers](https://github.com/dotnet/roslyn-sdk/tree/master/samples/CSharp/Analyzers/Analyzers.Implementation/AdditionalFileAnalyzers):
     1. SimpleAdditionalFileAnalyzer: Demonstrates reading an additional file line-by-line and using the data in analysis.
     2. XmlAdditionalFileAnalyzer: Demonstrates writing an additional file out to a `Stream` so that it can be read back as a structured document (in this case, XML).
 

From 9c0e051464d6ec0b92291a867afb1a13d87e1b8d Mon Sep 17 00:00:00 2001
From: Sam Harwell
Date: Fri, 2 Jul 2021 09:19:51 -0700
Subject: [PATCH 002/413] Add HashSet and supporting files

These files are based on dotnet/runtime@556582d9 (v5.0.7).
--- .../HashSet/ISet_Generic_Tests`1.cs | 617 +++++++ .../HashSet/SegmentedHashSet_Generic_Tests.cs | 199 +++ .../SegmentedHashSet_Generic_Tests`1.cs | 711 ++++++++ ...tedHashSet_IEnumerable_NonGeneric_Tests.cs | 60 + .../Collections/HashSet/TestingTypes.cs | 387 ++++ .../Collections/Internal/BitHelper.cs | 40 + .../SegmentedHashSetEqualityComparer`1.cs | 77 + ...crosoft.CodeAnalysis.Collections.projitems | 3 + .../Collections/SegmentedHashSet`1.cs | 1582 +++++++++++++++++ 9 files changed, 3676 insertions(+) create mode 100644 src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/ISet_Generic_Tests`1.cs create mode 100644 src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests.cs create mode 100644 src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests`1.cs create mode 100644 src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_IEnumerable_NonGeneric_Tests.cs create mode 100644 src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/TestingTypes.cs create mode 100644 src/Dependencies/Collections/Internal/BitHelper.cs create mode 100644 src/Dependencies/Collections/Internal/SegmentedHashSetEqualityComparer`1.cs create mode 100644 src/Dependencies/Collections/SegmentedHashSet`1.cs diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/ISet_Generic_Tests`1.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/ISet_Generic_Tests`1.cs new file mode 100644 index 0000000000000..438b72e4ff746 --- /dev/null +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/ISet_Generic_Tests`1.cs @@ -0,0 +1,617 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using Xunit; + +namespace System.Collections.Tests +{ + /// + /// Contains tests that ensure the correctness of any class that implements the generic + /// ISet interface. + /// + /// Tests for an ISet follow a rather different structure because of the consistency in + /// function signatures. Instead of having a test for every data scenario within a class for + /// every set function, there is instead a test for every configuration of enumerable parameter. + /// Each of those tests calls a Validation function that calculates the expected result and then + /// compares it to the actual result of the set operation. + /// + public abstract class ISet_Generic_Tests : ICollection_Generic_Tests + { + #region ISet Helper methods + + /// + /// Creates an instance of an ISet{T} that can be used for testing. + /// + /// An instance of an ISet{T} that can be used for testing. + protected abstract ISet GenericISetFactory(); + + /// + /// Creates an instance of an ISet{T} that can be used for testing. + /// + /// The number of unique items that the returned ISet{T} contains. + /// An instance of an ISet{T} that can be used for testing. 
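        // Illustrative aside (not part of the ported sources): concrete suites plug into these abstract
        // tests by overriding the factory hooks described above. A minimal sketch, with a hypothetical
        // set type "MySet<T>" standing in for the collection under test:
        //
        //     public class MySet_Generic_Tests_int : ISet_Generic_Tests<int>
        //     {
        //         protected override int CreateT(int seed) => new Random(seed).Next();
        //         protected override ISet<int> GenericISetFactory() => new MySet<int>();
        //     }
        //
        // Each [Theory] below then builds a set through the factory, builds an IEnumerable<T> argument via
        // CreateEnumerable, and checks the result of the set operation against the matching Validate_* computation.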
+ protected virtual ISet GenericISetFactory(int count) + { + ISet collection = GenericISetFactory(); + AddToCollection(collection, count); + return collection; + } + + protected override void AddToCollection(ICollection collection, int numberOfItemsToAdd) + { + int seed = 9600; + ISet set = (ISet)collection; + while (set.Count < numberOfItemsToAdd) + { + T toAdd = CreateT(seed++); + while (set.Contains(toAdd) || (InvalidValues != Array.Empty() && InvalidValues.Contains(toAdd, GetIEqualityComparer()))) + toAdd = CreateT(seed++); + set.Add(toAdd); + } + } + + protected virtual int ISet_Large_Capacity => 1000; + + #endregion + + #region ICollection Helper Methods + + protected override ICollection GenericICollectionFactory() => GenericISetFactory(); + + protected override ICollection GenericICollectionFactory(int count) => GenericISetFactory(count); + + protected override bool DuplicateValuesAllowed => false; + protected override bool DefaultValueWhenNotAllowed_Throws => false; + + #endregion + + #region ICollection_Generic + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void ICollection_Generic_Add_ReturnValue(int count) + { + if (!IsReadOnly) + { + ISet set = GenericISetFactory(count); + int seed = 92834; + T newValue = CreateT(seed++); + while (set.Contains(newValue)) + newValue = CreateT(seed++); + Assert.True(set.Add(newValue)); + if (!DuplicateValuesAllowed) + Assert.False(set.Add(newValue)); + Assert.Equal(count + 1, set.Count); + Assert.True(set.Contains(newValue)); + } + } + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void ICollection_Generic_Add_DuplicateValue_DoesNothing(int count) + { + if (!IsReadOnly) + { + if (!DuplicateValuesAllowed) + { + ICollection collection = GenericICollectionFactory(count); + int seed = 800; + T duplicateValue = CreateT(seed++); + while (collection.Contains(duplicateValue)) + duplicateValue = CreateT(seed++); + collection.Add(duplicateValue); + collection.Add(duplicateValue); + Assert.Equal(count + 1, collection.Count); + } + } + } + + #endregion + + #region Set Function Validation + + private void Validate_ExceptWith(ISet set, IEnumerable enumerable) + { + if (set.Count == 0 || enumerable == set) + { + set.ExceptWith(enumerable); + Assert.Equal(0, set.Count); + } + else + { + HashSet expected = new HashSet(set, GetIEqualityComparer()); + foreach (T element in enumerable) + expected.Remove(element); + set.ExceptWith(enumerable); + Assert.Equal(expected.Count, set.Count); + Assert.True(expected.SetEquals(set)); + } + } + + private void Validate_IntersectWith(ISet set, IEnumerable enumerable) + { + if (set.Count == 0 || Enumerable.Count(enumerable) == 0) + { + set.IntersectWith(enumerable); + Assert.Equal(0, set.Count); + } + else if (set == enumerable) + { + HashSet beforeOperation = new HashSet(set, GetIEqualityComparer()); + set.IntersectWith(enumerable); + Assert.True(beforeOperation.SetEquals(set)); + } + else + { + IEqualityComparer comparer = GetIEqualityComparer(); + HashSet expected = new HashSet(comparer); + foreach (T value in set) + if (enumerable.Contains(value, comparer)) + expected.Add(value); + set.IntersectWith(enumerable); + Assert.Equal(expected.Count, set.Count); + Assert.True(expected.SetEquals(set)); + } + } + + private void Validate_IsProperSubsetOf(ISet set, IEnumerable enumerable) + { + bool setContainsValueNotInEnumerable = false; + bool enumerableContainsValueNotInSet = false; + IEqualityComparer comparer = GetIEqualityComparer(); + foreach (T value in set) // Every value in Set must be in 
Enumerable + { + if (!enumerable.Contains(value, comparer)) + { + setContainsValueNotInEnumerable = true; + break; + } + } + foreach (T value in enumerable) // Enumerable must contain at least one value not in Set + { + if (!set.Contains(value, comparer)) + { + enumerableContainsValueNotInSet = true; + break; + } + } + Assert.Equal(!setContainsValueNotInEnumerable && enumerableContainsValueNotInSet, set.IsProperSubsetOf(enumerable)); + } + + private void Validate_IsProperSupersetOf(ISet set, IEnumerable enumerable) + { + bool isProperSuperset = true; + bool setContainsElementsNotInEnumerable = false; + IEqualityComparer comparer = GetIEqualityComparer(); + foreach (T value in enumerable) + { + if (!set.Contains(value, comparer)) + { + isProperSuperset = false; + break; + } + } + foreach (T value in set) + { + if (!enumerable.Contains(value, comparer)) + { + setContainsElementsNotInEnumerable = true; + break; + } + } + isProperSuperset = isProperSuperset && setContainsElementsNotInEnumerable; + Assert.Equal(isProperSuperset, set.IsProperSupersetOf(enumerable)); + } + + private void Validate_IsSubsetOf(ISet set, IEnumerable enumerable) + { + IEqualityComparer comparer = GetIEqualityComparer(); + foreach (T value in set) + if (!enumerable.Contains(value, comparer)) + { + Assert.False(set.IsSubsetOf(enumerable)); + return; + } + Assert.True(set.IsSubsetOf(enumerable)); + } + + private void Validate_IsSupersetOf(ISet set, IEnumerable enumerable) + { + IEqualityComparer comparer = GetIEqualityComparer(); + foreach (T value in enumerable) + if (!set.Contains(value, comparer)) + { + Assert.False(set.IsSupersetOf(enumerable)); + return; + } + Assert.True(set.IsSupersetOf(enumerable)); + } + + private void Validate_Overlaps(ISet set, IEnumerable enumerable) + { + IEqualityComparer comparer = GetIEqualityComparer(); + foreach (T value in enumerable) + { + if (set.Contains(value, comparer)) + { + Assert.True(set.Overlaps(enumerable)); + return; + } + } + Assert.False(set.Overlaps(enumerable)); + } + + private void Validate_SetEquals(ISet set, IEnumerable enumerable) + { + IEqualityComparer comparer = GetIEqualityComparer(); + foreach (T value in set) + { + if (!enumerable.Contains(value, comparer)) + { + Assert.False(set.SetEquals(enumerable)); + return; + } + } + foreach (T value in enumerable) + { + if (!set.Contains(value, comparer)) + { + Assert.False(set.SetEquals(enumerable)); + return; + } + } + Assert.True(set.SetEquals(enumerable)); + } + + private void Validate_SymmetricExceptWith(ISet set, IEnumerable enumerable) + { + IEqualityComparer comparer = GetIEqualityComparer(); + HashSet expected = new HashSet(comparer); + foreach (T element in enumerable) + if (!set.Contains(element, comparer)) + expected.Add(element); + foreach (T element in set) + if (!enumerable.Contains(element, comparer)) + expected.Add(element); + set.SymmetricExceptWith(enumerable); + Assert.Equal(expected.Count, set.Count); + Assert.True(expected.SetEquals(set)); + } + + private void Validate_UnionWith(ISet set, IEnumerable enumerable) + { + IEqualityComparer comparer = GetIEqualityComparer(); + HashSet expected = new HashSet(set, comparer); + foreach (T element in enumerable) + if (!set.Contains(element, comparer)) + expected.Add(element); + set.UnionWith(enumerable); + Assert.Equal(expected.Count, set.Count); + Assert.True(expected.SetEquals(set)); + } + + #endregion + + #region Set Function tests + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void ISet_Generic_NullEnumerableArgument(int count) + { + 
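            // Illustrative aside (not part of the ported sources): the semantics that the Validate_* helpers
            // above recompute, shown concretely with a plain HashSet<int>.
            var semanticsExample = new HashSet<int> { 1, 2 };
            Debug.Assert(semanticsExample.IsSubsetOf(new[] { 1, 2 }));            // subset: every element appears on the other side
            Debug.Assert(!semanticsExample.IsProperSubsetOf(new[] { 1, 2 }));     // proper subset also needs an element the set lacks
            Debug.Assert(semanticsExample.IsProperSubsetOf(new[] { 1, 2, 3 }));
            Debug.Assert(semanticsExample.Overlaps(new[] { 2, 5 }));              // overlap: at least one common element
            Debug.Assert(semanticsExample.SetEquals(new[] { 2, 1, 1 }));          // SetEquals ignores order and duplicates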
ISet set = GenericISetFactory(count); + Assert.Throws(() => set.ExceptWith(null)); + Assert.Throws(() => set.IntersectWith(null)); + Assert.Throws(() => set.IsProperSubsetOf(null)); + Assert.Throws(() => set.IsProperSupersetOf(null)); + Assert.Throws(() => set.IsSubsetOf(null)); + Assert.Throws(() => set.IsSupersetOf(null)); + Assert.Throws(() => set.Overlaps(null)); + Assert.Throws(() => set.SetEquals(null)); + Assert.Throws(() => set.SymmetricExceptWith(null)); + Assert.Throws(() => set.UnionWith(null)); + } + + [Theory] + [MemberData(nameof(EnumerableTestData))] + public void ISet_Generic_ExceptWith(EnumerableType enumerableType, int setLength, int enumerableLength, int numberOfMatchingElements, int numberOfDuplicateElements) + { + ISet set = GenericISetFactory(setLength); + IEnumerable enumerable = CreateEnumerable(enumerableType, set, enumerableLength, numberOfMatchingElements, numberOfDuplicateElements); + Validate_ExceptWith(set, enumerable); + } + + [Theory] + [MemberData(nameof(EnumerableTestData))] + public void ISet_Generic_IntersectWith(EnumerableType enumerableType, int setLength, int enumerableLength, int numberOfMatchingElements, int numberOfDuplicateElements) + { + ISet set = GenericISetFactory(setLength); + IEnumerable enumerable = CreateEnumerable(enumerableType, set, enumerableLength, numberOfMatchingElements, numberOfDuplicateElements); + Validate_IntersectWith(set, enumerable); + } + + [Theory] + [MemberData(nameof(EnumerableTestData))] + public void ISet_Generic_IsProperSubsetOf(EnumerableType enumerableType, int setLength, int enumerableLength, int numberOfMatchingElements, int numberOfDuplicateElements) + { + ISet set = GenericISetFactory(setLength); + IEnumerable enumerable = CreateEnumerable(enumerableType, set, enumerableLength, numberOfMatchingElements, numberOfDuplicateElements); + Validate_IsProperSubsetOf(set, enumerable); + } + + [Theory] + [MemberData(nameof(EnumerableTestData))] + public void ISet_Generic_IsProperSupersetOf(EnumerableType enumerableType, int setLength, int enumerableLength, int numberOfMatchingElements, int numberOfDuplicateElements) + { + ISet set = GenericISetFactory(setLength); + IEnumerable enumerable = CreateEnumerable(enumerableType, set, enumerableLength, numberOfMatchingElements, numberOfDuplicateElements); + Validate_IsProperSupersetOf(set, enumerable); + } + + [Theory] + [MemberData(nameof(EnumerableTestData))] + public void ISet_Generic_IsSubsetOf(EnumerableType enumerableType, int setLength, int enumerableLength, int numberOfMatchingElements, int numberOfDuplicateElements) + { + ISet set = GenericISetFactory(setLength); + IEnumerable enumerable = CreateEnumerable(enumerableType, set, enumerableLength, numberOfMatchingElements, numberOfDuplicateElements); + Validate_IsSubsetOf(set, enumerable); + } + + [Theory] + [MemberData(nameof(EnumerableTestData))] + public void ISet_Generic_IsSupersetOf(EnumerableType enumerableType, int setLength, int enumerableLength, int numberOfMatchingElements, int numberOfDuplicateElements) + { + ISet set = GenericISetFactory(setLength); + IEnumerable enumerable = CreateEnumerable(enumerableType, set, enumerableLength, numberOfMatchingElements, numberOfDuplicateElements); + Validate_IsSupersetOf(set, enumerable); + } + + [Theory] + [MemberData(nameof(EnumerableTestData))] + public void ISet_Generic_Overlaps(EnumerableType enumerableType, int setLength, int enumerableLength, int numberOfMatchingElements, int numberOfDuplicateElements) + { + ISet set = GenericISetFactory(setLength); + IEnumerable 
enumerable = CreateEnumerable(enumerableType, set, enumerableLength, numberOfMatchingElements, numberOfDuplicateElements); + Validate_Overlaps(set, enumerable); + } + + [Theory] + [MemberData(nameof(EnumerableTestData))] + public void ISet_Generic_SetEquals(EnumerableType enumerableType, int setLength, int enumerableLength, int numberOfMatchingElements, int numberOfDuplicateElements) + { + ISet set = GenericISetFactory(setLength); + IEnumerable enumerable = CreateEnumerable(enumerableType, set, enumerableLength, numberOfMatchingElements, numberOfDuplicateElements); + Validate_SetEquals(set, enumerable); + } + + [Theory] + [MemberData(nameof(EnumerableTestData))] + public void ISet_Generic_SymmetricExceptWith(EnumerableType enumerableType, int setLength, int enumerableLength, int numberOfMatchingElements, int numberOfDuplicateElements) + { + ISet set = GenericISetFactory(setLength); + IEnumerable enumerable = CreateEnumerable(enumerableType, set, enumerableLength, numberOfMatchingElements, numberOfDuplicateElements); + Validate_SymmetricExceptWith(set, enumerable); + } + + [Theory] + [MemberData(nameof(EnumerableTestData))] + public void ISet_Generic_UnionWith(EnumerableType enumerableType, int setLength, int enumerableLength, int numberOfMatchingElements, int numberOfDuplicateElements) + { + ISet set = GenericISetFactory(setLength); + IEnumerable enumerable = CreateEnumerable(enumerableType, set, enumerableLength, numberOfMatchingElements, numberOfDuplicateElements); + Validate_UnionWith(set, enumerable); + } + + #endregion + + #region Set Function tests on itself + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void ISet_Generic_ExceptWith_Itself(int setLength) + { + ISet set = GenericISetFactory(setLength); + Validate_ExceptWith(set, set); + } + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, ".NET Framework throws InvalidOperationException")] + public void ISet_Generic_IntersectWith_Itself(int setLength) + { + ISet set = GenericISetFactory(setLength); + Validate_IntersectWith(set, set); + } + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void ISet_Generic_IsProperSubsetOf_Itself(int setLength) + { + ISet set = GenericISetFactory(setLength); + Validate_IsProperSubsetOf(set, set); + } + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void ISet_Generic_IsProperSupersetOf_Itself(int setLength) + { + ISet set = GenericISetFactory(setLength); + Validate_IsProperSupersetOf(set, set); + } + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void ISet_Generic_IsSubsetOf_Itself(int setLength) + { + ISet set = GenericISetFactory(setLength); + Validate_IsSubsetOf(set, set); + } + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void ISet_Generic_IsSupersetOf_Itself(int setLength) + { + ISet set = GenericISetFactory(setLength); + Validate_IsSupersetOf(set, set); + } + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void ISet_Generic_Overlaps_Itself(int setLength) + { + ISet set = GenericISetFactory(setLength); + Validate_Overlaps(set, set); + } + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void ISet_Generic_SetEquals_Itself(int setLength) + { + ISet set = GenericISetFactory(setLength); + Assert.True(set.SetEquals(set)); + } + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void ISet_Generic_SymmetricExceptWith_Itself(int setLength) + { + ISet set = GenericISetFactory(setLength); + 
Validate_SymmetricExceptWith(set, set); + } + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void ISet_Generic_UnionWith_Itself(int setLength) + { + ISet set = GenericISetFactory(setLength); + Validate_UnionWith(set, set); + } + + #endregion + + #region Set Function tests on a large Set + + [Fact] + [OuterLoop] + public void ISet_Generic_ExceptWith_LargeSet() + { + ISet set = GenericISetFactory(ISet_Large_Capacity); + IEnumerable enumerable = CreateEnumerable(EnumerableType.List, set, 150, 0, 0); + Validate_ExceptWith(set, enumerable); + } + + [Fact] + [OuterLoop] + public void ISet_Generic_IntersectWith_LargeSet() + { + ISet set = GenericISetFactory(ISet_Large_Capacity); + IEnumerable enumerable = CreateEnumerable(EnumerableType.List, set, 150, 0, 0); + Validate_IntersectWith(set, enumerable); + } + + [Fact] + [OuterLoop] + public void ISet_Generic_IsProperSubsetOf_LargeSet() + { + ISet set = GenericISetFactory(ISet_Large_Capacity); + IEnumerable enumerable = CreateEnumerable(EnumerableType.List, set, 150, 0, 0); + Validate_IsProperSubsetOf(set, enumerable); + } + + [Fact] + [OuterLoop] + public void ISet_Generic_IsProperSupersetOf_LargeSet() + { + ISet set = GenericISetFactory(ISet_Large_Capacity); + IEnumerable enumerable = CreateEnumerable(EnumerableType.List, set, 150, 0, 0); + Validate_IsProperSupersetOf(set, enumerable); + } + + [Fact] + [OuterLoop] + public void ISet_Generic_IsSubsetOf_LargeSet() + { + ISet set = GenericISetFactory(ISet_Large_Capacity); + IEnumerable enumerable = CreateEnumerable(EnumerableType.List, set, 150, 0, 0); + Validate_IsSubsetOf(set, enumerable); + } + + [Fact] + [OuterLoop] + public void ISet_Generic_IsSupersetOf_LargeSet() + { + ISet set = GenericISetFactory(ISet_Large_Capacity); + IEnumerable enumerable = CreateEnumerable(EnumerableType.List, set, 150, 0, 0); + Validate_IsSupersetOf(set, enumerable); + } + + [Fact] + [OuterLoop] + public void ISet_Generic_Overlaps_LargeSet() + { + ISet set = GenericISetFactory(ISet_Large_Capacity); + IEnumerable enumerable = CreateEnumerable(EnumerableType.List, set, 150, 0, 0); + Validate_Overlaps(set, enumerable); + } + + [Fact] + [OuterLoop] + public void ISet_Generic_SetEquals_LargeSet() + { + ISet set = GenericISetFactory(ISet_Large_Capacity); + IEnumerable enumerable = CreateEnumerable(EnumerableType.List, set, 150, 0, 0); + Validate_SetEquals(set, enumerable); + } + + [Fact] + [OuterLoop] + public void ISet_Generic_SymmetricExceptWith_LargeSet() + { + ISet set = GenericISetFactory(ISet_Large_Capacity); + IEnumerable enumerable = CreateEnumerable(EnumerableType.List, set, 150, 0, 0); + Validate_SymmetricExceptWith(set, enumerable); + } + + [Fact] + [OuterLoop] + public void ISet_Generic_UnionWith_LargeSet() + { + ISet set = GenericISetFactory(ISet_Large_Capacity); + IEnumerable enumerable = CreateEnumerable(EnumerableType.List, set, 150, 0, 0); + Validate_UnionWith(set, enumerable); + } + + #endregion + + #region Other misc ISet test Scenarios + + [Theory] + [MemberData(nameof(EnumerableTestData))] + public void ISet_Generic_SymmetricExceptWith_AfterRemovingElements(EnumerableType enumerableType, int setLength, int enumerableLength, int numberOfMatchingElements, int numberOfDuplicateElements) + { + ISet set = GenericISetFactory(setLength); + T value = CreateT(532); + if (!set.Contains(value)) + set.Add(value); + set.Remove(value); + IEnumerable enumerable = CreateEnumerable(enumerableType, set, enumerableLength, numberOfMatchingElements, numberOfDuplicateElements); + Debug.Assert(enumerable != 
null); + + IEqualityComparer comparer = GetIEqualityComparer(); + HashSet expected = new HashSet(comparer); + foreach (T element in enumerable) + if (!set.Contains(element, comparer)) + expected.Add(element); + foreach (T element in set) + if (!enumerable.Contains(element, comparer)) + expected.Add(element); + set.SymmetricExceptWith(enumerable); + Assert.Equal(expected.Count, set.Count); + Assert.True(expected.SetEquals(set)); + } + + #endregion + } +} diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests.cs new file mode 100644 index 0000000000000..499732281ea97 --- /dev/null +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests.cs @@ -0,0 +1,199 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Collections.Generic; +using Xunit; + +namespace System.Collections.Tests +{ + public class HashSet_Generic_Tests_string : HashSet_Generic_Tests + { + protected override string CreateT(int seed) + { + int stringLength = seed % 10 + 5; + Random rand = new Random(seed); + byte[] bytes = new byte[stringLength]; + rand.NextBytes(bytes); + return Convert.ToBase64String(bytes); + } + } + + public class HashSet_Generic_Tests_int : HashSet_Generic_Tests + { + protected override int CreateT(int seed) + { + Random rand = new Random(seed); + return rand.Next(); + } + + protected override bool DefaultValueAllowed => true; + } + + public class HashSet_Generic_Tests_int_With_Comparer_WrapStructural_Int : HashSet_Generic_Tests + { + protected override IEqualityComparer GetIEqualityComparer() + { + return new WrapStructural_Int(); + } + + protected override IComparer GetIComparer() + { + return new WrapStructural_Int(); + } + + protected override int CreateT(int seed) + { + Random rand = new Random(seed); + return rand.Next(); + } + + protected override ISet GenericISetFactory() + { + return new HashSet(new WrapStructural_Int()); + } + } + + public class HashSet_Generic_Tests_int_With_Comparer_WrapStructural_SimpleInt : HashSet_Generic_Tests + { + protected override IEqualityComparer GetIEqualityComparer() + { + return new WrapStructural_SimpleInt(); + } + + protected override IComparer GetIComparer() + { + return new WrapStructural_SimpleInt(); + } + + protected override SimpleInt CreateT(int seed) + { + Random rand = new Random(seed); + return new SimpleInt(rand.Next()); + } + + protected override ISet GenericISetFactory() + { + return new HashSet(new WrapStructural_SimpleInt()); + } + } + + [OuterLoop] + public class HashSet_Generic_Tests_EquatableBackwardsOrder : HashSet_Generic_Tests + { + protected override EquatableBackwardsOrder CreateT(int seed) + { + Random rand = new Random(seed); + return new EquatableBackwardsOrder(rand.Next()); + } + + protected override ISet GenericISetFactory() + { + return new HashSet(); + } + } + + [OuterLoop] + public class HashSet_Generic_Tests_int_With_Comparer_SameAsDefaultComparer : HashSet_Generic_Tests + { + protected override IEqualityComparer GetIEqualityComparer() + { + return new Comparer_SameAsDefaultComparer(); + } + + protected override int CreateT(int seed) + { + Random rand = new Random(seed); + return rand.Next(); + } + + protected override ISet GenericISetFactory() + { + return new HashSet(new Comparer_SameAsDefaultComparer()); + } + } + + [OuterLoop] + public class 
HashSet_Generic_Tests_int_With_Comparer_HashCodeAlwaysReturnsZero : HashSet_Generic_Tests + { + protected override IEqualityComparer GetIEqualityComparer() + { + return new Comparer_HashCodeAlwaysReturnsZero(); + } + + protected override int CreateT(int seed) + { + Random rand = new Random(seed); + return rand.Next(); + } + + protected override ISet GenericISetFactory() + { + return new HashSet(new Comparer_HashCodeAlwaysReturnsZero()); + } + } + + [OuterLoop] + public class HashSet_Generic_Tests_int_With_Comparer_ModOfInt : HashSet_Generic_Tests + { + protected override IEqualityComparer GetIEqualityComparer() + { + return new Comparer_ModOfInt(15000); + } + + protected override IComparer GetIComparer() + { + return new Comparer_ModOfInt(15000); + } + + protected override int CreateT(int seed) + { + Random rand = new Random(seed); + return rand.Next(); + } + + protected override ISet GenericISetFactory() + { + return new HashSet(new Comparer_ModOfInt(15000)); + } + } + + [OuterLoop] + public class HashSet_Generic_Tests_int_With_Comparer_AbsOfInt : HashSet_Generic_Tests + { + protected override IEqualityComparer GetIEqualityComparer() + { + return new Comparer_AbsOfInt(); + } + + protected override int CreateT(int seed) + { + Random rand = new Random(seed); + return rand.Next(); + } + + protected override ISet GenericISetFactory() + { + return new HashSet(new Comparer_AbsOfInt()); + } + } + + [OuterLoop] + public class HashSet_Generic_Tests_int_With_Comparer_BadIntEqualityComparer : HashSet_Generic_Tests + { + protected override IEqualityComparer GetIEqualityComparer() + { + return new BadIntEqualityComparer(); + } + + protected override int CreateT(int seed) + { + Random rand = new Random(seed); + return rand.Next(); + } + + protected override ISet GenericISetFactory() + { + return new HashSet(new BadIntEqualityComparer()); + } + } +} diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests`1.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests`1.cs new file mode 100644 index 0000000000000..c82ca3938463a --- /dev/null +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests`1.cs @@ -0,0 +1,711 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Runtime.Serialization.Formatters.Binary; +using Xunit; + +namespace System.Collections.Tests +{ + /// + /// Contains tests that ensure the correctness of the HashSet class. + /// + public abstract class HashSet_Generic_Tests : ISet_Generic_Tests + { + #region ISet Helper Methods + + protected override bool ResetImplemented => true; + + protected override ModifyOperation ModifyEnumeratorThrows => PlatformDetection.IsNetFramework ? base.ModifyEnumeratorThrows : (base.ModifyEnumeratorAllowed & ~(ModifyOperation.Remove | ModifyOperation.Clear)); + + protected override ModifyOperation ModifyEnumeratorAllowed => PlatformDetection.IsNetFramework ? 
base.ModifyEnumeratorAllowed : ModifyOperation.Overwrite | ModifyOperation.Remove | ModifyOperation.Clear; + + protected override ISet GenericISetFactory() + { + return new HashSet(); + } + + #endregion + + #region Constructors + + private static IEnumerable NonSquares(int limit) + { + for (int i = 0; i != limit; ++i) + { + int root = (int)Math.Sqrt(i); + if (i != root * root) + yield return i; + } + } + + [Fact] + public void HashSet_Generic_Constructor() + { + HashSet set = new HashSet(); + Assert.Empty(set); + } + + [Fact] + public void HashSet_Generic_Constructor_IEqualityComparer() + { + IEqualityComparer comparer = GetIEqualityComparer(); + HashSet set = new HashSet(comparer); + if (comparer == null) + Assert.Equal(EqualityComparer.Default, set.Comparer); + else + Assert.Equal(comparer, set.Comparer); + } + + [Fact] + public void HashSet_Generic_Constructor_NullIEqualityComparer() + { + IEqualityComparer comparer = null; + HashSet set = new HashSet(comparer); + if (comparer == null) + Assert.Equal(EqualityComparer.Default, set.Comparer); + else + Assert.Equal(comparer, set.Comparer); + } + + [Theory] + [MemberData(nameof(EnumerableTestData))] + public void HashSet_Generic_Constructor_IEnumerable(EnumerableType enumerableType, int setLength, int enumerableLength, int numberOfMatchingElements, int numberOfDuplicateElements) + { + _ = setLength; + _ = numberOfMatchingElements; + IEnumerable enumerable = CreateEnumerable(enumerableType, null, enumerableLength, 0, numberOfDuplicateElements); + HashSet set = new HashSet(enumerable); + Assert.True(set.SetEquals(enumerable)); + } + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void HashSet_Generic_Constructor_IEnumerable_WithManyDuplicates(int count) + { + IEnumerable items = CreateEnumerable(EnumerableType.List, null, count, 0, 0); + HashSet hashSetFromDuplicates = new HashSet(Enumerable.Range(0, 40).SelectMany(i => items).ToArray()); + HashSet hashSetFromNoDuplicates = new HashSet(items); + Assert.True(hashSetFromNoDuplicates.SetEquals(hashSetFromDuplicates)); + } + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void HashSet_Generic_Constructor_HashSet_SparselyFilled(int count) + { + HashSet source = (HashSet)CreateEnumerable(EnumerableType.HashSet, null, count, 0, 0); + List sourceElements = source.ToList(); + foreach (int i in NonSquares(count)) + source.Remove(sourceElements[i]);// Unevenly spaced survivors increases chance of catching any spacing-related bugs. 
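            // Descriptive note (added for clarity): the copy construction below therefore has to enumerate a
            // source set whose internal slots now contain gaps left by the removals above, which is exactly
            // the "sparsely filled" shape this test is named for.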
+ + + HashSet set = new HashSet(source, GetIEqualityComparer()); + Assert.True(set.SetEquals(source)); + } + + [Fact] + public void HashSet_Generic_Constructor_IEnumerable_Null() + { + Assert.Throws(() => new HashSet((IEnumerable)null)); + Assert.Throws(() => new HashSet((IEnumerable)null, EqualityComparer.Default)); + } + + [Theory] + [MemberData(nameof(EnumerableTestData))] + public void HashSet_Generic_Constructor_IEnumerable_IEqualityComparer(EnumerableType enumerableType, int setLength, int enumerableLength, int numberOfMatchingElements, int numberOfDuplicateElements) + { + _ = setLength; + _ = numberOfMatchingElements; + _ = numberOfDuplicateElements; + IEnumerable enumerable = CreateEnumerable(enumerableType, null, enumerableLength, 0, 0); + HashSet set = new HashSet(enumerable, GetIEqualityComparer()); + Assert.True(set.SetEquals(enumerable)); + } + + #endregion + + #region RemoveWhere + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void HashSet_Generic_RemoveWhere_AllElements(int setLength) + { + HashSet set = (HashSet)GenericISetFactory(setLength); + int removedCount = set.RemoveWhere((value) => { return true; }); + Assert.Equal(setLength, removedCount); + } + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void HashSet_Generic_RemoveWhere_NoElements(int setLength) + { + HashSet set = (HashSet)GenericISetFactory(setLength); + int removedCount = set.RemoveWhere((value) => { return false; }); + Assert.Equal(0, removedCount); + Assert.Equal(setLength, set.Count); + } + + [Fact] + public void HashSet_Generic_RemoveWhere_NewObject() // Regression Dev10_624201 + { + object[] array = new object[2]; + object obj = new object(); + HashSet set = new HashSet(); + + set.Add(obj); + set.Remove(obj); + foreach (object o in set) { } + set.CopyTo(array, 0, 2); + set.RemoveWhere((element) => { return false; }); + } + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void HashSet_Generic_RemoveWhere_NullMatchPredicate(int setLength) + { + HashSet set = (HashSet)GenericISetFactory(setLength); + Assert.Throws(() => set.RemoveWhere(null)); + } + + #endregion + + #region TrimExcess + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void HashSet_Generic_TrimExcess_OnValidSetThatHasntBeenRemovedFrom(int setLength) + { + HashSet set = (HashSet)GenericISetFactory(setLength); + set.TrimExcess(); + } + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void HashSet_Generic_TrimExcess_Repeatedly(int setLength) + { + HashSet set = (HashSet)GenericISetFactory(setLength); + List expected = set.ToList(); + set.TrimExcess(); + set.TrimExcess(); + set.TrimExcess(); + Assert.True(set.SetEquals(expected)); + } + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void HashSet_Generic_TrimExcess_AfterRemovingOneElement(int setLength) + { + if (setLength > 0) + { + HashSet set = (HashSet)GenericISetFactory(setLength); + List expected = set.ToList(); + T elementToRemove = set.ElementAt(0); + + set.TrimExcess(); + Assert.True(set.Remove(elementToRemove)); + expected.Remove(elementToRemove); + set.TrimExcess(); + + Assert.True(set.SetEquals(expected)); + } + } + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void HashSet_Generic_TrimExcess_AfterClearingAndAddingSomeElementsBack(int setLength) + { + if (setLength > 0) + { + HashSet set = (HashSet)GenericISetFactory(setLength); + set.TrimExcess(); + set.Clear(); + set.TrimExcess(); + Assert.Equal(0, set.Count); + + AddToCollection(set, setLength / 10); + 
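                // Descriptive note (added for clarity): re-adding a tenth of the elements may have grown the
                // backing storage again; the TrimExcess call below shrinks it back toward the new Count, and
                // the assertion only verifies the element count after the shrink.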
set.TrimExcess(); + Assert.Equal(setLength / 10, set.Count); + } + } + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void HashSet_Generic_TrimExcess_AfterClearingAndAddingAllElementsBack(int setLength) + { + if (setLength > 0) + { + HashSet set = (HashSet)GenericISetFactory(setLength); + set.TrimExcess(); + set.Clear(); + set.TrimExcess(); + Assert.Equal(0, set.Count); + + AddToCollection(set, setLength); + set.TrimExcess(); + Assert.Equal(setLength, set.Count); + } + } + + #endregion + + #region CopyTo + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void HashSet_Generic_CopyTo_NegativeCount_ThrowsArgumentOutOfRangeException(int count) + { + HashSet set = (HashSet)GenericISetFactory(count); + T[] arr = new T[count]; + Assert.Throws(() => set.CopyTo(arr, 0, -1)); + Assert.Throws(() => set.CopyTo(arr, 0, int.MinValue)); + } + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void HashSet_Generic_CopyTo_NoIndexDefaultsToZero(int count) + { + HashSet set = (HashSet)GenericISetFactory(count); + T[] arr1 = new T[count]; + T[] arr2 = new T[count]; + set.CopyTo(arr1); + set.CopyTo(arr2, 0); + Assert.True(arr1.SequenceEqual(arr2)); + } + + #endregion + + #region CreateSetComparer + + [Fact] + public void SetComparer_SetEqualsTests() + { + List objects = new List() { CreateT(1), CreateT(2), CreateT(3), CreateT(4), CreateT(5), CreateT(6) }; + + var set = new HashSet>() + { + new HashSet { objects[0], objects[1], objects[2] }, + new HashSet { objects[3], objects[4], objects[5] } + }; + + var noComparerSet = new HashSet>() + { + new HashSet { objects[0], objects[1], objects[2] }, + new HashSet { objects[3], objects[4], objects[5] } + }; + + var comparerSet1 = new HashSet>(HashSet.CreateSetComparer()) + { + new HashSet { objects[0], objects[1], objects[2] }, + new HashSet { objects[3], objects[4], objects[5] } + }; + + var comparerSet2 = new HashSet>(HashSet.CreateSetComparer()) + { + new HashSet { objects[3], objects[4], objects[5] }, + new HashSet { objects[0], objects[1], objects[2] } + }; + + Assert.False(noComparerSet.SetEquals(set)); + Assert.True(comparerSet1.SetEquals(set)); + Assert.True(comparerSet2.SetEquals(set)); + } + + [Fact] + public void SetComparer_SequenceEqualTests() + { + List objects = new List() { CreateT(1), CreateT(2), CreateT(3), CreateT(4), CreateT(5), CreateT(6) }; + + var set = new HashSet>() + { + new HashSet { objects[0], objects[1], objects[2] }, + new HashSet { objects[3], objects[4], objects[5] } + }; + + var noComparerSet = new HashSet>() + { + new HashSet { objects[0], objects[1], objects[2] }, + new HashSet { objects[3], objects[4], objects[5] } + }; + + var comparerSet = new HashSet>(HashSet.CreateSetComparer()) + { + new HashSet { objects[0], objects[1], objects[2] }, + new HashSet { objects[3], objects[4], objects[5] } + }; + + Assert.False(noComparerSet.SequenceEqual(set)); + Assert.True(noComparerSet.SequenceEqual(set, HashSet.CreateSetComparer())); + Assert.False(comparerSet.SequenceEqual(set)); + } + + #endregion + + [Fact] + public void CanBeCastedToISet() + { + HashSet set = new HashSet(); + ISet iset = (set as ISet); + Assert.NotNull(iset); + } + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void HashSet_Generic_Constructor_int(int capacity) + { + HashSet set = new HashSet(capacity); + Assert.Equal(0, set.Count); + } + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void HashSet_Generic_Constructor_int_AddUpToAndBeyondCapacity(int capacity) + { + HashSet set = new 
HashSet(capacity); + + AddToCollection(set, capacity); + Assert.Equal(capacity, set.Count); + + AddToCollection(set, capacity + 1); + Assert.Equal(capacity + 1, set.Count); + } + + [Fact] + public void HashSet_Generic_Constructor_Capacity_ToNextPrimeNumber() + { + // Highest pre-computed number + 1. + const int Capacity = 7199370; + var set = new HashSet(Capacity); + + // Assert that the HashTable's capacity is set to the descendant prime number of the given one. + const int NextPrime = 7199371; + Assert.Equal(NextPrime, set.EnsureCapacity(0)); + } + + [Fact] + public void HashSet_Generic_Constructor_int_Negative_ThrowsArgumentOutOfRangeException() + { + AssertExtensions.Throws("capacity", () => new HashSet(-1)); + AssertExtensions.Throws("capacity", () => new HashSet(int.MinValue)); + } + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void HashSet_Generic_Constructor_int_IEqualityComparer(int capacity) + { + IEqualityComparer comparer = GetIEqualityComparer(); + HashSet set = new HashSet(capacity, comparer); + Assert.Equal(0, set.Count); + if (comparer == null) + Assert.Equal(EqualityComparer.Default, set.Comparer); + else + Assert.Equal(comparer, set.Comparer); + } + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void HashSet_Generic_Constructor_int_IEqualityComparer_AddUpToAndBeyondCapacity(int capacity) + { + IEqualityComparer comparer = GetIEqualityComparer(); + HashSet set = new HashSet(capacity, comparer); + + AddToCollection(set, capacity); + Assert.Equal(capacity, set.Count); + + AddToCollection(set, capacity + 1); + Assert.Equal(capacity + 1, set.Count); + } + + [Fact] + public void HashSet_Generic_Constructor_int_IEqualityComparer_Negative_ThrowsArgumentOutOfRangeException() + { + IEqualityComparer comparer = GetIEqualityComparer(); + AssertExtensions.Throws("capacity", () => new HashSet(-1, comparer)); + AssertExtensions.Throws("capacity", () => new HashSet(int.MinValue, comparer)); + } + + #region TryGetValue + + [Fact] + public void HashSet_Generic_TryGetValue_Contains() + { + T value = CreateT(1); + HashSet set = new HashSet { value }; + T equalValue = CreateT(1); + T actualValue; + Assert.True(set.TryGetValue(equalValue, out actualValue)); + Assert.Equal(value, actualValue); + if (!typeof(T).IsValueType) + { + Assert.Same((object)value, (object)actualValue); + } + } + + [Fact] + public void HashSet_Generic_TryGetValue_Contains_OverwriteOutputParam() + { + T value = CreateT(1); + HashSet set = new HashSet { value }; + T equalValue = CreateT(1); + T actualValue = CreateT(2); + Assert.True(set.TryGetValue(equalValue, out actualValue)); + Assert.Equal(value, actualValue); + if (!typeof(T).IsValueType) + { + Assert.Same((object)value, (object)actualValue); + } + } + + [Fact] + public void HashSet_Generic_TryGetValue_NotContains() + { + T value = CreateT(1); + HashSet set = new HashSet { value }; + T equalValue = CreateT(2); + T actualValue; + Assert.False(set.TryGetValue(equalValue, out actualValue)); + Assert.Equal(default(T), actualValue); + } + + [Fact] + public void HashSet_Generic_TryGetValue_NotContains_OverwriteOutputParam() + { + T value = CreateT(1); + HashSet set = new HashSet { value }; + T equalValue = CreateT(2); + T actualValue = equalValue; + Assert.False(set.TryGetValue(equalValue, out actualValue)); + Assert.Equal(default(T), actualValue); + } + + #endregion + + #region EnsureCapacity + + [Theory] + [MemberData(nameof(ValidCollectionSizes))] + public void 
EnsureCapacity_Generic_RequestingLargerCapacity_DoesNotInvalidateEnumeration(int setLength) + { + HashSet set = (HashSet)(GenericISetFactory(setLength)); + var capacity = set.EnsureCapacity(0); + IEnumerator valuesEnum = set.GetEnumerator(); + IEnumerator valuesListEnum = new List(set).GetEnumerator(); + + set.EnsureCapacity(capacity + 1); // Verify EnsureCapacity does not invalidate enumeration + + while (valuesEnum.MoveNext()) + { + valuesListEnum.MoveNext(); + Assert.Equal(valuesListEnum.Current, valuesEnum.Current); + } + } + + [Fact] + public void EnsureCapacity_Generic_NegativeCapacityRequested_Throws() + { + var set = new HashSet(); + AssertExtensions.Throws("capacity", () => set.EnsureCapacity(-1)); + } + + [Fact] + public void EnsureCapacity_Generic_HashsetNotInitialized_RequestedZero_ReturnsZero() + { + var set = new HashSet(); + Assert.Equal(0, set.EnsureCapacity(0)); + } + + [Theory] + [InlineData(1)] + [InlineData(2)] + [InlineData(3)] + [InlineData(4)] + public void EnsureCapacity_Generic_HashsetNotInitialized_RequestedNonZero_CapacityIsSetToAtLeastTheRequested(int requestedCapacity) + { + var set = new HashSet(); + Assert.InRange(set.EnsureCapacity(requestedCapacity), requestedCapacity, int.MaxValue); + } + + [Theory] + [InlineData(3)] + [InlineData(7)] + public void EnsureCapacity_Generic_RequestedCapacitySmallerThanCurrent_CapacityUnchanged(int currentCapacity) + { + HashSet set; + + // assert capacity remains the same when ensuring a capacity smaller or equal than existing + for (int i = 0; i <= currentCapacity; i++) + { + set = new HashSet(currentCapacity); + Assert.Equal(currentCapacity, set.EnsureCapacity(i)); + } + } + + [Theory] + [InlineData(7)] + [InlineData(89)] + public void EnsureCapacity_Generic_ExistingCapacityRequested_SameValueReturned(int capacity) + { + var set = new HashSet(capacity); + Assert.Equal(capacity, set.EnsureCapacity(capacity)); + + set = (HashSet)GenericISetFactory(capacity); + Assert.Equal(capacity, set.EnsureCapacity(capacity)); + } + + [Theory] + [InlineData(0)] + [InlineData(1)] + [InlineData(2)] + [InlineData(3)] + [InlineData(4)] + public void EnsureCapacity_Generic_EnsureCapacityCalledTwice_ReturnsSameValue(int setLength) + { + HashSet set = (HashSet)GenericISetFactory(setLength); + int capacity = set.EnsureCapacity(0); + Assert.Equal(capacity, set.EnsureCapacity(0)); + + set = (HashSet)GenericISetFactory(setLength); + capacity = set.EnsureCapacity(setLength); + Assert.Equal(capacity, set.EnsureCapacity(setLength)); + + set = (HashSet)GenericISetFactory(setLength); + capacity = set.EnsureCapacity(setLength + 1); + Assert.Equal(capacity, set.EnsureCapacity(setLength + 1)); + } + + [Theory] + [InlineData(1)] + [InlineData(5)] + [InlineData(7)] + [InlineData(8)] + public void EnsureCapacity_Generic_HashsetNotEmpty_RequestedSmallerThanCount_ReturnsAtLeastSizeOfCount(int setLength) + { + HashSet set = (HashSet)GenericISetFactory(setLength); + Assert.InRange(set.EnsureCapacity(setLength - 1), setLength, int.MaxValue); + } + + [Theory] + [InlineData(7)] + [InlineData(20)] + public void EnsureCapacity_Generic_HashsetNotEmpty_SetsToAtLeastTheRequested(int setLength) + { + HashSet set = (HashSet)GenericISetFactory(setLength); + + // get current capacity + int currentCapacity = set.EnsureCapacity(0); + + // assert we can update to a larger capacity + int newCapacity = set.EnsureCapacity(currentCapacity * 2); + Assert.InRange(newCapacity, currentCapacity * 2, int.MaxValue); + } + + [Fact] + public void 
EnsureCapacity_Generic_CapacityIsSetToPrimeNumberLargerOrEqualToRequested() + { + var set = new HashSet(); + Assert.Equal(17, set.EnsureCapacity(17)); + + set = new HashSet(); + Assert.Equal(17, set.EnsureCapacity(15)); + + set = new HashSet(); + Assert.Equal(17, set.EnsureCapacity(13)); + } + + [Theory] + [InlineData(2)] + [InlineData(10)] + public void EnsureCapacity_Generic_GrowCapacityWithFreeList(int setLength) + { + HashSet set = (HashSet)GenericISetFactory(setLength); + + // Remove the first element to ensure we have a free list. + Assert.True(set.Remove(set.ElementAt(0))); + + int currentCapacity = set.EnsureCapacity(0); + Assert.True(currentCapacity > 0); + + int newCapacity = set.EnsureCapacity(currentCapacity + 1); + Assert.True(newCapacity > currentCapacity); + } + + #endregion + + #region Remove + + [Theory] + [MemberData(nameof(ValidPositiveCollectionSizes))] + public void Remove_NonDefaultComparer_ComparerUsed(int capacity) + { + var c = new TrackingEqualityComparer(); + var set = new HashSet(capacity, c); + + AddToCollection(set, capacity); + T first = set.First(); + c.EqualsCalls = 0; + c.GetHashCodeCalls = 0; + + Assert.Equal(capacity, set.Count); + set.Remove(first); + Assert.Equal(capacity - 1, set.Count); + + Assert.InRange(c.EqualsCalls, 1, int.MaxValue); + Assert.InRange(c.GetHashCodeCalls, 1, int.MaxValue); + } + + #endregion + + #region Serialization + + [ConditionalFact(typeof(PlatformDetection), nameof(PlatformDetection.IsBinaryFormatterSupported))] + public void ComparerSerialization() + { + // Strings switch between randomized and non-randomized comparers, + // however this should never be observable externally. + TestComparerSerialization(EqualityComparer.Default); + + // OrdinalCaseSensitiveComparer is internal and (de)serializes as OrdinalComparer + TestComparerSerialization(StringComparer.Ordinal, "System.OrdinalComparer"); + + // OrdinalIgnoreCaseComparer is internal and (de)serializes as OrdinalComparer + TestComparerSerialization(StringComparer.OrdinalIgnoreCase, "System.OrdinalComparer"); + TestComparerSerialization(StringComparer.CurrentCulture); + TestComparerSerialization(StringComparer.CurrentCultureIgnoreCase); + TestComparerSerialization(StringComparer.InvariantCulture); + TestComparerSerialization(StringComparer.InvariantCultureIgnoreCase); + + // Check other types while here, IEquatable valuetype, nullable valuetype, and non IEquatable object + TestComparerSerialization(EqualityComparer.Default); + TestComparerSerialization(EqualityComparer.Default); + TestComparerSerialization(EqualityComparer.Default); + + static void TestComparerSerialization(IEqualityComparer equalityComparer, string internalTypeName = null) + { + var bf = new BinaryFormatter(); + var s = new MemoryStream(); + + var dict = new HashSet(equalityComparer); + + Assert.Same(equalityComparer, dict.Comparer); + + bf.Serialize(s, dict); + s.Position = 0; + dict = (HashSet)bf.Deserialize(s); + + if (internalTypeName == null) + { + Assert.IsType(equalityComparer.GetType(), dict.Comparer); + } + else + { + Assert.Equal(internalTypeName, dict.Comparer.GetType().ToString()); + } + + Assert.True(equalityComparer.Equals(dict.Comparer)); + } + } + + #endregion + } +} diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_IEnumerable_NonGeneric_Tests.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_IEnumerable_NonGeneric_Tests.cs new file mode 100644 index 0000000000000..48ffc2e4ba1d6 --- /dev/null +++ 
b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_IEnumerable_NonGeneric_Tests.cs @@ -0,0 +1,60 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Collections.Generic; + +namespace System.Collections.Tests +{ + public class HashSet_IEnumerable_NonGeneric_Tests : IEnumerable_NonGeneric_Tests + { + protected override IEnumerable NonGenericIEnumerableFactory(int count) + { + var set = new HashSet(); + int seed = 12354; + while (set.Count < count) + set.Add(CreateT(set, seed++)); + return set; + } + + protected override bool Enumerator_Current_UndefinedOperation_Throws => true; + + protected override ModifyOperation ModifyEnumeratorThrows => PlatformDetection.IsNetFramework ? base.ModifyEnumeratorThrows : (base.ModifyEnumeratorAllowed & ~ModifyOperation.Remove); + + protected override ModifyOperation ModifyEnumeratorAllowed => PlatformDetection.IsNetFramework ? base.ModifyEnumeratorAllowed : ModifyOperation.Overwrite | ModifyOperation.Remove; + + /// + /// Returns a set of ModifyEnumerable delegates that modify the enumerable passed to them. + /// + protected override IEnumerable GetModifyEnumerables(ModifyOperation operations) + { + if ((operations & ModifyOperation.Clear) == ModifyOperation.Clear) + { + yield return (IEnumerable enumerable) => + { + HashSet casted = ((HashSet)enumerable); + if (casted.Count > 0) + { + casted.Clear(); + return true; + } + return false; + }; + } + } + + protected string CreateT(HashSet set, int seed) + { + int stringLength = seed % 10 + 5; + Random rand = new Random(seed); + byte[] bytes = new byte[stringLength]; + rand.NextBytes(bytes); + string ret = Convert.ToBase64String(bytes); + while (set.Contains(ret)) + { + rand.NextBytes(bytes); + ret = Convert.ToBase64String(bytes); + } + return ret; + } + } +} diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/TestingTypes.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/TestingTypes.cs new file mode 100644 index 0000000000000..4df123571586b --- /dev/null +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/TestingTypes.cs @@ -0,0 +1,387 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Collections.Generic; + +namespace System.Collections.Tests +{ + #region Comparers and Equatables + + // Use parity only as a hashcode so as to have many collisions. + [Serializable] + public class BadIntEqualityComparer : IEqualityComparer + { + public bool Equals(int x, int y) + { + return x == y; + } + + public int GetHashCode(int obj) + { + return obj % 2; + } + + public override bool Equals(object obj) + { + return obj is BadIntEqualityComparer; // Equal to all other instances of this type, not to anything else. + } + + public override int GetHashCode() + { + return unchecked((int)0xC001CAFE); // Doesn't matter as long as its constant. 
+ } + } + + [Serializable] + public class EquatableBackwardsOrder : IEquatable, IComparable, IComparable + { + private int _value; + + public EquatableBackwardsOrder(int value) + { + _value = value; + } + + public int CompareTo(EquatableBackwardsOrder other) //backwards from the usual integer ordering + { + return other._value - _value; + } + + public override int GetHashCode() => _value; + + public override bool Equals(object obj) + { + EquatableBackwardsOrder other = obj as EquatableBackwardsOrder; + return other != null && Equals(other); + } + + public bool Equals(EquatableBackwardsOrder other) + { + return _value == other._value; + } + + int IComparable.CompareTo(object obj) + { + if (obj != null && obj.GetType() == typeof(EquatableBackwardsOrder)) + return ((EquatableBackwardsOrder)obj)._value - _value; + else return -1; + } + } + + [Serializable] + public class Comparer_SameAsDefaultComparer : IEqualityComparer, IComparer + { + public int Compare(int x, int y) + { + return x - y; + } + + public bool Equals(int x, int y) + { + return x == y; + } + + public int GetHashCode(int obj) + { + return obj.GetHashCode(); + } + } + + [Serializable] + public class Comparer_HashCodeAlwaysReturnsZero : IEqualityComparer, IComparer + { + public int Compare(int x, int y) + { + return x - y; + } + + public bool Equals(int x, int y) + { + return x == y; + } + + public int GetHashCode(int obj) + { + return 0; + } + } + + [Serializable] + public class Comparer_ModOfInt : IEqualityComparer, IComparer + { + private int _mod; + + public Comparer_ModOfInt(int mod) + { + _mod = mod; + } + + public Comparer_ModOfInt() + { + _mod = 500; + } + + public int Compare(int x, int y) + { + return ((x % _mod) - (y % _mod)); + } + + public bool Equals(int x, int y) + { + return ((x % _mod) == (y % _mod)); + } + + public int GetHashCode(int x) + { + return (x % _mod); + } + } + + [Serializable] + public class Comparer_AbsOfInt : IEqualityComparer, IComparer + { + public int Compare(int x, int y) + { + return Math.Abs(x) - Math.Abs(y); + } + + public bool Equals(int x, int y) + { + return Math.Abs(x) == Math.Abs(y); + } + + public int GetHashCode(int x) + { + return Math.Abs(x); + } + } + + #endregion + + #region TestClasses + + [Serializable] + public struct SimpleInt : IStructuralComparable, IStructuralEquatable, IComparable, IComparable + { + private int _val; + public SimpleInt(int t) + { + _val = t; + } + public int Val + { + get { return _val; } + set { _val = value; } + } + + public int CompareTo(SimpleInt other) + { + return other.Val - _val; + } + + public int CompareTo(object obj) + { + if (obj.GetType() == typeof(SimpleInt)) + { + return ((SimpleInt)obj).Val - _val; + } + return -1; + } + + public int CompareTo(object other, IComparer comparer) + { + if (other.GetType() == typeof(SimpleInt)) + return ((SimpleInt)other).Val - _val; + return -1; + } + + public bool Equals(object other, IEqualityComparer comparer) + { + if (other.GetType() == typeof(SimpleInt)) + return ((SimpleInt)other).Val == _val; + return false; + } + + public int GetHashCode(IEqualityComparer comparer) + { + return comparer.GetHashCode(_val); + } + } + + [Serializable] + public class WrapStructural_Int : IEqualityComparer, IComparer + { + public int Compare(int x, int y) + { + return StructuralComparisons.StructuralComparer.Compare(x, y); + } + + public bool Equals(int x, int y) + { + return StructuralComparisons.StructuralEqualityComparer.Equals(x, y); + } + + public int GetHashCode(int obj) + { + return 
StructuralComparisons.StructuralEqualityComparer.GetHashCode(obj); + } + } + + [Serializable] + public class WrapStructural_SimpleInt : IEqualityComparer, IComparer + { + public int Compare(SimpleInt x, SimpleInt y) + { + return StructuralComparisons.StructuralComparer.Compare(x, y); + } + + public bool Equals(SimpleInt x, SimpleInt y) + { + return StructuralComparisons.StructuralEqualityComparer.Equals(x, y); + } + + public int GetHashCode(SimpleInt obj) + { + return StructuralComparisons.StructuralEqualityComparer.GetHashCode(obj); + } + } + + public class GenericComparable : IComparable + { + private readonly int _value; + + public GenericComparable(int value) + { + _value = value; + } + + public int CompareTo(GenericComparable other) => _value.CompareTo(other._value); + } + + public class NonGenericComparable : IComparable + { + private readonly GenericComparable _inner; + + public NonGenericComparable(int value) + { + _inner = new GenericComparable(value); + } + + public int CompareTo(object other) => + _inner.CompareTo(((NonGenericComparable)other)._inner); + } + + public class BadlyBehavingComparable : IComparable, IComparable + { + public int CompareTo(BadlyBehavingComparable other) => 1; + + public int CompareTo(object other) => -1; + } + + public class MutatingComparable : IComparable, IComparable + { + private int _state; + + public MutatingComparable(int initialState) + { + _state = initialState; + } + + public int State => _state; + + public int CompareTo(object other) => _state++; + + public int CompareTo(MutatingComparable other) => _state++; + } + + public static class ValueComparable + { + // Convenience method so the compiler can work its type inference magic. + public static ValueComparable Create(T value) where T : IComparable + { + return new ValueComparable(value); + } + } + + public struct ValueComparable : IComparable> where T : IComparable + { + public ValueComparable(T value) + { + Value = value; + } + + public T Value { get; } + + public int CompareTo(ValueComparable other) => + Value.CompareTo(other.Value); + } + + public class Equatable : IEquatable + { + public Equatable(int value) + { + Value = value; + } + + public int Value { get; } + + // Equals(object) is not implemented on purpose. + // EqualityComparer is only supposed to call through to the strongly-typed Equals since we implement IEquatable. 
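        // Illustrative sketch (not part of the ported sources, method name hypothetical):
        // EqualityComparer<T>.Default detects the IEquatable<T> implementation and dispatches to the strongly
        // typed Equals below, so two distinct instances holding the same Value still compare equal without an
        // Equals(object) override.
        private static void Demo_DefaultComparerUsesStronglyTypedEquals()
        {
            var comparer = EqualityComparer<Equatable>.Default;
            System.Diagnostics.Debug.Assert(comparer.Equals(new Equatable(42), new Equatable(42)));
            System.Diagnostics.Debug.Assert(!comparer.Equals(new Equatable(42), new Equatable(43)));
        }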
+ + public bool Equals(Equatable other) + { + return other != null && Value == other.Value; + } + + public override int GetHashCode() => Value; + } + + public struct NonEquatableValueType + { + public NonEquatableValueType(int value) + { + Value = value; + } + + public int Value { get; set; } + } + + public class DelegateEquatable : IEquatable + { + public DelegateEquatable() + { + EqualsWorker = _ => false; + } + + public Func EqualsWorker { get; set; } + + public bool Equals(DelegateEquatable other) => EqualsWorker(other); + } + + public struct ValueDelegateEquatable : IEquatable + { + public Func EqualsWorker { get; set; } + + public bool Equals(ValueDelegateEquatable other) => EqualsWorker(other); + } + + public sealed class TrackingEqualityComparer : IEqualityComparer + { + public int EqualsCalls; + public int GetHashCodeCalls; + + public bool Equals(T x, T y) + { + EqualsCalls++; + return EqualityComparer.Default.Equals(x, y); + } + + public int GetHashCode(T obj) + { + GetHashCodeCalls++; + return EqualityComparer.Default.GetHashCode(obj); + } + } + + #endregion +} diff --git a/src/Dependencies/Collections/Internal/BitHelper.cs b/src/Dependencies/Collections/Internal/BitHelper.cs new file mode 100644 index 0000000000000..b2c87e0d4a458 --- /dev/null +++ b/src/Dependencies/Collections/Internal/BitHelper.cs @@ -0,0 +1,40 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +namespace System.Collections.Generic +{ + internal ref struct BitHelper + { + private const int IntSize = sizeof(int) * 8; + private readonly Span _span; + + internal BitHelper(Span span, bool clear) + { + if (clear) + { + span.Clear(); + } + _span = span; + } + + internal void MarkBit(int bitPosition) + { + int bitArrayIndex = bitPosition / IntSize; + if ((uint)bitArrayIndex < (uint)_span.Length) + { + _span[bitArrayIndex] |= (1 << (bitPosition % IntSize)); + } + } + + internal bool IsMarked(int bitPosition) + { + int bitArrayIndex = bitPosition / IntSize; + return + (uint)bitArrayIndex < (uint)_span.Length && + (_span[bitArrayIndex] & (1 << (bitPosition % IntSize))) != 0; + } + + /// How many ints must be allocated to represent n bits. Returns (n+31)/32, but avoids overflow. + internal static int ToIntArrayLength(int n) => n > 0 ? ((n - 1) / IntSize + 1) : 0; + } +} diff --git a/src/Dependencies/Collections/Internal/SegmentedHashSetEqualityComparer`1.cs b/src/Dependencies/Collections/Internal/SegmentedHashSetEqualityComparer`1.cs new file mode 100644 index 0000000000000..a5e8594c3c95d --- /dev/null +++ b/src/Dependencies/Collections/Internal/SegmentedHashSetEqualityComparer`1.cs @@ -0,0 +1,77 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +namespace System.Collections.Generic +{ + /// Equality comparer for hashsets of hashsets + internal sealed class HashSetEqualityComparer : IEqualityComparer?> + { + public bool Equals(HashSet? x, HashSet? y) + { + // If they're the exact same instance, they're equal. + if (ReferenceEquals(x, y)) + { + return true; + } + + // They're not both null, so if either is null, they're not equal. + if (x == null || y == null) + { + return false; + } + + EqualityComparer defaultComparer = EqualityComparer.Default; + + // If both sets use the same comparer, they're equal if they're the same + // size and one is a "subset" of the other. 
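When the two sets do not share an equality comparer, the code below falls back to an O(N^2) element match using EqualityComparer<T>.Default rather than the count-plus-subset shortcut. The effect can be sketched through the public CreateSetComparer() entry point of the BCL HashSet<T>, from which this comparer is derived (behavior as in the v5.0.7 sources being imported here):

    using System;
    using System.Collections.Generic;

    var ordinal = new HashSet<string>(StringComparer.Ordinal) { "one", "TWO" };
    var ignoreCase = new HashSet<string>(StringComparer.OrdinalIgnoreCase) { "one", "TWO" };
    var setComparer = HashSet<string>.CreateSetComparer();

    // Different element comparers, so the quadratic fallback runs and matches
    // elements with EqualityComparer<string>.Default; these two sets compare equal.
    Console.WriteLine(setComparer.Equals(ordinal, ignoreCase)); // True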
+ if (HashSet.EqualityComparersAreEqual(x, y)) + { + return x.Count == y.Count && y.IsSubsetOfHashSetWithSameComparer(x); + } + + // Otherwise, do an O(N^2) match. + foreach (T yi in y) + { + bool found = false; + foreach (T xi in x) + { + if (defaultComparer.Equals(yi, xi)) + { + found = true; + break; + } + } + + if (!found) + { + return false; + } + } + + return true; + } + + public int GetHashCode(HashSet? obj) + { + int hashCode = 0; // default to 0 for null/empty set + + if (obj != null) + { + foreach (T t in obj) + { + if (t != null) + { + hashCode ^= t.GetHashCode(); // same hashcode as as default comparer + } + } + } + + return hashCode; + } + + // Equals method for the comparer itself. + public override bool Equals(object? obj) => obj is HashSetEqualityComparer; + + public override int GetHashCode() => EqualityComparer.Default.GetHashCode(); + } +} diff --git a/src/Dependencies/Collections/Microsoft.CodeAnalysis.Collections.projitems b/src/Dependencies/Collections/Microsoft.CodeAnalysis.Collections.projitems index ffb157764e4dd..4dc6777339755 100644 --- a/src/Dependencies/Collections/Microsoft.CodeAnalysis.Collections.projitems +++ b/src/Dependencies/Collections/Microsoft.CodeAnalysis.Collections.projitems @@ -22,6 +22,7 @@ + @@ -29,12 +30,14 @@ + + diff --git a/src/Dependencies/Collections/SegmentedHashSet`1.cs b/src/Dependencies/Collections/SegmentedHashSet`1.cs new file mode 100644 index 0000000000000..f9bc52a4d4740 --- /dev/null +++ b/src/Dependencies/Collections/SegmentedHashSet`1.cs @@ -0,0 +1,1582 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; +using System.Runtime.Serialization; + +using Internal.Runtime.CompilerServices; + +namespace System.Collections.Generic +{ + [DebuggerTypeProxy(typeof(ICollectionDebugView<>))] + [DebuggerDisplay("Count = {Count}")] + [Serializable] + [TypeForwardedFrom("System.Core, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")] + public class HashSet : ICollection, ISet, IReadOnlyCollection, IReadOnlySet, ISerializable, IDeserializationCallback + { + // This uses the same array-based implementation as Dictionary. + + // Constants for serialization + private const string CapacityName = "Capacity"; // Do not rename (binary serialization) + private const string ElementsName = "Elements"; // Do not rename (binary serialization) + private const string ComparerName = "Comparer"; // Do not rename (binary serialization) + private const string VersionName = "Version"; // Do not rename (binary serialization) + + /// Cutoff point for stackallocs. This corresponds to the number of ints. + private const int StackAllocThreshold = 100; + + /// + /// When constructing a hashset from an existing collection, it may contain duplicates, + /// so this is used as the max acceptable excess ratio of capacity to count. Note that + /// this is only used on the ctor and not to automatically shrink if the hashset has, e.g, + /// a lot of adds followed by removes. Users must explicitly shrink by calling TrimExcess. + /// This is set to 3 because capacity is acceptable as 2x rounded up to nearest prime. + /// + private const int ShrinkThreshold = 3; + private const int StartOfFreeList = -3; + + private int[]? _buckets; + private Entry[]? 
_entries; +#if TARGET_64BIT + private ulong _fastModMultiplier; +#endif + private int _count; + private int _freeList; + private int _freeCount; + private int _version; + private IEqualityComparer? _comparer; + private SerializationInfo? _siInfo; // temporary variable needed during deserialization + + #region Constructors + + public HashSet() : this((IEqualityComparer?)null) { } + + public HashSet(IEqualityComparer? comparer) + { + if (comparer != null && comparer != EqualityComparer.Default) // first check for null to avoid forcing default comparer instantiation unnecessarily + { + _comparer = comparer; + } + + // Special-case EqualityComparer.Default, StringComparer.Ordinal, and StringComparer.OrdinalIgnoreCase. + // We use a non-randomized comparer for improved perf, falling back to a randomized comparer if the + // hash buckets become unbalanced. + + if (typeof(T) == typeof(string)) + { + if (_comparer is null) + { + _comparer = (IEqualityComparer)NonRandomizedStringEqualityComparer.WrappedAroundDefaultComparer; + } + else if (ReferenceEquals(_comparer, StringComparer.Ordinal)) + { + _comparer = (IEqualityComparer)NonRandomizedStringEqualityComparer.WrappedAroundStringComparerOrdinal; + } + else if (ReferenceEquals(_comparer, StringComparer.OrdinalIgnoreCase)) + { + _comparer = (IEqualityComparer)NonRandomizedStringEqualityComparer.WrappedAroundStringComparerOrdinalIgnoreCase; + } + } + } + + public HashSet(int capacity) : this(capacity, null) { } + + public HashSet(IEnumerable collection) : this(collection, null) { } + + public HashSet(IEnumerable collection, IEqualityComparer? comparer) : this(comparer) + { + if (collection == null) + { + ThrowHelper.ThrowArgumentNullException(ExceptionArgument.collection); + } + + if (collection is HashSet otherAsHashSet && EqualityComparersAreEqual(this, otherAsHashSet)) + { + ConstructFrom(otherAsHashSet); + } + else + { + // To avoid excess resizes, first set size based on collection's count. The collection may + // contain duplicates, so call TrimExcess if resulting HashSet is larger than the threshold. + if (collection is ICollection coll) + { + int count = coll.Count; + if (count > 0) + { + Initialize(count); + } + } + + UnionWith(collection); + + if (_count > 0 && _entries!.Length / _count > ShrinkThreshold) + { + TrimExcess(); + } + } + } + + public HashSet(int capacity, IEqualityComparer? comparer) : this(comparer) + { + if (capacity < 0) + { + ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.capacity); + } + + if (capacity > 0) + { + Initialize(capacity); + } + } + + protected HashSet(SerializationInfo info, StreamingContext context) + { + // We can't do anything with the keys and values until the entire graph has been + // deserialized and we have a reasonable estimate that GetHashCode is not going to + // fail. For the time being, we'll just cache this. The graph is not valid until + // OnDeserialization has been called. + _siInfo = info; + } + + /// Initializes the HashSet from another HashSet with the same element type and equality comparer. + private void ConstructFrom(HashSet source) + { + if (source.Count == 0) + { + // As well as short-circuiting on the rest of the work done, + // this avoids errors from trying to access source._buckets + // or source._entries when they aren't initialized. 
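The collection constructor above pre-sizes from the source's Count and then reclaims memory when the source turns out to be mostly duplicates. A rough illustration of that path, sketched with the BCL HashSet<T> (the exact primes come from HashHelpers and are an implementation detail):

    using System.Collections.Generic;
    using System.Linq;

    // 1,000 source items but only 100 distinct values: Initialize(1000) sizes the
    // entry array to a prime >= 1000, UnionWith leaves only 100 live entries, and
    // because entries.Length / 100 exceeds ShrinkThreshold (3), the constructor
    // calls TrimExcess() to shrink back toward a prime >= 100.
    var mostlyDuplicates = Enumerable.Range(0, 1000).Select(i => i % 100).ToArray();
    var set = new HashSet<int>(mostlyDuplicates);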
+ return; + } + + int capacity = source._buckets!.Length; + int threshold = HashHelpers.ExpandPrime(source.Count + 1); + + if (threshold >= capacity) + { + _buckets = (int[])source._buckets.Clone(); + _entries = (Entry[])source._entries!.Clone(); + _freeList = source._freeList; + _freeCount = source._freeCount; + _count = source._count; +#if TARGET_64BIT + _fastModMultiplier = source._fastModMultiplier; +#endif + } + else + { + Initialize(source.Count); + + Entry[]? entries = source._entries; + for (int i = 0; i < source._count; i++) + { + ref Entry entry = ref entries![i]; + if (entry.Next >= -1) + { + AddIfNotPresent(entry.Value, out _); + } + } + } + + Debug.Assert(Count == source.Count); + } + + #endregion + + #region ICollection methods + + void ICollection.Add(T item) => AddIfNotPresent(item, out _); + + /// Removes all elements from the object. + public void Clear() + { + int count = _count; + if (count > 0) + { + Debug.Assert(_buckets != null, "_buckets should be non-null"); + Debug.Assert(_entries != null, "_entries should be non-null"); + + Array.Clear(_buckets, 0, _buckets.Length); + _count = 0; + _freeList = -1; + _freeCount = 0; + Array.Clear(_entries, 0, count); + } + } + + /// Determines whether the contains the specified element. + /// The element to locate in the object. + /// true if the object contains the specified element; otherwise, false. + public bool Contains(T item) => FindItemIndex(item) >= 0; + + /// Gets the index of the item in , or -1 if it's not in the set. + private int FindItemIndex(T item) + { + int[]? buckets = _buckets; + if (buckets != null) + { + Entry[]? entries = _entries; + Debug.Assert(entries != null, "Expected _entries to be initialized"); + + uint collisionCount = 0; + IEqualityComparer? comparer = _comparer; + + if (comparer == null) + { + int hashCode = item != null ? item.GetHashCode() : 0; + if (typeof(T).IsValueType) + { + // ValueType: Devirtualize with EqualityComparer.Default intrinsic + int i = GetBucketRef(hashCode) - 1; // Value in _buckets is 1-based + while (i >= 0) + { + ref Entry entry = ref entries[i]; + if (entry.HashCode == hashCode && EqualityComparer.Default.Equals(entry.Value, item)) + { + return i; + } + i = entry.Next; + + collisionCount++; + if (collisionCount > (uint)entries.Length) + { + // The chain of entries forms a loop, which means a concurrent update has happened. + ThrowHelper.ThrowInvalidOperationException_ConcurrentOperationsNotSupported(); + } + } + } + else + { + // Object type: Shared Generic, EqualityComparer.Default won't devirtualize (https://github.com/dotnet/runtime/issues/10050), + // so cache in a local rather than get EqualityComparer per loop iteration. + EqualityComparer defaultComparer = EqualityComparer.Default; + int i = GetBucketRef(hashCode) - 1; // Value in _buckets is 1-based + while (i >= 0) + { + ref Entry entry = ref entries[i]; + if (entry.HashCode == hashCode && defaultComparer.Equals(entry.Value, item)) + { + return i; + } + i = entry.Next; + + collisionCount++; + if (collisionCount > (uint)entries.Length) + { + // The chain of entries forms a loop, which means a concurrent update has happened. + ThrowHelper.ThrowInvalidOperationException_ConcurrentOperationsNotSupported(); + } + } + } + } + else + { + int hashCode = item != null ? 
comparer.GetHashCode(item) : 0; + int i = GetBucketRef(hashCode) - 1; // Value in _buckets is 1-based + while (i >= 0) + { + ref Entry entry = ref entries[i]; + if (entry.HashCode == hashCode && comparer.Equals(entry.Value, item)) + { + return i; + } + i = entry.Next; + + collisionCount++; + if (collisionCount > (uint)entries.Length) + { + // The chain of entries forms a loop, which means a concurrent update has happened. + ThrowHelper.ThrowInvalidOperationException_ConcurrentOperationsNotSupported(); + } + } + } + } + + return -1; + } + + /// Gets a reference to the specified hashcode's bucket, containing an index into . + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private ref int GetBucketRef(int hashCode) + { + int[] buckets = _buckets!; +#if TARGET_64BIT + return ref buckets[HashHelpers.FastMod((uint)hashCode, (uint)buckets.Length, _fastModMultiplier)]; +#else + return ref buckets[(uint)hashCode % (uint)buckets.Length]; +#endif + } + + public bool Remove(T item) + { + if (_buckets != null) + { + Entry[]? entries = _entries; + Debug.Assert(entries != null, "entries should be non-null"); + + uint collisionCount = 0; + int last = -1; + int hashCode = item != null ? (_comparer?.GetHashCode(item) ?? item.GetHashCode()) : 0; + + ref int bucket = ref GetBucketRef(hashCode); + int i = bucket - 1; // Value in buckets is 1-based + + while (i >= 0) + { + ref Entry entry = ref entries[i]; + + if (entry.HashCode == hashCode && (_comparer?.Equals(entry.Value, item) ?? EqualityComparer.Default.Equals(entry.Value, item))) + { + if (last < 0) + { + bucket = entry.Next + 1; // Value in buckets is 1-based + } + else + { + entries[last].Next = entry.Next; + } + + Debug.Assert((StartOfFreeList - _freeList) < 0, "shouldn't underflow because max hashtable length is MaxPrimeArrayLength = 0x7FEFFFFD(2146435069) _freelist underflow threshold 2147483646"); + entry.Next = StartOfFreeList - _freeList; + + if (RuntimeHelpers.IsReferenceOrContainsReferences()) + { + entry.Value = default!; + } + + _freeList = i; + _freeCount++; + return true; + } + + last = i; + i = entry.Next; + + collisionCount++; + if (collisionCount > (uint)entries.Length) + { + // The chain of entries forms a loop; which means a concurrent update has happened. + // Break out of the loop and throw, rather than looping forever. + ThrowHelper.ThrowInvalidOperationException_ConcurrentOperationsNotSupported(); + } + } + } + + return false; + } + + /// Gets the number of elements that are contained in the set. + public int Count => _count - _freeCount; + + bool ICollection.IsReadOnly => false; + + #endregion + + #region IEnumerable methods + + public Enumerator GetEnumerator() => new Enumerator(this); + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); + + #endregion + + #region ISerializable methods + + public virtual void GetObjectData(SerializationInfo info, StreamingContext context) + { + if (info == null) + { + ThrowHelper.ThrowArgumentNullException(ExceptionArgument.info); + } + + info.AddValue(VersionName, _version); // need to serialize version to avoid problems with serializing while enumerating + info.AddValue(ComparerName, Comparer, typeof(IEqualityComparer)); + info.AddValue(CapacityName, _buckets == null ? 
0 : _buckets.Length); + + if (_buckets != null) + { + var array = new T[Count]; + CopyTo(array); + info.AddValue(ElementsName, array, typeof(T[])); + } + } + + #endregion + + #region IDeserializationCallback methods + + public virtual void OnDeserialization(object? sender) + { + if (_siInfo == null) + { + // It might be necessary to call OnDeserialization from a container if the + // container object also implements OnDeserialization. We can return immediately + // if this function is called twice. Note we set _siInfo to null at the end of this method. + return; + } + + int capacity = _siInfo.GetInt32(CapacityName); + _comparer = (IEqualityComparer)_siInfo.GetValue(ComparerName, typeof(IEqualityComparer))!; + _freeList = -1; + _freeCount = 0; + + if (capacity != 0) + { + _buckets = new int[capacity]; + _entries = new Entry[capacity]; +#if TARGET_64BIT + _fastModMultiplier = HashHelpers.GetFastModMultiplier((uint)capacity); +#endif + + T[]? array = (T[]?)_siInfo.GetValue(ElementsName, typeof(T[])); + if (array == null) + { + ThrowHelper.ThrowSerializationException(ExceptionResource.Serialization_MissingKeys); + } + + // There are no resizes here because we already set capacity above. + for (int i = 0; i < array.Length; i++) + { + AddIfNotPresent(array[i], out _); + } + } + else + { + _buckets = null; + } + + _version = _siInfo.GetInt32(VersionName); + _siInfo = null; + } + + #endregion + + #region HashSet methods + + /// Adds the specified element to the . + /// The element to add to the set. + /// true if the element is added to the object; false if the element is already present. + public bool Add(T item) => AddIfNotPresent(item, out _); + + /// Searches the set for a given value and returns the equal value it finds, if any. + /// The value to search for. + /// The value from the set that the search found, or the default value of when the search yielded no match. + /// A value indicating whether the search was successful. + /// + /// This can be useful when you want to reuse a previously stored reference instead of + /// a newly constructed one (so that more sharing of references can occur) or to look up + /// a value that has more complete data than the value you currently have, although their + /// comparer functions indicate they are equal. + /// + public bool TryGetValue(T equalValue, [MaybeNullWhen(false)] out T actualValue) + { + if (_buckets != null) + { + int index = FindItemIndex(equalValue); + if (index >= 0) + { + actualValue = _entries![index].Value; + return true; + } + } + + actualValue = default; + return false; + } + + /// Modifies the current object to contain all elements that are present in itself, the specified collection, or both. + /// The collection to compare to the current object. + public void UnionWith(IEnumerable other) + { + if (other == null) + { + ThrowHelper.ThrowArgumentNullException(ExceptionArgument.other); + } + + foreach (T item in other) + { + AddIfNotPresent(item, out _); + } + } + + /// Modifies the current object to contain only elements that are present in that object and in the specified collection. + /// The collection to compare to the current object. + public void IntersectWith(IEnumerable other) + { + if (other == null) + { + ThrowHelper.ThrowArgumentNullException(ExceptionArgument.other); + } + + // Intersection of anything with empty set is empty set, so return if count is 0. + // Same if the set intersecting with itself is the same set. 
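Since the type keeps the same ISet<T> mutation surface as the BCL HashSet<T> it is derived from, the semantics of the operations that follow can be sketched with the familiar type (resulting contents shown as comments):

    using System.Collections.Generic;

    var set = new HashSet<int> { 1, 2, 3, 4 };
    set.IntersectWith(new[] { 2, 4, 6 });       // { 2, 4 }
    set.UnionWith(new[] { 5 });                 // { 2, 4, 5 }
    set.ExceptWith(new[] { 4 });                // { 2, 5 }
    set.SymmetricExceptWith(new[] { 5, 7 });    // { 2, 7 }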
+ if (Count == 0 || other == this) + { + return; + } + + // If other is known to be empty, intersection is empty set; remove all elements, and we're done. + if (other is ICollection otherAsCollection) + { + if (otherAsCollection.Count == 0) + { + Clear(); + return; + } + + // Faster if other is a hashset using same equality comparer; so check + // that other is a hashset using the same equality comparer. + if (other is HashSet otherAsSet && EqualityComparersAreEqual(this, otherAsSet)) + { + IntersectWithHashSetWithSameComparer(otherAsSet); + return; + } + } + + IntersectWithEnumerable(other); + } + + /// Removes all elements in the specified collection from the current object. + /// The collection to compare to the current object. + public void ExceptWith(IEnumerable other) + { + if (other == null) + { + ThrowHelper.ThrowArgumentNullException(ExceptionArgument.other); + } + + // This is already the empty set; return. + if (Count == 0) + { + return; + } + + // Special case if other is this; a set minus itself is the empty set. + if (other == this) + { + Clear(); + return; + } + + // Remove every element in other from this. + foreach (T element in other) + { + Remove(element); + } + } + + /// Modifies the current object to contain only elements that are present either in that object or in the specified collection, but not both. + /// The collection to compare to the current object. + public void SymmetricExceptWith(IEnumerable other) + { + if (other == null) + { + ThrowHelper.ThrowArgumentNullException(ExceptionArgument.other); + } + + // If set is empty, then symmetric difference is other. + if (Count == 0) + { + UnionWith(other); + return; + } + + // Special-case this; the symmetric difference of a set with itself is the empty set. + if (other == this) + { + Clear(); + return; + } + + // If other is a HashSet, it has unique elements according to its equality comparer, + // but if they're using different equality comparers, then assumption of uniqueness + // will fail. So first check if other is a hashset using the same equality comparer; + // symmetric except is a lot faster and avoids bit array allocations if we can assume + // uniqueness. + if (other is HashSet otherAsSet && EqualityComparersAreEqual(this, otherAsSet)) + { + SymmetricExceptWithUniqueHashSet(otherAsSet); + } + else + { + SymmetricExceptWithEnumerable(other); + } + } + + /// Determines whether a object is a subset of the specified collection. + /// The collection to compare to the current object. + /// true if the object is a subset of ; otherwise, false. + public bool IsSubsetOf(IEnumerable other) + { + if (other == null) + { + ThrowHelper.ThrowArgumentNullException(ExceptionArgument.other); + } + + // The empty set is a subset of any set, and a set is a subset of itself. + // Set is always a subset of itself + if (Count == 0 || other == this) + { + return true; + } + + // Faster if other has unique elements according to this equality comparer; so check + // that other is a hashset using the same equality comparer. + if (other is HashSet otherAsSet && EqualityComparersAreEqual(this, otherAsSet)) + { + // if this has more elements then it can't be a subset + if (Count > otherAsSet.Count) + { + return false; + } + + // already checked that we're using same equality comparer. simply check that + // each element in this is contained in other. 
+ return IsSubsetOfHashSetWithSameComparer(otherAsSet); + } + + (int uniqueCount, int unfoundCount) = CheckUniqueAndUnfoundElements(other, returnIfUnfound: false); + return uniqueCount == Count && unfoundCount >= 0; + } + + /// Determines whether a object is a proper subset of the specified collection. + /// The collection to compare to the current object. + /// true if the object is a proper subset of ; otherwise, false. + public bool IsProperSubsetOf(IEnumerable other) + { + if (other == null) + { + ThrowHelper.ThrowArgumentNullException(ExceptionArgument.other); + } + + // No set is a proper subset of itself. + if (other == this) + { + return false; + } + + if (other is ICollection otherAsCollection) + { + // No set is a proper subset of an empty set. + if (otherAsCollection.Count == 0) + { + return false; + } + + // The empty set is a proper subset of anything but the empty set. + if (Count == 0) + { + return otherAsCollection.Count > 0; + } + + // Faster if other is a hashset (and we're using same equality comparer). + if (other is HashSet otherAsSet && EqualityComparersAreEqual(this, otherAsSet)) + { + if (Count >= otherAsSet.Count) + { + return false; + } + + // This has strictly less than number of items in other, so the following + // check suffices for proper subset. + return IsSubsetOfHashSetWithSameComparer(otherAsSet); + } + } + + (int uniqueCount, int unfoundCount) = CheckUniqueAndUnfoundElements(other, returnIfUnfound: false); + return uniqueCount == Count && unfoundCount > 0; + } + + /// Determines whether a object is a proper superset of the specified collection. + /// The collection to compare to the current object. + /// true if the object is a superset of ; otherwise, false. + public bool IsSupersetOf(IEnumerable other) + { + if (other == null) + { + ThrowHelper.ThrowArgumentNullException(ExceptionArgument.other); + } + + // A set is always a superset of itself. + if (other == this) + { + return true; + } + + // Try to fall out early based on counts. + if (other is ICollection otherAsCollection) + { + // If other is the empty set then this is a superset. + if (otherAsCollection.Count == 0) + { + return true; + } + + // Try to compare based on counts alone if other is a hashset with same equality comparer. + if (other is HashSet otherAsSet && + EqualityComparersAreEqual(this, otherAsSet) && + otherAsSet.Count > Count) + { + return false; + } + } + + return ContainsAllElements(other); + } + + /// Determines whether a object is a proper superset of the specified collection. + /// The collection to compare to the current object. + /// true if the object is a proper superset of ; otherwise, false. + public bool IsProperSupersetOf(IEnumerable other) + { + if (other == null) + { + ThrowHelper.ThrowArgumentNullException(ExceptionArgument.other); + } + + // The empty set isn't a proper superset of any set, and a set is never a strict superset of itself. + if (Count == 0 || other == this) + { + return false; + } + + if (other is ICollection otherAsCollection) + { + // If other is the empty set then this is a superset. + if (otherAsCollection.Count == 0) + { + // Note that this has at least one element, based on above check. + return true; + } + + // Faster if other is a hashset with the same equality comparer + if (other is HashSet otherAsSet && EqualityComparersAreEqual(this, otherAsSet)) + { + if (otherAsSet.Count >= Count) + { + return false; + } + + // Now perform element check. 
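The subset/superset family above follows the usual conventions: every set is both a subset and a superset of itself, the empty set is a subset of everything, and the "proper" variants additionally require the counts to differ. A quick reference, again sketched with the BCL HashSet<T>:

    using System.Collections.Generic;

    var s = new HashSet<int> { 1, 2, 3 };
    s.IsSubsetOf(new[] { 1, 2, 3 });           // true  (a set is a subset of itself)
    s.IsProperSubsetOf(new[] { 1, 2, 3, 4 });  // true
    s.IsProperSubsetOf(new[] { 1, 2, 3 });     // false (counts are equal)
    s.IsSupersetOf(new[] { 2 });               // true
    s.IsProperSupersetOf(new[] { 1, 2, 3 });   // false
    s.Overlaps(new[] { 3, 9 });                // true
    s.SetEquals(new[] { 3, 2, 1 });            // true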
+ return ContainsAllElements(otherAsSet); + } + } + + // Couldn't fall out in the above cases; do it the long way + (int uniqueCount, int unfoundCount) = CheckUniqueAndUnfoundElements(other, returnIfUnfound: true); + return uniqueCount < Count && unfoundCount == 0; + } + + /// Determines whether the current object and a specified collection share common elements. + /// The collection to compare to the current object. + /// true if the object and share at least one common element; otherwise, false. + public bool Overlaps(IEnumerable other) + { + if (other == null) + { + ThrowHelper.ThrowArgumentNullException(ExceptionArgument.other); + } + + if (Count == 0) + { + return false; + } + + // Set overlaps itself + if (other == this) + { + return true; + } + + foreach (T element in other) + { + if (Contains(element)) + { + return true; + } + } + + return false; + } + + /// Determines whether a object and the specified collection contain the same elements. + /// The collection to compare to the current object. + /// true if the object is equal to ; otherwise, false. + public bool SetEquals(IEnumerable other) + { + if (other == null) + { + ThrowHelper.ThrowArgumentNullException(ExceptionArgument.other); + } + + // A set is equal to itself. + if (other == this) + { + return true; + } + + // Faster if other is a hashset and we're using same equality comparer. + if (other is HashSet otherAsSet && EqualityComparersAreEqual(this, otherAsSet)) + { + // Attempt to return early: since both contain unique elements, if they have + // different counts, then they can't be equal. + if (Count != otherAsSet.Count) + { + return false; + } + + // Already confirmed that the sets have the same number of distinct elements, so if + // one is a superset of the other then they must be equal. + return ContainsAllElements(otherAsSet); + } + else + { + // If this count is 0 but other contains at least one element, they can't be equal. + if (Count == 0 && + other is ICollection otherAsCollection && + otherAsCollection.Count > 0) + { + return false; + } + + (int uniqueCount, int unfoundCount) = CheckUniqueAndUnfoundElements(other, returnIfUnfound: true); + return uniqueCount == Count && unfoundCount == 0; + } + } + + public void CopyTo(T[] array) => CopyTo(array, 0, Count); + + /// Copies the elements of a object to an array, starting at the specified array index. + /// The destination array. + /// The zero-based index in array at which copying begins. + public void CopyTo(T[] array, int arrayIndex) => CopyTo(array, arrayIndex, Count); + + public void CopyTo(T[] array, int arrayIndex, int count) + { + if (array == null) + { + ThrowHelper.ThrowArgumentNullException(ExceptionArgument.array); + } + + // Check array index valid index into array. + if (arrayIndex < 0) + { + throw new ArgumentOutOfRangeException(nameof(arrayIndex), arrayIndex, SR.ArgumentOutOfRange_NeedNonNegNum); + } + + // Also throw if count less than 0. + if (count < 0) + { + throw new ArgumentOutOfRangeException(nameof(count), count, SR.ArgumentOutOfRange_NeedNonNegNum); + } + + // Will the array, starting at arrayIndex, be able to hold elements? Note: not + // checking arrayIndex >= array.Length (consistency with list of allowing + // count of 0; subsequent check takes care of the rest) + if (arrayIndex > array.Length || count > array.Length - arrayIndex) + { + ThrowHelper.ThrowArgumentException(ExceptionResource.Arg_ArrayPlusOffTooSmall); + } + + Entry[]? 
entries = _entries; + for (int i = 0; i < _count && count != 0; i++) + { + ref Entry entry = ref entries![i]; + if (entry.Next >= -1) + { + array[arrayIndex++] = entry.Value; + count--; + } + } + } + + /// Removes all elements that match the conditions defined by the specified predicate from a collection. + public int RemoveWhere(Predicate match) + { + if (match == null) + { + ThrowHelper.ThrowArgumentNullException(ExceptionArgument.match); + } + + Entry[]? entries = _entries; + int numRemoved = 0; + for (int i = 0; i < _count; i++) + { + ref Entry entry = ref entries![i]; + if (entry.Next >= -1) + { + // Cache value in case delegate removes it + T value = entry.Value; + if (match(value)) + { + // Check again that remove actually removed it. + if (Remove(value)) + { + numRemoved++; + } + } + } + } + + return numRemoved; + } + + /// Gets the object that is used to determine equality for the values in the set. + public IEqualityComparer Comparer + { + get + { + if (typeof(T) == typeof(string)) + { + return (IEqualityComparer)IInternalStringEqualityComparer.GetUnderlyingEqualityComparer((IEqualityComparer?)_comparer); + } + else + { + return _comparer ?? EqualityComparer.Default; + } + } + } + + /// Ensures that this hash set can hold the specified number of elements without growing. + public int EnsureCapacity(int capacity) + { + if (capacity < 0) + { + ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.capacity); + } + + int currentCapacity = _entries == null ? 0 : _entries.Length; + if (currentCapacity >= capacity) + { + return currentCapacity; + } + + if (_buckets == null) + { + return Initialize(capacity); + } + + int newSize = HashHelpers.GetPrime(capacity); + Resize(newSize, forceNewHashCodes: false); + return newSize; + } + + private void Resize() => Resize(HashHelpers.ExpandPrime(_count), forceNewHashCodes: false); + + private void Resize(int newSize, bool forceNewHashCodes) + { + // Value types never rehash + Debug.Assert(!forceNewHashCodes || !typeof(T).IsValueType); + Debug.Assert(_entries != null, "_entries should be non-null"); + Debug.Assert(newSize >= _entries.Length); + + var entries = new Entry[newSize]; + + int count = _count; + Array.Copy(_entries, entries, count); + + if (!typeof(T).IsValueType && forceNewHashCodes) + { + Debug.Assert(_comparer is NonRandomizedStringEqualityComparer); + _comparer = (IEqualityComparer)((NonRandomizedStringEqualityComparer)_comparer).GetRandomizedEqualityComparer(); + + for (int i = 0; i < count; i++) + { + ref Entry entry = ref entries[i]; + if (entry.Next >= -1) + { + entry.HashCode = entry.Value != null ? _comparer!.GetHashCode(entry.Value) : 0; + } + } + + if (ReferenceEquals(_comparer, EqualityComparer.Default)) + { + _comparer = null; + } + } + + // Assign member variables after both arrays allocated to guard against corruption from OOM if second fails + _buckets = new int[newSize]; +#if TARGET_64BIT + _fastModMultiplier = HashHelpers.GetFastModMultiplier((uint)newSize); +#endif + for (int i = 0; i < count; i++) + { + ref Entry entry = ref entries[i]; + if (entry.Next >= -1) + { + ref int bucket = ref GetBucketRef(entry.HashCode); + entry.Next = bucket - 1; // Value in _buckets is 1-based + bucket = i + 1; + } + } + + _entries = entries; + } + + /// + /// Sets the capacity of a object to the actual number of elements it contains, + /// rounded up to a nearby, implementation-specific value. + /// + public void TrimExcess() + { + int capacity = Count; + + int newSize = HashHelpers.GetPrime(capacity); + Entry[]? 
oldEntries = _entries; + int currentCapacity = oldEntries == null ? 0 : oldEntries.Length; + if (newSize >= currentCapacity) + { + return; + } + + int oldCount = _count; + _version++; + Initialize(newSize); + Entry[]? entries = _entries; + int count = 0; + for (int i = 0; i < oldCount; i++) + { + int hashCode = oldEntries![i].HashCode; // At this point, we know we have entries. + if (oldEntries[i].Next >= -1) + { + ref Entry entry = ref entries![count]; + entry = oldEntries[i]; + ref int bucket = ref GetBucketRef(hashCode); + entry.Next = bucket - 1; // Value in _buckets is 1-based + bucket = count + 1; + count++; + } + } + + _count = capacity; + _freeCount = 0; + } + + #endregion + + #region Helper methods + + /// Returns an object that can be used for equality testing of a object. + public static IEqualityComparer> CreateSetComparer() => new HashSetEqualityComparer(); + + /// + /// Initializes buckets and slots arrays. Uses suggested capacity by finding next prime + /// greater than or equal to capacity. + /// + private int Initialize(int capacity) + { + int size = HashHelpers.GetPrime(capacity); + var buckets = new int[size]; + var entries = new Entry[size]; + + // Assign member variables after both arrays are allocated to guard against corruption from OOM if second fails. + _freeList = -1; + _buckets = buckets; + _entries = entries; +#if TARGET_64BIT + _fastModMultiplier = HashHelpers.GetFastModMultiplier((uint)size); +#endif + + return size; + } + + /// Adds the specified element to the set if it's not already contained. + /// The element to add to the set. + /// The index into of the element. + /// true if the element is added to the object; false if the element is already present. + private bool AddIfNotPresent(T value, out int location) + { + if (_buckets == null) + { + Initialize(0); + } + Debug.Assert(_buckets != null); + + Entry[]? entries = _entries; + Debug.Assert(entries != null, "expected entries to be non-null"); + + IEqualityComparer? comparer = _comparer; + int hashCode; + + uint collisionCount = 0; + ref int bucket = ref Unsafe.NullRef(); + + if (comparer == null) + { + hashCode = value != null ? value.GetHashCode() : 0; + bucket = ref GetBucketRef(hashCode); + int i = bucket - 1; // Value in _buckets is 1-based + if (typeof(T).IsValueType) + { + // ValueType: Devirtualize with EqualityComparer.Default intrinsic + while (i >= 0) + { + ref Entry entry = ref entries[i]; + if (entry.HashCode == hashCode && EqualityComparer.Default.Equals(entry.Value, value)) + { + location = i; + return false; + } + i = entry.Next; + + collisionCount++; + if (collisionCount > (uint)entries.Length) + { + // The chain of entries forms a loop, which means a concurrent update has happened. + ThrowHelper.ThrowInvalidOperationException_ConcurrentOperationsNotSupported(); + } + } + } + else + { + // Object type: Shared Generic, EqualityComparer.Default won't devirtualize (https://github.com/dotnet/runtime/issues/10050), + // so cache in a local rather than get EqualityComparer per loop iteration. + EqualityComparer defaultComparer = EqualityComparer.Default; + while (i >= 0) + { + ref Entry entry = ref entries[i]; + if (entry.HashCode == hashCode && defaultComparer.Equals(entry.Value, value)) + { + location = i; + return false; + } + i = entry.Next; + + collisionCount++; + if (collisionCount > (uint)entries.Length) + { + // The chain of entries forms a loop, which means a concurrent update has happened. 
+ ThrowHelper.ThrowInvalidOperationException_ConcurrentOperationsNotSupported(); + } + } + } + } + else + { + hashCode = value != null ? comparer.GetHashCode(value) : 0; + bucket = ref GetBucketRef(hashCode); + int i = bucket - 1; // Value in _buckets is 1-based + while (i >= 0) + { + ref Entry entry = ref entries[i]; + if (entry.HashCode == hashCode && comparer.Equals(entry.Value, value)) + { + location = i; + return false; + } + i = entry.Next; + + collisionCount++; + if (collisionCount > (uint)entries.Length) + { + // The chain of entries forms a loop, which means a concurrent update has happened. + ThrowHelper.ThrowInvalidOperationException_ConcurrentOperationsNotSupported(); + } + } + } + + int index; + if (_freeCount > 0) + { + index = _freeList; + _freeCount--; + Debug.Assert((StartOfFreeList - entries![_freeList].Next) >= -1, "shouldn't overflow because `next` cannot underflow"); + _freeList = StartOfFreeList - entries[_freeList].Next; + } + else + { + int count = _count; + if (count == entries.Length) + { + Resize(); + bucket = ref GetBucketRef(hashCode); + } + index = count; + _count = count + 1; + entries = _entries; + } + + { + ref Entry entry = ref entries![index]; + entry.HashCode = hashCode; + entry.Next = bucket - 1; // Value in _buckets is 1-based + entry.Value = value; + bucket = index + 1; + _version++; + location = index; + } + + // Value types never rehash + if (!typeof(T).IsValueType && collisionCount > HashHelpers.HashCollisionThreshold && comparer is NonRandomizedStringEqualityComparer) + { + // If we hit the collision threshold we'll need to switch to the comparer which is using randomized string hashing + // i.e. EqualityComparer.Default. + Resize(entries.Length, forceNewHashCodes: true); + location = FindItemIndex(value); + Debug.Assert(location >= 0); + } + + return true; + } + + /// + /// Checks if this contains of other's elements. Iterates over other's elements and + /// returns false as soon as it finds an element in other that's not in this. + /// Used by SupersetOf, ProperSupersetOf, and SetEquals. + /// + private bool ContainsAllElements(IEnumerable other) + { + foreach (T element in other) + { + if (!Contains(element)) + { + return false; + } + } + + return true; + } + + /// + /// Implementation Notes: + /// If other is a hashset and is using same equality comparer, then checking subset is + /// faster. Simply check that each element in this is in other. + /// + /// Note: if other doesn't use same equality comparer, then Contains check is invalid, + /// which is why callers must take are of this. + /// + /// If callers are concerned about whether this is a proper subset, they take care of that. + /// + internal bool IsSubsetOfHashSetWithSameComparer(HashSet other) + { + foreach (T item in this) + { + if (!other.Contains(item)) + { + return false; + } + } + + return true; + } + + /// + /// If other is a hashset that uses same equality comparer, intersect is much faster + /// because we can use other's Contains + /// + private void IntersectWithHashSetWithSameComparer(HashSet other) + { + Entry[]? entries = _entries; + for (int i = 0; i < _count; i++) + { + ref Entry entry = ref entries![i]; + if (entry.Next >= -1) + { + T item = entry.Value; + if (!other.Contains(item)) + { + Remove(item); + } + } + } + } + + /// + /// Iterate over other. If contained in this, mark an element in bit array corresponding to + /// its position in _slots. If anything is unmarked (in bit array), remove it. 
+ /// + /// This attempts to allocate on the stack, if below StackAllocThreshold. + /// + private unsafe void IntersectWithEnumerable(IEnumerable other) + { + Debug.Assert(_buckets != null, "_buckets shouldn't be null; callers should check first"); + + // Keep track of current last index; don't want to move past the end of our bit array + // (could happen if another thread is modifying the collection). + int originalCount = _count; + int intArrayLength = BitHelper.ToIntArrayLength(originalCount); + + Span span = stackalloc int[StackAllocThreshold]; + BitHelper bitHelper = intArrayLength <= StackAllocThreshold ? + new BitHelper(span.Slice(0, intArrayLength), clear: true) : + new BitHelper(new int[intArrayLength], clear: false); + + // Mark if contains: find index of in slots array and mark corresponding element in bit array. + foreach (T item in other) + { + int index = FindItemIndex(item); + if (index >= 0) + { + bitHelper.MarkBit(index); + } + } + + // If anything unmarked, remove it. Perf can be optimized here if BitHelper had a + // FindFirstUnmarked method. + for (int i = 0; i < originalCount; i++) + { + ref Entry entry = ref _entries![i]; + if (entry.Next >= -1 && !bitHelper.IsMarked(i)) + { + Remove(entry.Value); + } + } + } + + /// + /// if other is a set, we can assume it doesn't have duplicate elements, so use this + /// technique: if can't remove, then it wasn't present in this set, so add. + /// + /// As with other methods, callers take care of ensuring that other is a hashset using the + /// same equality comparer. + /// + /// + private void SymmetricExceptWithUniqueHashSet(HashSet other) + { + foreach (T item in other) + { + if (!Remove(item)) + { + AddIfNotPresent(item, out _); + } + } + } + + /// + /// Implementation notes: + /// + /// Used for symmetric except when other isn't a HashSet. This is more tedious because + /// other may contain duplicates. HashSet technique could fail in these situations: + /// 1. Other has a duplicate that's not in this: HashSet technique would add then + /// remove it. + /// 2. Other has a duplicate that's in this: HashSet technique would remove then add it + /// back. + /// In general, its presence would be toggled each time it appears in other. + /// + /// This technique uses bit marking to indicate whether to add/remove the item. If already + /// present in collection, it will get marked for deletion. If added from other, it will + /// get marked as something not to remove. + /// + /// + /// + private unsafe void SymmetricExceptWithEnumerable(IEnumerable other) + { + int originalCount = _count; + int intArrayLength = BitHelper.ToIntArrayLength(originalCount); + + Span itemsToRemoveSpan = stackalloc int[StackAllocThreshold / 2]; + BitHelper itemsToRemove = intArrayLength <= StackAllocThreshold / 2 ? + new BitHelper(itemsToRemoveSpan.Slice(0, intArrayLength), clear: true) : + new BitHelper(new int[intArrayLength], clear: false); + + Span itemsAddedFromOtherSpan = stackalloc int[StackAllocThreshold / 2]; + BitHelper itemsAddedFromOther = intArrayLength <= StackAllocThreshold / 2 ? + new BitHelper(itemsAddedFromOtherSpan.Slice(0, intArrayLength), clear: true) : + new BitHelper(new int[intArrayLength], clear: false); + + foreach (T item in other) + { + int location; + if (AddIfNotPresent(item, out location)) + { + // wasn't already present in collection; flag it as something not to remove + // *NOTE* if location is out of range, we should ignore. BitHelper will + // detect that it's out of bounds and not try to mark it. 
But it's + // expected that location could be out of bounds because adding the item + // will increase _lastIndex as soon as all the free spots are filled. + itemsAddedFromOther.MarkBit(location); + } + else + { + // already there...if not added from other, mark for remove. + // *NOTE* Even though BitHelper will check that location is in range, we want + // to check here. There's no point in checking items beyond originalCount + // because they could not have been in the original collection + if (location < originalCount && !itemsAddedFromOther.IsMarked(location)) + { + itemsToRemove.MarkBit(location); + } + } + } + + // if anything marked, remove it + for (int i = 0; i < originalCount; i++) + { + if (itemsToRemove.IsMarked(i)) + { + Remove(_entries![i].Value); + } + } + } + + /// + /// Determines counts that can be used to determine equality, subset, and superset. This + /// is only used when other is an IEnumerable and not a HashSet. If other is a HashSet + /// these properties can be checked faster without use of marking because we can assume + /// other has no duplicates. + /// + /// The following count checks are performed by callers: + /// 1. Equals: checks if unfoundCount = 0 and uniqueFoundCount = _count; i.e. everything + /// in other is in this and everything in this is in other + /// 2. Subset: checks if unfoundCount >= 0 and uniqueFoundCount = _count; i.e. other may + /// have elements not in this and everything in this is in other + /// 3. Proper subset: checks if unfoundCount > 0 and uniqueFoundCount = _count; i.e + /// other must have at least one element not in this and everything in this is in other + /// 4. Proper superset: checks if unfound count = 0 and uniqueFoundCount strictly less + /// than _count; i.e. everything in other was in this and this had at least one element + /// not contained in other. + /// + /// An earlier implementation used delegates to perform these checks rather than returning + /// an ElementCount struct; however this was changed due to the perf overhead of delegates. + /// + /// + /// Allows us to finish faster for equals and proper superset + /// because unfoundCount must be 0. + private unsafe (int UniqueCount, int UnfoundCount) CheckUniqueAndUnfoundElements(IEnumerable other, bool returnIfUnfound) + { + // Need special case in case this has no elements. + if (_count == 0) + { + int numElementsInOther = 0; + foreach (T item in other) + { + numElementsInOther++; + break; // break right away, all we want to know is whether other has 0 or 1 elements + } + + return (UniqueCount: 0, UnfoundCount: numElementsInOther); + } + + Debug.Assert((_buckets != null) && (_count > 0), "_buckets was null but count greater than 0"); + + int originalCount = _count; + int intArrayLength = BitHelper.ToIntArrayLength(originalCount); + + Span span = stackalloc int[StackAllocThreshold]; + BitHelper bitHelper = intArrayLength <= StackAllocThreshold ? + new BitHelper(span.Slice(0, intArrayLength), clear: true) : + new BitHelper(new int[intArrayLength], clear: false); + + int unfoundCount = 0; // count of items in other not found in this + int uniqueFoundCount = 0; // count of unique items in other found in this + + foreach (T item in other) + { + int index = FindItemIndex(item); + if (index >= 0) + { + if (!bitHelper.IsMarked(index)) + { + // Item hasn't been seen yet. 
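A worked example of the (uniqueCount, unfoundCount) pair this method produces, observed through the public predicates (sketched with the BCL HashSet<T>, which shares this logic): for a set {1, 2, 3} and other = [2, 2, 4], only 2 is found and it is counted once because its duplicate is already marked, while 4 is unfound, giving (1, 1).

    using System.Collections.Generic;

    var set = new HashSet<int> { 1, 2, 3 };
    var other = new[] { 2, 2, 4 };

    set.IsSubsetOf(other);          // false: uniqueCount (1) != Count (3)
    set.IsProperSupersetOf(other);  // false: unfoundCount != 0
    set.SetEquals(other);           // false: both conditions fail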
+ bitHelper.MarkBit(index); + uniqueFoundCount++; + } + } + else + { + unfoundCount++; + if (returnIfUnfound) + { + break; + } + } + } + + return (uniqueFoundCount, unfoundCount); + } + + /// + /// Checks if equality comparers are equal. This is used for algorithms that can + /// speed up if it knows the other item has unique elements. I.e. if they're using + /// different equality comparers, then uniqueness assumption between sets break. + /// + internal static bool EqualityComparersAreEqual(HashSet set1, HashSet set2) => set1.Comparer.Equals(set2.Comparer); + +#endregion + + private struct Entry + { + public int HashCode; + /// + /// 0-based index of next entry in chain: -1 means end of chain + /// also encodes whether this entry _itself_ is part of the free list by changing sign and subtracting 3, + /// so -2 means end of free list, -3 means index 0 but on free list, -4 means index 1 but on free list, etc. + /// + public int Next; + public T Value; + } + + public struct Enumerator : IEnumerator + { + private readonly HashSet _hashSet; + private readonly int _version; + private int _index; + private T _current; + + internal Enumerator(HashSet hashSet) + { + _hashSet = hashSet; + _version = hashSet._version; + _index = 0; + _current = default!; + } + + public bool MoveNext() + { + if (_version != _hashSet._version) + { + ThrowHelper.ThrowInvalidOperationException_InvalidOperation_EnumFailedVersion(); + } + + // Use unsigned comparison since we set index to dictionary.count+1 when the enumeration ends. + // dictionary.count+1 could be negative if dictionary.count is int.MaxValue + while ((uint)_index < (uint)_hashSet._count) + { + ref Entry entry = ref _hashSet._entries![_index++]; + if (entry.Next >= -1) + { + _current = entry.Value; + return true; + } + } + + _index = _hashSet._count + 1; + _current = default!; + return false; + } + + public T Current => _current; + + public void Dispose() { } + + object? 
IEnumerator.Current + { + get + { + if (_index == 0 || (_index == _hashSet._count + 1)) + { + ThrowHelper.ThrowInvalidOperationException_InvalidOperation_EnumOpCantHappen(); + } + + return _current; + } + } + + void IEnumerator.Reset() + { + if (_version != _hashSet._version) + { + ThrowHelper.ThrowInvalidOperationException_InvalidOperation_EnumFailedVersion(); + } + + _index = 0; + _current = default!; + } + } + } +} From fa322ffb80a13841cfb49d53daf5e74927edc242 Mon Sep 17 00:00:00 2001 From: Sam Harwell Date: Fri, 2 Jul 2021 09:20:29 -0700 Subject: [PATCH 003/413] Add origin notes --- .../Collections/HashSet/ISet_Generic_Tests`1.cs | 6 ++++++ .../Collections/HashSet/SegmentedHashSet_Generic_Tests.cs | 6 ++++++ .../Collections/HashSet/SegmentedHashSet_Generic_Tests`1.cs | 6 ++++++ .../SegmentedHashSet_IEnumerable_NonGeneric_Tests.cs | 6 ++++++ .../CodeAnalysisTest/Collections/HashSet/TestingTypes.cs | 6 ++++++ src/Dependencies/Collections/Internal/BitHelper.cs | 6 ++++++ .../Internal/SegmentedHashSetEqualityComparer`1.cs | 6 ++++++ src/Dependencies/Collections/SegmentedHashSet`1.cs | 6 ++++++ 8 files changed, 48 insertions(+) diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/ISet_Generic_Tests`1.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/ISet_Generic_Tests`1.cs index 438b72e4ff746..f1e9dbcae03a7 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/ISet_Generic_Tests`1.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/ISet_Generic_Tests`1.cs @@ -1,6 +1,12 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +// NOTE: This code is derived from an implementation originally in dotnet/runtime: +// https://github.com/dotnet/runtime/blob/v5.0.7/src/libraries/Common/tests/System/Collections/ISet.Generic.Tests.cs +// +// See the commentary in https://github.com/dotnet/roslyn/pull/50156 for notes on incorporating changes made to the +// reference implementation. + using System.Collections.Generic; using System.Diagnostics; using System.Linq; diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests.cs index 499732281ea97..253941240153e 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests.cs @@ -1,6 +1,12 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +// NOTE: This code is derived from an implementation originally in dotnet/runtime: +// https://github.com/dotnet/runtime/blob/v5.0.7/src/libraries/System.Collections/tests/Generic/HashSet/HashSet.Generic.cs +// +// See the commentary in https://github.com/dotnet/roslyn/pull/50156 for notes on incorporating changes made to the +// reference implementation. 
+ using System.Collections.Generic; using Xunit; diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests`1.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests`1.cs index c82ca3938463a..0c98e521ed4f3 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests`1.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests`1.cs @@ -1,6 +1,12 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +// NOTE: This code is derived from an implementation originally in dotnet/runtime: +// https://github.com/dotnet/runtime/blob/v5.0.7/src/libraries/System.Collections/tests/Generic/HashSet/HashSet.Generic.Tests.cs +// +// See the commentary in https://github.com/dotnet/roslyn/pull/50156 for notes on incorporating changes made to the +// reference implementation. + using System.Collections.Generic; using System.IO; using System.Linq; diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_IEnumerable_NonGeneric_Tests.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_IEnumerable_NonGeneric_Tests.cs index 48ffc2e4ba1d6..54dbae1fbd74d 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_IEnumerable_NonGeneric_Tests.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_IEnumerable_NonGeneric_Tests.cs @@ -1,6 +1,12 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +// NOTE: This code is derived from an implementation originally in dotnet/runtime: +// https://github.com/dotnet/runtime/blob/v5.0.7/src/libraries/System.Collections/tests/Generic/HashSet/HashSet.Generic.Tests.AsNonGenericIEnumerable.cs +// +// See the commentary in https://github.com/dotnet/roslyn/pull/50156 for notes on incorporating changes made to the +// reference implementation. + using System.Collections.Generic; namespace System.Collections.Tests diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/TestingTypes.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/TestingTypes.cs index 4df123571586b..8ce466ee32b35 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/TestingTypes.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/TestingTypes.cs @@ -1,6 +1,12 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +// NOTE: This code is derived from an implementation originally in dotnet/runtime: +// https://github.com/dotnet/runtime/blob/v5.0.7/src/libraries/Common/tests/System/Collections/TestingTypes.cs +// +// See the commentary in https://github.com/dotnet/roslyn/pull/50156 for notes on incorporating changes made to the +// reference implementation. + using System.Collections.Generic; namespace System.Collections.Tests diff --git a/src/Dependencies/Collections/Internal/BitHelper.cs b/src/Dependencies/Collections/Internal/BitHelper.cs index b2c87e0d4a458..18bb071e4c25b 100644 --- a/src/Dependencies/Collections/Internal/BitHelper.cs +++ b/src/Dependencies/Collections/Internal/BitHelper.cs @@ -1,6 +1,12 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. 
+// NOTE: This code is derived from an implementation originally in dotnet/runtime: +// https://github.com/dotnet/runtime/blob/v5.0.7/src/libraries/Common/src/System/Collections/Generic/BitHelper.cs +// +// See the commentary in https://github.com/dotnet/roslyn/pull/50156 for notes on incorporating changes made to the +// reference implementation. + namespace System.Collections.Generic { internal ref struct BitHelper diff --git a/src/Dependencies/Collections/Internal/SegmentedHashSetEqualityComparer`1.cs b/src/Dependencies/Collections/Internal/SegmentedHashSetEqualityComparer`1.cs index a5e8594c3c95d..c0a47b7f89c8e 100644 --- a/src/Dependencies/Collections/Internal/SegmentedHashSetEqualityComparer`1.cs +++ b/src/Dependencies/Collections/Internal/SegmentedHashSetEqualityComparer`1.cs @@ -1,6 +1,12 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +// NOTE: This code is derived from an implementation originally in dotnet/runtime: +// https://github.com/dotnet/runtime/blob/v5.0.7/src/libraries/System.Private.CoreLib/src/System/Collections/Generic/HashSetEqualityComparer.cs +// +// See the commentary in https://github.com/dotnet/roslyn/pull/50156 for notes on incorporating changes made to the +// reference implementation. + namespace System.Collections.Generic { /// Equality comparer for hashsets of hashsets diff --git a/src/Dependencies/Collections/SegmentedHashSet`1.cs b/src/Dependencies/Collections/SegmentedHashSet`1.cs index f9bc52a4d4740..3ae667e5c9e26 100644 --- a/src/Dependencies/Collections/SegmentedHashSet`1.cs +++ b/src/Dependencies/Collections/SegmentedHashSet`1.cs @@ -1,6 +1,12 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +// NOTE: This code is derived from an implementation originally in dotnet/runtime: +// https://github.com/dotnet/runtime/blob/v5.0.7/src/libraries/System.Private.CoreLib/src/System/Collections/Generic/HashSet.cs +// +// See the commentary in https://github.com/dotnet/roslyn/pull/50156 for notes on incorporating changes made to the +// reference implementation. + using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Runtime.CompilerServices; From 17617c75f710fa7cf72d5d483e7513d01add7d1e Mon Sep 17 00:00:00 2001 From: Sam Harwell Date: Fri, 2 Jul 2021 09:34:02 -0700 Subject: [PATCH 004/413] Rename type to SegmentedHashSet --- .../Collections/Internal/BitHelper.cs | 2 +- .../SegmentedHashSetEqualityComparer`1.cs | 12 +- .../Collections/SegmentedHashSet`1.cs | 142 +++++++++--------- 3 files changed, 78 insertions(+), 78 deletions(-) diff --git a/src/Dependencies/Collections/Internal/BitHelper.cs b/src/Dependencies/Collections/Internal/BitHelper.cs index 18bb071e4c25b..b13dce1646576 100644 --- a/src/Dependencies/Collections/Internal/BitHelper.cs +++ b/src/Dependencies/Collections/Internal/BitHelper.cs @@ -7,7 +7,7 @@ // See the commentary in https://github.com/dotnet/roslyn/pull/50156 for notes on incorporating changes made to the // reference implementation. 
-namespace System.Collections.Generic +namespace Microsoft.CodeAnalysis.Collections.Internal { internal ref struct BitHelper { diff --git a/src/Dependencies/Collections/Internal/SegmentedHashSetEqualityComparer`1.cs b/src/Dependencies/Collections/Internal/SegmentedHashSetEqualityComparer`1.cs index c0a47b7f89c8e..449981afc611b 100644 --- a/src/Dependencies/Collections/Internal/SegmentedHashSetEqualityComparer`1.cs +++ b/src/Dependencies/Collections/Internal/SegmentedHashSetEqualityComparer`1.cs @@ -7,12 +7,12 @@ // See the commentary in https://github.com/dotnet/roslyn/pull/50156 for notes on incorporating changes made to the // reference implementation. -namespace System.Collections.Generic +namespace Microsoft.CodeAnalysis.Collections.Internal { /// Equality comparer for hashsets of hashsets - internal sealed class HashSetEqualityComparer : IEqualityComparer?> + internal sealed class SegmentedHashSetEqualityComparer : IEqualityComparer?> { - public bool Equals(HashSet? x, HashSet? y) + public bool Equals(SegmentedHashSet? x, SegmentedHashSet? y) { // If they're the exact same instance, they're equal. if (ReferenceEquals(x, y)) @@ -30,7 +30,7 @@ public bool Equals(HashSet? x, HashSet? y) // If both sets use the same comparer, they're equal if they're the same // size and one is a "subset" of the other. - if (HashSet.EqualityComparersAreEqual(x, y)) + if (SegmentedHashSet.EqualityComparersAreEqual(x, y)) { return x.Count == y.Count && y.IsSubsetOfHashSetWithSameComparer(x); } @@ -57,7 +57,7 @@ public bool Equals(HashSet? x, HashSet? y) return true; } - public int GetHashCode(HashSet? obj) + public int GetHashCode(SegmentedHashSet? obj) { int hashCode = 0; // default to 0 for null/empty set @@ -76,7 +76,7 @@ public int GetHashCode(HashSet? obj) } // Equals method for the comparer itself. - public override bool Equals(object? obj) => obj is HashSetEqualityComparer; + public override bool Equals(object? obj) => obj is SegmentedHashSetEqualityComparer; public override int GetHashCode() => EqualityComparer.Default.GetHashCode(); } diff --git a/src/Dependencies/Collections/SegmentedHashSet`1.cs b/src/Dependencies/Collections/SegmentedHashSet`1.cs index 3ae667e5c9e26..77f2db778ac74 100644 --- a/src/Dependencies/Collections/SegmentedHashSet`1.cs +++ b/src/Dependencies/Collections/SegmentedHashSet`1.cs @@ -14,13 +14,13 @@ using Internal.Runtime.CompilerServices; -namespace System.Collections.Generic +namespace Microsoft.CodeAnalysis.Collections { [DebuggerTypeProxy(typeof(ICollectionDebugView<>))] [DebuggerDisplay("Count = {Count}")] [Serializable] [TypeForwardedFrom("System.Core, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")] - public class HashSet : ICollection, ISet, IReadOnlyCollection, IReadOnlySet, ISerializable, IDeserializationCallback + internal class SegmentedHashSet : ICollection, ISet, IReadOnlyCollection, IReadOnlySet, ISerializable, IDeserializationCallback { // This uses the same array-based implementation as Dictionary. @@ -57,9 +57,9 @@ public class HashSet : ICollection, ISet, IReadOnlyCollection, IRead #region Constructors - public HashSet() : this((IEqualityComparer?)null) { } + public SegmentedHashSet() : this((IEqualityComparer?)null) { } - public HashSet(IEqualityComparer? comparer) + public SegmentedHashSet(IEqualityComparer? comparer) { if (comparer != null && comparer != EqualityComparer.Default) // first check for null to avoid forcing default comparer instantiation unnecessarily { @@ -87,25 +87,25 @@ public HashSet(IEqualityComparer? 
comparer) } } - public HashSet(int capacity) : this(capacity, null) { } + public SegmentedHashSet(int capacity) : this(capacity, null) { } - public HashSet(IEnumerable collection) : this(collection, null) { } + public SegmentedHashSet(IEnumerable collection) : this(collection, null) { } - public HashSet(IEnumerable collection, IEqualityComparer? comparer) : this(comparer) + public SegmentedHashSet(IEnumerable collection, IEqualityComparer? comparer) : this(comparer) { if (collection == null) { ThrowHelper.ThrowArgumentNullException(ExceptionArgument.collection); } - if (collection is HashSet otherAsHashSet && EqualityComparersAreEqual(this, otherAsHashSet)) + if (collection is SegmentedHashSet otherAsHashSet && EqualityComparersAreEqual(this, otherAsHashSet)) { ConstructFrom(otherAsHashSet); } else { // To avoid excess resizes, first set size based on collection's count. The collection may - // contain duplicates, so call TrimExcess if resulting HashSet is larger than the threshold. + // contain duplicates, so call TrimExcess if resulting SegmentedHashSet is larger than the threshold. if (collection is ICollection coll) { int count = coll.Count; @@ -124,7 +124,7 @@ public HashSet(IEnumerable collection, IEqualityComparer? comparer) : this } } - public HashSet(int capacity, IEqualityComparer? comparer) : this(comparer) + public SegmentedHashSet(int capacity, IEqualityComparer? comparer) : this(comparer) { if (capacity < 0) { @@ -137,7 +137,7 @@ public HashSet(int capacity, IEqualityComparer? comparer) : this(comparer) } } - protected HashSet(SerializationInfo info, StreamingContext context) + protected SegmentedHashSet(SerializationInfo info, StreamingContext context) { // We can't do anything with the keys and values until the entire graph has been // deserialized and we have a reasonable estimate that GetHashCode is not going to @@ -146,8 +146,8 @@ protected HashSet(SerializationInfo info, StreamingContext context) _siInfo = info; } - /// Initializes the HashSet from another HashSet with the same element type and equality comparer. - private void ConstructFrom(HashSet source) + /// Initializes the SegmentedHashSet from another SegmentedHashSet with the same element type and equality comparer. + private void ConstructFrom(SegmentedHashSet source) { if (source.Count == 0) { @@ -195,7 +195,7 @@ private void ConstructFrom(HashSet source) void ICollection.Add(T item) => AddIfNotPresent(item, out _); - /// Removes all elements from the object. + /// Removes all elements from the object. public void Clear() { int count = _count; @@ -212,9 +212,9 @@ public void Clear() } } - /// Determines whether the contains the specified element. - /// The element to locate in the object. - /// true if the object contains the specified element; otherwise, false. + /// Determines whether the contains the specified element. + /// The element to locate in the object. + /// true if the object contains the specified element; otherwise, false. public bool Contains(T item) => FindItemIndex(item) >= 0; /// Gets the index of the item in , or -1 if it's not in the set. @@ -461,11 +461,11 @@ public virtual void OnDeserialization(object? sender) #endregion - #region HashSet methods + #region SegmentedHashSet methods - /// Adds the specified element to the . + /// Adds the specified element to the . /// The element to add to the set. - /// true if the element is added to the object; false if the element is already present. + /// true if the element is added to the object; false if the element is already present. 
public bool Add(T item) => AddIfNotPresent(item, out _); /// Searches the set for a given value and returns the equal value it finds, if any. @@ -494,8 +494,8 @@ public bool TryGetValue(T equalValue, [MaybeNullWhen(false)] out T actualValue) return false; } - /// Modifies the current object to contain all elements that are present in itself, the specified collection, or both. - /// The collection to compare to the current object. + /// Modifies the current object to contain all elements that are present in itself, the specified collection, or both. + /// The collection to compare to the current object. public void UnionWith(IEnumerable other) { if (other == null) @@ -509,8 +509,8 @@ public void UnionWith(IEnumerable other) } } - /// Modifies the current object to contain only elements that are present in that object and in the specified collection. - /// The collection to compare to the current object. + /// Modifies the current object to contain only elements that are present in that object and in the specified collection. + /// The collection to compare to the current object. public void IntersectWith(IEnumerable other) { if (other == null) @@ -536,7 +536,7 @@ public void IntersectWith(IEnumerable other) // Faster if other is a hashset using same equality comparer; so check // that other is a hashset using the same equality comparer. - if (other is HashSet otherAsSet && EqualityComparersAreEqual(this, otherAsSet)) + if (other is SegmentedHashSet otherAsSet && EqualityComparersAreEqual(this, otherAsSet)) { IntersectWithHashSetWithSameComparer(otherAsSet); return; @@ -546,8 +546,8 @@ public void IntersectWith(IEnumerable other) IntersectWithEnumerable(other); } - /// Removes all elements in the specified collection from the current object. - /// The collection to compare to the current object. + /// Removes all elements in the specified collection from the current object. + /// The collection to compare to the current object. public void ExceptWith(IEnumerable other) { if (other == null) @@ -575,8 +575,8 @@ public void ExceptWith(IEnumerable other) } } - /// Modifies the current object to contain only elements that are present either in that object or in the specified collection, but not both. - /// The collection to compare to the current object. + /// Modifies the current object to contain only elements that are present either in that object or in the specified collection, but not both. + /// The collection to compare to the current object. public void SymmetricExceptWith(IEnumerable other) { if (other == null) @@ -598,12 +598,12 @@ public void SymmetricExceptWith(IEnumerable other) return; } - // If other is a HashSet, it has unique elements according to its equality comparer, + // If other is a SegmentedHashSet, it has unique elements according to its equality comparer, // but if they're using different equality comparers, then assumption of uniqueness // will fail. So first check if other is a hashset using the same equality comparer; // symmetric except is a lot faster and avoids bit array allocations if we can assume // uniqueness. - if (other is HashSet otherAsSet && EqualityComparersAreEqual(this, otherAsSet)) + if (other is SegmentedHashSet otherAsSet && EqualityComparersAreEqual(this, otherAsSet)) { SymmetricExceptWithUniqueHashSet(otherAsSet); } @@ -613,9 +613,9 @@ public void SymmetricExceptWith(IEnumerable other) } } - /// Determines whether a object is a subset of the specified collection. - /// The collection to compare to the current object. 
- /// true if the object is a subset of ; otherwise, false. + /// Determines whether a object is a subset of the specified collection. + /// The collection to compare to the current object. + /// true if the object is a subset of ; otherwise, false. public bool IsSubsetOf(IEnumerable other) { if (other == null) @@ -632,7 +632,7 @@ public bool IsSubsetOf(IEnumerable other) // Faster if other has unique elements according to this equality comparer; so check // that other is a hashset using the same equality comparer. - if (other is HashSet otherAsSet && EqualityComparersAreEqual(this, otherAsSet)) + if (other is SegmentedHashSet otherAsSet && EqualityComparersAreEqual(this, otherAsSet)) { // if this has more elements then it can't be a subset if (Count > otherAsSet.Count) @@ -649,9 +649,9 @@ public bool IsSubsetOf(IEnumerable other) return uniqueCount == Count && unfoundCount >= 0; } - /// Determines whether a object is a proper subset of the specified collection. - /// The collection to compare to the current object. - /// true if the object is a proper subset of ; otherwise, false. + /// Determines whether a object is a proper subset of the specified collection. + /// The collection to compare to the current object. + /// true if the object is a proper subset of ; otherwise, false. public bool IsProperSubsetOf(IEnumerable other) { if (other == null) @@ -680,7 +680,7 @@ public bool IsProperSubsetOf(IEnumerable other) } // Faster if other is a hashset (and we're using same equality comparer). - if (other is HashSet otherAsSet && EqualityComparersAreEqual(this, otherAsSet)) + if (other is SegmentedHashSet otherAsSet && EqualityComparersAreEqual(this, otherAsSet)) { if (Count >= otherAsSet.Count) { @@ -697,9 +697,9 @@ public bool IsProperSubsetOf(IEnumerable other) return uniqueCount == Count && unfoundCount > 0; } - /// Determines whether a object is a proper superset of the specified collection. - /// The collection to compare to the current object. - /// true if the object is a superset of ; otherwise, false. + /// Determines whether a object is a proper superset of the specified collection. + /// The collection to compare to the current object. + /// true if the object is a superset of ; otherwise, false. public bool IsSupersetOf(IEnumerable other) { if (other == null) @@ -723,7 +723,7 @@ public bool IsSupersetOf(IEnumerable other) } // Try to compare based on counts alone if other is a hashset with same equality comparer. - if (other is HashSet otherAsSet && + if (other is SegmentedHashSet otherAsSet && EqualityComparersAreEqual(this, otherAsSet) && otherAsSet.Count > Count) { @@ -734,9 +734,9 @@ public bool IsSupersetOf(IEnumerable other) return ContainsAllElements(other); } - /// Determines whether a object is a proper superset of the specified collection. - /// The collection to compare to the current object. - /// true if the object is a proper superset of ; otherwise, false. + /// Determines whether a object is a proper superset of the specified collection. + /// The collection to compare to the current object. + /// true if the object is a proper superset of ; otherwise, false. 
public bool IsProperSupersetOf(IEnumerable other) { if (other == null) @@ -760,7 +760,7 @@ public bool IsProperSupersetOf(IEnumerable other) } // Faster if other is a hashset with the same equality comparer - if (other is HashSet otherAsSet && EqualityComparersAreEqual(this, otherAsSet)) + if (other is SegmentedHashSet otherAsSet && EqualityComparersAreEqual(this, otherAsSet)) { if (otherAsSet.Count >= Count) { @@ -777,9 +777,9 @@ public bool IsProperSupersetOf(IEnumerable other) return uniqueCount < Count && unfoundCount == 0; } - /// Determines whether the current object and a specified collection share common elements. - /// The collection to compare to the current object. - /// true if the object and share at least one common element; otherwise, false. + /// Determines whether the current object and a specified collection share common elements. + /// The collection to compare to the current object. + /// true if the object and share at least one common element; otherwise, false. public bool Overlaps(IEnumerable other) { if (other == null) @@ -809,9 +809,9 @@ public bool Overlaps(IEnumerable other) return false; } - /// Determines whether a object and the specified collection contain the same elements. - /// The collection to compare to the current object. - /// true if the object is equal to ; otherwise, false. + /// Determines whether a object and the specified collection contain the same elements. + /// The collection to compare to the current object. + /// true if the object is equal to ; otherwise, false. public bool SetEquals(IEnumerable other) { if (other == null) @@ -826,7 +826,7 @@ public bool SetEquals(IEnumerable other) } // Faster if other is a hashset and we're using same equality comparer. - if (other is HashSet otherAsSet && EqualityComparersAreEqual(this, otherAsSet)) + if (other is SegmentedHashSet otherAsSet && EqualityComparersAreEqual(this, otherAsSet)) { // Attempt to return early: since both contain unique elements, if they have // different counts, then they can't be equal. @@ -856,7 +856,7 @@ other is ICollection otherAsCollection && public void CopyTo(T[] array) => CopyTo(array, 0, Count); - /// Copies the elements of a object to an array, starting at the specified array index. + /// Copies the elements of a object to an array, starting at the specified array index. /// The destination array. /// The zero-based index in array at which copying begins. public void CopyTo(T[] array, int arrayIndex) => CopyTo(array, arrayIndex, Count); @@ -900,7 +900,7 @@ public void CopyTo(T[] array, int arrayIndex, int count) } } - /// Removes all elements that match the conditions defined by the specified predicate from a collection. + /// Removes all elements that match the conditions defined by the specified predicate from a collection. public int RemoveWhere(Predicate match) { if (match == null) @@ -1025,7 +1025,7 @@ private void Resize(int newSize, bool forceNewHashCodes) } /// - /// Sets the capacity of a object to the actual number of elements it contains, + /// Sets the capacity of a object to the actual number of elements it contains, /// rounded up to a nearby, implementation-specific value. /// public void TrimExcess() @@ -1067,8 +1067,8 @@ public void TrimExcess() #region Helper methods - /// Returns an object that can be used for equality testing of a object. - public static IEqualityComparer> CreateSetComparer() => new HashSetEqualityComparer(); + /// Returns an object that can be used for equality testing of a object. 
+ public static IEqualityComparer> CreateSetComparer() => new SegmentedHashSetEqualityComparer(); /// /// Initializes buckets and slots arrays. Uses suggested capacity by finding next prime @@ -1094,7 +1094,7 @@ private int Initialize(int capacity) /// Adds the specified element to the set if it's not already contained. /// The element to add to the set. /// The index into of the element. - /// true if the element is added to the object; false if the element is already present. + /// true if the element is added to the object; false if the element is already present. private bool AddIfNotPresent(T value, out int location) { if (_buckets == null) @@ -1258,7 +1258,7 @@ private bool ContainsAllElements(IEnumerable other) /// /// If callers are concerned about whether this is a proper subset, they take care of that. /// - internal bool IsSubsetOfHashSetWithSameComparer(HashSet other) + internal bool IsSubsetOfHashSetWithSameComparer(SegmentedHashSet other) { foreach (T item in this) { @@ -1275,7 +1275,7 @@ internal bool IsSubsetOfHashSetWithSameComparer(HashSet other) /// If other is a hashset that uses same equality comparer, intersect is much faster /// because we can use other's Contains /// - private void IntersectWithHashSetWithSameComparer(HashSet other) + private void IntersectWithHashSetWithSameComparer(SegmentedHashSet other) { Entry[]? entries = _entries; for (int i = 0; i < _count; i++) @@ -1342,7 +1342,7 @@ private unsafe void IntersectWithEnumerable(IEnumerable other) /// same equality comparer. /// /// - private void SymmetricExceptWithUniqueHashSet(HashSet other) + private void SymmetricExceptWithUniqueHashSet(SegmentedHashSet other) { foreach (T item in other) { @@ -1356,11 +1356,11 @@ private void SymmetricExceptWithUniqueHashSet(HashSet other) /// /// Implementation notes: /// - /// Used for symmetric except when other isn't a HashSet. This is more tedious because - /// other may contain duplicates. HashSet technique could fail in these situations: - /// 1. Other has a duplicate that's not in this: HashSet technique would add then + /// Used for symmetric except when other isn't a SegmentedHashSet. This is more tedious because + /// other may contain duplicates. SegmentedHashSet technique could fail in these situations: + /// 1. Other has a duplicate that's not in this: SegmentedHashSet technique would add then /// remove it. - /// 2. Other has a duplicate that's in this: HashSet technique would remove then add it + /// 2. Other has a duplicate that's in this: SegmentedHashSet technique would remove then add it /// back. /// In general, its presence would be toggled each time it appears in other. /// @@ -1422,7 +1422,7 @@ private unsafe void SymmetricExceptWithEnumerable(IEnumerable other) /// /// Determines counts that can be used to determine equality, subset, and superset. This - /// is only used when other is an IEnumerable and not a HashSet. If other is a HashSet + /// is only used when other is an IEnumerable and not a SegmentedHashSet. If other is a SegmentedHashSet /// these properties can be checked faster without use of marking because we can assume /// other has no duplicates. /// @@ -1501,7 +1501,7 @@ private unsafe (int UniqueCount, int UnfoundCount) CheckUniqueAndUnfoundElements /// speed up if it knows the other item has unique elements. I.e. if they're using /// different equality comparers, then uniqueness assumption between sets break. 
/// - internal static bool EqualityComparersAreEqual(HashSet set1, HashSet set2) => set1.Comparer.Equals(set2.Comparer); + internal static bool EqualityComparersAreEqual(SegmentedHashSet set1, SegmentedHashSet set2) => set1.Comparer.Equals(set2.Comparer); #endregion @@ -1519,12 +1519,12 @@ private struct Entry public struct Enumerator : IEnumerator { - private readonly HashSet _hashSet; + private readonly SegmentedHashSet _hashSet; private readonly int _version; private int _index; private T _current; - internal Enumerator(HashSet hashSet) + internal Enumerator(SegmentedHashSet hashSet) { _hashSet = hashSet; _version = hashSet._version; From 3fd3edf2ab78fbb943644956b5fb91c6979bb8eb Mon Sep 17 00:00:00 2001 From: Sam Harwell Date: Fri, 2 Jul 2021 09:46:50 -0700 Subject: [PATCH 005/413] Remove serialization support --- .../Collections/SegmentedHashSet`1.cs | 91 +------------------ 1 file changed, 1 insertion(+), 90 deletions(-) diff --git a/src/Dependencies/Collections/SegmentedHashSet`1.cs b/src/Dependencies/Collections/SegmentedHashSet`1.cs index 77f2db778ac74..59ba5890c15fe 100644 --- a/src/Dependencies/Collections/SegmentedHashSet`1.cs +++ b/src/Dependencies/Collections/SegmentedHashSet`1.cs @@ -10,7 +10,6 @@ using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Runtime.CompilerServices; -using System.Runtime.Serialization; using Internal.Runtime.CompilerServices; @@ -18,18 +17,11 @@ namespace Microsoft.CodeAnalysis.Collections { [DebuggerTypeProxy(typeof(ICollectionDebugView<>))] [DebuggerDisplay("Count = {Count}")] - [Serializable] [TypeForwardedFrom("System.Core, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")] - internal class SegmentedHashSet : ICollection, ISet, IReadOnlyCollection, IReadOnlySet, ISerializable, IDeserializationCallback + internal class SegmentedHashSet : ICollection, ISet, IReadOnlyCollection, IReadOnlySet { // This uses the same array-based implementation as Dictionary. - // Constants for serialization - private const string CapacityName = "Capacity"; // Do not rename (binary serialization) - private const string ElementsName = "Elements"; // Do not rename (binary serialization) - private const string ComparerName = "Comparer"; // Do not rename (binary serialization) - private const string VersionName = "Version"; // Do not rename (binary serialization) - /// Cutoff point for stackallocs. This corresponds to the number of ints. private const int StackAllocThreshold = 100; @@ -53,7 +45,6 @@ internal class SegmentedHashSet : ICollection, ISet, IReadOnlyCollectio private int _freeCount; private int _version; private IEqualityComparer? _comparer; - private SerializationInfo? _siInfo; // temporary variable needed during deserialization #region Constructors @@ -137,15 +128,6 @@ public SegmentedHashSet(int capacity, IEqualityComparer? comparer) : this(com } } - protected SegmentedHashSet(SerializationInfo info, StreamingContext context) - { - // We can't do anything with the keys and values until the entire graph has been - // deserialized and we have a reasonable estimate that GetHashCode is not going to - // fail. For the time being, we'll just cache this. The graph is not valid until - // OnDeserialization has been called. - _siInfo = info; - } - /// Initializes the SegmentedHashSet from another SegmentedHashSet with the same element type and equality comparer. 
private void ConstructFrom(SegmentedHashSet source) { @@ -390,77 +372,6 @@ public bool Remove(T item) #endregion - #region ISerializable methods - - public virtual void GetObjectData(SerializationInfo info, StreamingContext context) - { - if (info == null) - { - ThrowHelper.ThrowArgumentNullException(ExceptionArgument.info); - } - - info.AddValue(VersionName, _version); // need to serialize version to avoid problems with serializing while enumerating - info.AddValue(ComparerName, Comparer, typeof(IEqualityComparer)); - info.AddValue(CapacityName, _buckets == null ? 0 : _buckets.Length); - - if (_buckets != null) - { - var array = new T[Count]; - CopyTo(array); - info.AddValue(ElementsName, array, typeof(T[])); - } - } - - #endregion - - #region IDeserializationCallback methods - - public virtual void OnDeserialization(object? sender) - { - if (_siInfo == null) - { - // It might be necessary to call OnDeserialization from a container if the - // container object also implements OnDeserialization. We can return immediately - // if this function is called twice. Note we set _siInfo to null at the end of this method. - return; - } - - int capacity = _siInfo.GetInt32(CapacityName); - _comparer = (IEqualityComparer)_siInfo.GetValue(ComparerName, typeof(IEqualityComparer))!; - _freeList = -1; - _freeCount = 0; - - if (capacity != 0) - { - _buckets = new int[capacity]; - _entries = new Entry[capacity]; -#if TARGET_64BIT - _fastModMultiplier = HashHelpers.GetFastModMultiplier((uint)capacity); -#endif - - T[]? array = (T[]?)_siInfo.GetValue(ElementsName, typeof(T[])); - if (array == null) - { - ThrowHelper.ThrowSerializationException(ExceptionResource.Serialization_MissingKeys); - } - - // There are no resizes here because we already set capacity above. - for (int i = 0; i < array.Length; i++) - { - AddIfNotPresent(array[i], out _); - } - } - else - { - _buckets = null; - } - - _version = _siInfo.GetInt32(VersionName); - _siInfo = null; - } - - #endregion - #region SegmentedHashSet methods /// Adds the specified element to the . From 16870015979d69f84fb8c0c9fe3eaa1803b82736 Mon Sep 17 00:00:00 2001 From: Sam Harwell Date: Fri, 2 Jul 2021 09:48:27 -0700 Subject: [PATCH 006/413] Remove requirement that the runtime support comparer devirtualization and non-randomized string comparers --- .../Collections/SegmentedHashSet`1.cs | 91 ++++++------------- 1 file changed, 26 insertions(+), 65 deletions(-) diff --git a/src/Dependencies/Collections/SegmentedHashSet`1.cs b/src/Dependencies/Collections/SegmentedHashSet`1.cs index 59ba5890c15fe..100c640d2c440 100644 --- a/src/Dependencies/Collections/SegmentedHashSet`1.cs +++ b/src/Dependencies/Collections/SegmentedHashSet`1.cs @@ -20,6 +20,13 @@ namespace Microsoft.CodeAnalysis.Collections [TypeForwardedFrom("System.Core, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")] internal class SegmentedHashSet : ICollection, ISet, IReadOnlyCollection, IReadOnlySet { + private const bool SupportsComparerDevirtualization +#if NETCOREAPP + = true; +#else + = false; +#endif + // This uses the same array-based implementation as Dictionary. /// Cutoff point for stackallocs. This corresponds to the number of ints. @@ -44,7 +51,15 @@ internal class SegmentedHashSet : ICollection, ISet, IReadOnlyCollectio private int _freeList; private int _freeCount; private int _version; - private IEqualityComparer? _comparer; +#if NETCOREAPP + private readonly IEqualityComparer? 
_comparer; +#else + /// + /// doesn't devirtualize on .NET Framework, so we always ensure + /// is initialized to a non- value. + /// + private readonly IEqualityComparer _comparer; +#endif #region Constructors @@ -57,25 +72,10 @@ public SegmentedHashSet(IEqualityComparer? comparer) _comparer = comparer; } - // Special-case EqualityComparer.Default, StringComparer.Ordinal, and StringComparer.OrdinalIgnoreCase. - // We use a non-randomized comparer for improved perf, falling back to a randomized comparer if the - // hash buckets become unbalanced. - - if (typeof(T) == typeof(string)) - { - if (_comparer is null) - { - _comparer = (IEqualityComparer)NonRandomizedStringEqualityComparer.WrappedAroundDefaultComparer; - } - else if (ReferenceEquals(_comparer, StringComparer.Ordinal)) - { - _comparer = (IEqualityComparer)NonRandomizedStringEqualityComparer.WrappedAroundStringComparerOrdinal; - } - else if (ReferenceEquals(_comparer, StringComparer.OrdinalIgnoreCase)) - { - _comparer = (IEqualityComparer)NonRandomizedStringEqualityComparer.WrappedAroundStringComparerOrdinalIgnoreCase; - } - } +#if !NETCOREAPP + // .NET Framework doesn't support devirtualization, so we always initialize comparer to a non-null value + _comparer ??= EqualityComparer.Default; +#endif } public SegmentedHashSet(int capacity) : this(capacity, null) { } @@ -211,7 +211,7 @@ private int FindItemIndex(T item) uint collisionCount = 0; IEqualityComparer? comparer = _comparer; - if (comparer == null) + if (SupportsComparerDevirtualization && comparer == null) { int hashCode = item != null ? item.GetHashCode() : 0; if (typeof(T).IsValueType) @@ -847,14 +847,7 @@ public IEqualityComparer Comparer { get { - if (typeof(T) == typeof(string)) - { - return (IEqualityComparer)IInternalStringEqualityComparer.GetUnderlyingEqualityComparer((IEqualityComparer?)_comparer); - } - else - { - return _comparer ?? EqualityComparer.Default; - } + return _comparer ?? EqualityComparer.Default; } } @@ -878,16 +871,14 @@ public int EnsureCapacity(int capacity) } int newSize = HashHelpers.GetPrime(capacity); - Resize(newSize, forceNewHashCodes: false); + Resize(newSize); return newSize; } - private void Resize() => Resize(HashHelpers.ExpandPrime(_count), forceNewHashCodes: false); + private void Resize() => Resize(HashHelpers.ExpandPrime(_count)); - private void Resize(int newSize, bool forceNewHashCodes) + private void Resize(int newSize) { - // Value types never rehash - Debug.Assert(!forceNewHashCodes || !typeof(T).IsValueType); Debug.Assert(_entries != null, "_entries should be non-null"); Debug.Assert(newSize >= _entries.Length); @@ -896,26 +887,6 @@ private void Resize(int newSize, bool forceNewHashCodes) int count = _count; Array.Copy(_entries, entries, count); - if (!typeof(T).IsValueType && forceNewHashCodes) - { - Debug.Assert(_comparer is NonRandomizedStringEqualityComparer); - _comparer = (IEqualityComparer)((NonRandomizedStringEqualityComparer)_comparer).GetRandomizedEqualityComparer(); - - for (int i = 0; i < count; i++) - { - ref Entry entry = ref entries[i]; - if (entry.Next >= -1) - { - entry.HashCode = entry.Value != null ? 
_comparer!.GetHashCode(entry.Value) : 0; - } - } - - if (ReferenceEquals(_comparer, EqualityComparer.Default)) - { - _comparer = null; - } - } - // Assign member variables after both arrays allocated to guard against corruption from OOM if second fails _buckets = new int[newSize]; #if TARGET_64BIT @@ -1023,7 +994,7 @@ private bool AddIfNotPresent(T value, out int location) uint collisionCount = 0; ref int bucket = ref Unsafe.NullRef(); - if (comparer == null) + if (SupportsComparerDevirtualization && comparer == null) { hashCode = value != null ? value.GetHashCode() : 0; bucket = ref GetBucketRef(hashCode); @@ -1128,16 +1099,6 @@ private bool AddIfNotPresent(T value, out int location) location = index; } - // Value types never rehash - if (!typeof(T).IsValueType && collisionCount > HashHelpers.HashCollisionThreshold && comparer is NonRandomizedStringEqualityComparer) - { - // If we hit the collision threshold we'll need to switch to the comparer which is using randomized string hashing - // i.e. EqualityComparer.Default. - Resize(entries.Length, forceNewHashCodes: true); - location = FindItemIndex(value); - Debug.Assert(location >= 0); - } - return true; } From e1ed6d78e7bfbc3cba7190dea05d111d64e88946 Mon Sep 17 00:00:00 2001 From: Sam Harwell Date: Fri, 2 Jul 2021 09:52:07 -0700 Subject: [PATCH 007/413] Fix code style diagnostics (except naming) --- .../Collections/Internal/BitHelper.cs | 5 +- .../SegmentedHashSetEqualityComparer`1.cs | 13 +- .../Collections/SegmentedHashSet`1.cs | 204 +++++++++--------- 3 files changed, 112 insertions(+), 110 deletions(-) diff --git a/src/Dependencies/Collections/Internal/BitHelper.cs b/src/Dependencies/Collections/Internal/BitHelper.cs index b13dce1646576..079f8b6738cff 100644 --- a/src/Dependencies/Collections/Internal/BitHelper.cs +++ b/src/Dependencies/Collections/Internal/BitHelper.cs @@ -1,5 +1,6 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. // NOTE: This code is derived from an implementation originally in dotnet/runtime: // https://github.com/dotnet/runtime/blob/v5.0.7/src/libraries/Common/src/System/Collections/Generic/BitHelper.cs @@ -25,7 +26,7 @@ internal BitHelper(Span span, bool clear) internal void MarkBit(int bitPosition) { - int bitArrayIndex = bitPosition / IntSize; + var bitArrayIndex = bitPosition / IntSize; if ((uint)bitArrayIndex < (uint)_span.Length) { _span[bitArrayIndex] |= (1 << (bitPosition % IntSize)); @@ -34,7 +35,7 @@ internal void MarkBit(int bitPosition) internal bool IsMarked(int bitPosition) { - int bitArrayIndex = bitPosition / IntSize; + var bitArrayIndex = bitPosition / IntSize; return (uint)bitArrayIndex < (uint)_span.Length && (_span[bitArrayIndex] & (1 << (bitPosition % IntSize))) != 0; diff --git a/src/Dependencies/Collections/Internal/SegmentedHashSetEqualityComparer`1.cs b/src/Dependencies/Collections/Internal/SegmentedHashSetEqualityComparer`1.cs index 449981afc611b..af2d74b8e1498 100644 --- a/src/Dependencies/Collections/Internal/SegmentedHashSetEqualityComparer`1.cs +++ b/src/Dependencies/Collections/Internal/SegmentedHashSetEqualityComparer`1.cs @@ -1,5 +1,6 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. 
// NOTE: This code is derived from an implementation originally in dotnet/runtime: // https://github.com/dotnet/runtime/blob/v5.0.7/src/libraries/System.Private.CoreLib/src/System/Collections/Generic/HashSetEqualityComparer.cs @@ -26,7 +27,7 @@ public bool Equals(SegmentedHashSet? x, SegmentedHashSet? y) return false; } - EqualityComparer defaultComparer = EqualityComparer.Default; + var defaultComparer = EqualityComparer.Default; // If both sets use the same comparer, they're equal if they're the same // size and one is a "subset" of the other. @@ -36,10 +37,10 @@ public bool Equals(SegmentedHashSet? x, SegmentedHashSet? y) } // Otherwise, do an O(N^2) match. - foreach (T yi in y) + foreach (var yi in y) { - bool found = false; - foreach (T xi in x) + var found = false; + foreach (var xi in x) { if (defaultComparer.Equals(yi, xi)) { @@ -59,11 +60,11 @@ public bool Equals(SegmentedHashSet? x, SegmentedHashSet? y) public int GetHashCode(SegmentedHashSet? obj) { - int hashCode = 0; // default to 0 for null/empty set + var hashCode = 0; // default to 0 for null/empty set if (obj != null) { - foreach (T t in obj) + foreach (var t in obj) { if (t != null) { diff --git a/src/Dependencies/Collections/SegmentedHashSet`1.cs b/src/Dependencies/Collections/SegmentedHashSet`1.cs index 100c640d2c440..9e525fded282d 100644 --- a/src/Dependencies/Collections/SegmentedHashSet`1.cs +++ b/src/Dependencies/Collections/SegmentedHashSet`1.cs @@ -1,5 +1,6 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. // NOTE: This code is derived from an implementation originally in dotnet/runtime: // https://github.com/dotnet/runtime/blob/v5.0.7/src/libraries/System.Private.CoreLib/src/System/Collections/Generic/HashSet.cs @@ -99,7 +100,7 @@ public SegmentedHashSet(IEnumerable collection, IEqualityComparer? compare // contain duplicates, so call TrimExcess if resulting SegmentedHashSet is larger than the threshold. if (collection is ICollection coll) { - int count = coll.Count; + var count = coll.Count; if (count > 0) { Initialize(count); @@ -139,8 +140,8 @@ private void ConstructFrom(SegmentedHashSet source) return; } - int capacity = source._buckets!.Length; - int threshold = HashHelpers.ExpandPrime(source.Count + 1); + var capacity = source._buckets!.Length; + var threshold = HashHelpers.ExpandPrime(source.Count + 1); if (threshold >= capacity) { @@ -157,10 +158,10 @@ private void ConstructFrom(SegmentedHashSet source) { Initialize(source.Count); - Entry[]? entries = source._entries; - for (int i = 0; i < source._count; i++) + var entries = source._entries; + for (var i = 0; i < source._count; i++) { - ref Entry entry = ref entries![i]; + ref var entry = ref entries![i]; if (entry.Next >= -1) { AddIfNotPresent(entry.Value, out _); @@ -180,7 +181,7 @@ private void ConstructFrom(SegmentedHashSet source) /// Removes all elements from the object. public void Clear() { - int count = _count; + var count = _count; if (count > 0) { Debug.Assert(_buckets != null, "_buckets should be non-null"); @@ -202,25 +203,25 @@ public void Clear() /// Gets the index of the item in , or -1 if it's not in the set. private int FindItemIndex(T item) { - int[]? buckets = _buckets; + var buckets = _buckets; if (buckets != null) { - Entry[]? 
entries = _entries; + var entries = _entries; Debug.Assert(entries != null, "Expected _entries to be initialized"); uint collisionCount = 0; - IEqualityComparer? comparer = _comparer; + var comparer = _comparer; if (SupportsComparerDevirtualization && comparer == null) { - int hashCode = item != null ? item.GetHashCode() : 0; + var hashCode = item != null ? item.GetHashCode() : 0; if (typeof(T).IsValueType) { // ValueType: Devirtualize with EqualityComparer.Default intrinsic - int i = GetBucketRef(hashCode) - 1; // Value in _buckets is 1-based + var i = GetBucketRef(hashCode) - 1; // Value in _buckets is 1-based while (i >= 0) { - ref Entry entry = ref entries[i]; + ref var entry = ref entries[i]; if (entry.HashCode == hashCode && EqualityComparer.Default.Equals(entry.Value, item)) { return i; @@ -239,11 +240,11 @@ private int FindItemIndex(T item) { // Object type: Shared Generic, EqualityComparer.Default won't devirtualize (https://github.com/dotnet/runtime/issues/10050), // so cache in a local rather than get EqualityComparer per loop iteration. - EqualityComparer defaultComparer = EqualityComparer.Default; - int i = GetBucketRef(hashCode) - 1; // Value in _buckets is 1-based + var defaultComparer = EqualityComparer.Default; + var i = GetBucketRef(hashCode) - 1; // Value in _buckets is 1-based while (i >= 0) { - ref Entry entry = ref entries[i]; + ref var entry = ref entries[i]; if (entry.HashCode == hashCode && defaultComparer.Equals(entry.Value, item)) { return i; @@ -261,11 +262,11 @@ private int FindItemIndex(T item) } else { - int hashCode = item != null ? comparer.GetHashCode(item) : 0; - int i = GetBucketRef(hashCode) - 1; // Value in _buckets is 1-based + var hashCode = item != null ? comparer.GetHashCode(item) : 0; + var i = GetBucketRef(hashCode) - 1; // Value in _buckets is 1-based while (i >= 0) { - ref Entry entry = ref entries[i]; + ref var entry = ref entries[i]; if (entry.HashCode == hashCode && comparer.Equals(entry.Value, item)) { return i; @@ -289,7 +290,7 @@ private int FindItemIndex(T item) [MethodImpl(MethodImplOptions.AggressiveInlining)] private ref int GetBucketRef(int hashCode) { - int[] buckets = _buckets!; + var buckets = _buckets!; #if TARGET_64BIT return ref buckets[HashHelpers.FastMod((uint)hashCode, (uint)buckets.Length, _fastModMultiplier)]; #else @@ -301,19 +302,19 @@ public bool Remove(T item) { if (_buckets != null) { - Entry[]? entries = _entries; + var entries = _entries; Debug.Assert(entries != null, "entries should be non-null"); uint collisionCount = 0; - int last = -1; - int hashCode = item != null ? (_comparer?.GetHashCode(item) ?? item.GetHashCode()) : 0; + var last = -1; + var hashCode = item != null ? (_comparer?.GetHashCode(item) ?? item.GetHashCode()) : 0; - ref int bucket = ref GetBucketRef(hashCode); - int i = bucket - 1; // Value in buckets is 1-based + ref var bucket = ref GetBucketRef(hashCode); + var i = bucket - 1; // Value in buckets is 1-based while (i >= 0) { - ref Entry entry = ref entries[i]; + ref var entry = ref entries[i]; if (entry.HashCode == hashCode && (_comparer?.Equals(entry.Value, item) ?? 
EqualityComparer.Default.Equals(entry.Value, item))) { @@ -364,7 +365,7 @@ public bool Remove(T item) #region IEnumerable methods - public Enumerator GetEnumerator() => new Enumerator(this); + public Enumerator GetEnumerator() => new(this); IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); @@ -393,7 +394,7 @@ public bool TryGetValue(T equalValue, [MaybeNullWhen(false)] out T actualValue) { if (_buckets != null) { - int index = FindItemIndex(equalValue); + var index = FindItemIndex(equalValue); if (index >= 0) { actualValue = _entries![index].Value; @@ -414,7 +415,7 @@ public void UnionWith(IEnumerable other) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.other); } - foreach (T item in other) + foreach (var item in other) { AddIfNotPresent(item, out _); } @@ -480,7 +481,7 @@ public void ExceptWith(IEnumerable other) } // Remove every element in other from this. - foreach (T element in other) + foreach (var element in other) { Remove(element); } @@ -556,7 +557,7 @@ public bool IsSubsetOf(IEnumerable other) return IsSubsetOfHashSetWithSameComparer(otherAsSet); } - (int uniqueCount, int unfoundCount) = CheckUniqueAndUnfoundElements(other, returnIfUnfound: false); + (var uniqueCount, var unfoundCount) = CheckUniqueAndUnfoundElements(other, returnIfUnfound: false); return uniqueCount == Count && unfoundCount >= 0; } @@ -604,7 +605,7 @@ public bool IsProperSubsetOf(IEnumerable other) } } - (int uniqueCount, int unfoundCount) = CheckUniqueAndUnfoundElements(other, returnIfUnfound: false); + (var uniqueCount, var unfoundCount) = CheckUniqueAndUnfoundElements(other, returnIfUnfound: false); return uniqueCount == Count && unfoundCount > 0; } @@ -684,7 +685,7 @@ public bool IsProperSupersetOf(IEnumerable other) } // Couldn't fall out in the above cases; do it the long way - (int uniqueCount, int unfoundCount) = CheckUniqueAndUnfoundElements(other, returnIfUnfound: true); + (var uniqueCount, var unfoundCount) = CheckUniqueAndUnfoundElements(other, returnIfUnfound: true); return uniqueCount < Count && unfoundCount == 0; } @@ -709,7 +710,7 @@ public bool Overlaps(IEnumerable other) return true; } - foreach (T element in other) + foreach (var element in other) { if (Contains(element)) { @@ -760,7 +761,7 @@ other is ICollection otherAsCollection && return false; } - (int uniqueCount, int unfoundCount) = CheckUniqueAndUnfoundElements(other, returnIfUnfound: true); + (var uniqueCount, var unfoundCount) = CheckUniqueAndUnfoundElements(other, returnIfUnfound: true); return uniqueCount == Count && unfoundCount == 0; } } @@ -799,10 +800,10 @@ public void CopyTo(T[] array, int arrayIndex, int count) ThrowHelper.ThrowArgumentException(ExceptionResource.Arg_ArrayPlusOffTooSmall); } - Entry[]? entries = _entries; - for (int i = 0; i < _count && count != 0; i++) + var entries = _entries; + for (var i = 0; i < _count && count != 0; i++) { - ref Entry entry = ref entries![i]; + ref var entry = ref entries![i]; if (entry.Next >= -1) { array[arrayIndex++] = entry.Value; @@ -819,15 +820,15 @@ public int RemoveWhere(Predicate match) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.match); } - Entry[]? 
entries = _entries; - int numRemoved = 0; - for (int i = 0; i < _count; i++) + var entries = _entries; + var numRemoved = 0; + for (var i = 0; i < _count; i++) { - ref Entry entry = ref entries![i]; + ref var entry = ref entries![i]; if (entry.Next >= -1) { // Cache value in case delegate removes it - T value = entry.Value; + var value = entry.Value; if (match(value)) { // Check again that remove actually removed it. @@ -859,7 +860,7 @@ public int EnsureCapacity(int capacity) ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.capacity); } - int currentCapacity = _entries == null ? 0 : _entries.Length; + var currentCapacity = _entries == null ? 0 : _entries.Length; if (currentCapacity >= capacity) { return currentCapacity; @@ -870,7 +871,7 @@ public int EnsureCapacity(int capacity) return Initialize(capacity); } - int newSize = HashHelpers.GetPrime(capacity); + var newSize = HashHelpers.GetPrime(capacity); Resize(newSize); return newSize; } @@ -884,7 +885,7 @@ private void Resize(int newSize) var entries = new Entry[newSize]; - int count = _count; + var count = _count; Array.Copy(_entries, entries, count); // Assign member variables after both arrays allocated to guard against corruption from OOM if second fails @@ -892,12 +893,12 @@ private void Resize(int newSize) #if TARGET_64BIT _fastModMultiplier = HashHelpers.GetFastModMultiplier((uint)newSize); #endif - for (int i = 0; i < count; i++) + for (var i = 0; i < count; i++) { - ref Entry entry = ref entries[i]; + ref var entry = ref entries[i]; if (entry.Next >= -1) { - ref int bucket = ref GetBucketRef(entry.HashCode); + ref var bucket = ref GetBucketRef(entry.HashCode); entry.Next = bucket - 1; // Value in _buckets is 1-based bucket = i + 1; } @@ -912,29 +913,29 @@ private void Resize(int newSize) /// public void TrimExcess() { - int capacity = Count; + var capacity = Count; - int newSize = HashHelpers.GetPrime(capacity); - Entry[]? oldEntries = _entries; - int currentCapacity = oldEntries == null ? 0 : oldEntries.Length; + var newSize = HashHelpers.GetPrime(capacity); + var oldEntries = _entries; + var currentCapacity = oldEntries == null ? 0 : oldEntries.Length; if (newSize >= currentCapacity) { return; } - int oldCount = _count; + var oldCount = _count; _version++; Initialize(newSize); - Entry[]? entries = _entries; - int count = 0; - for (int i = 0; i < oldCount; i++) + var entries = _entries; + var count = 0; + for (var i = 0; i < oldCount; i++) { - int hashCode = oldEntries![i].HashCode; // At this point, we know we have entries. + var hashCode = oldEntries![i].HashCode; // At this point, we know we have entries. if (oldEntries[i].Next >= -1) { - ref Entry entry = ref entries![count]; + ref var entry = ref entries![count]; entry = oldEntries[i]; - ref int bucket = ref GetBucketRef(hashCode); + ref var bucket = ref GetBucketRef(hashCode); entry.Next = bucket - 1; // Value in _buckets is 1-based bucket = count + 1; count++; @@ -958,7 +959,7 @@ public void TrimExcess() /// private int Initialize(int capacity) { - int size = HashHelpers.GetPrime(capacity); + var size = HashHelpers.GetPrime(capacity); var buckets = new int[size]; var entries = new Entry[size]; @@ -985,26 +986,26 @@ private bool AddIfNotPresent(T value, out int location) } Debug.Assert(_buckets != null); - Entry[]? entries = _entries; + var entries = _entries; Debug.Assert(entries != null, "expected entries to be non-null"); - IEqualityComparer? 
comparer = _comparer; + var comparer = _comparer; int hashCode; uint collisionCount = 0; - ref int bucket = ref Unsafe.NullRef(); + ref var bucket = ref Unsafe.NullRef(); if (SupportsComparerDevirtualization && comparer == null) { hashCode = value != null ? value.GetHashCode() : 0; bucket = ref GetBucketRef(hashCode); - int i = bucket - 1; // Value in _buckets is 1-based + var i = bucket - 1; // Value in _buckets is 1-based if (typeof(T).IsValueType) { // ValueType: Devirtualize with EqualityComparer.Default intrinsic while (i >= 0) { - ref Entry entry = ref entries[i]; + ref var entry = ref entries[i]; if (entry.HashCode == hashCode && EqualityComparer.Default.Equals(entry.Value, value)) { location = i; @@ -1024,10 +1025,10 @@ private bool AddIfNotPresent(T value, out int location) { // Object type: Shared Generic, EqualityComparer.Default won't devirtualize (https://github.com/dotnet/runtime/issues/10050), // so cache in a local rather than get EqualityComparer per loop iteration. - EqualityComparer defaultComparer = EqualityComparer.Default; + var defaultComparer = EqualityComparer.Default; while (i >= 0) { - ref Entry entry = ref entries[i]; + ref var entry = ref entries[i]; if (entry.HashCode == hashCode && defaultComparer.Equals(entry.Value, value)) { location = i; @@ -1048,10 +1049,10 @@ private bool AddIfNotPresent(T value, out int location) { hashCode = value != null ? comparer.GetHashCode(value) : 0; bucket = ref GetBucketRef(hashCode); - int i = bucket - 1; // Value in _buckets is 1-based + var i = bucket - 1; // Value in _buckets is 1-based while (i >= 0) { - ref Entry entry = ref entries[i]; + ref var entry = ref entries[i]; if (entry.HashCode == hashCode && comparer.Equals(entry.Value, value)) { location = i; @@ -1078,7 +1079,7 @@ private bool AddIfNotPresent(T value, out int location) } else { - int count = _count; + var count = _count; if (count == entries.Length) { Resize(); @@ -1090,7 +1091,7 @@ private bool AddIfNotPresent(T value, out int location) } { - ref Entry entry = ref entries![index]; + ref var entry = ref entries![index]; entry.HashCode = hashCode; entry.Next = bucket - 1; // Value in _buckets is 1-based entry.Value = value; @@ -1109,7 +1110,7 @@ private bool AddIfNotPresent(T value, out int location) /// private bool ContainsAllElements(IEnumerable other) { - foreach (T element in other) + foreach (var element in other) { if (!Contains(element)) { @@ -1132,7 +1133,7 @@ private bool ContainsAllElements(IEnumerable other) /// internal bool IsSubsetOfHashSetWithSameComparer(SegmentedHashSet other) { - foreach (T item in this) + foreach (var item in this) { if (!other.Contains(item)) { @@ -1149,13 +1150,13 @@ internal bool IsSubsetOfHashSetWithSameComparer(SegmentedHashSet other) /// private void IntersectWithHashSetWithSameComparer(SegmentedHashSet other) { - Entry[]? entries = _entries; - for (int i = 0; i < _count; i++) + var entries = _entries; + for (var i = 0; i < _count; i++) { - ref Entry entry = ref entries![i]; + ref var entry = ref entries![i]; if (entry.Next >= -1) { - T item = entry.Value; + var item = entry.Value; if (!other.Contains(item)) { Remove(item); @@ -1176,18 +1177,18 @@ private unsafe void IntersectWithEnumerable(IEnumerable other) // Keep track of current last index; don't want to move past the end of our bit array // (could happen if another thread is modifying the collection). 
- int originalCount = _count; + var originalCount = _count; int intArrayLength = BitHelper.ToIntArrayLength(originalCount); Span span = stackalloc int[StackAllocThreshold]; - BitHelper bitHelper = intArrayLength <= StackAllocThreshold ? + var bitHelper = intArrayLength <= StackAllocThreshold ? new BitHelper(span.Slice(0, intArrayLength), clear: true) : new BitHelper(new int[intArrayLength], clear: false); // Mark if contains: find index of in slots array and mark corresponding element in bit array. - foreach (T item in other) + foreach (var item in other) { - int index = FindItemIndex(item); + var index = FindItemIndex(item); if (index >= 0) { bitHelper.MarkBit(index); @@ -1196,9 +1197,9 @@ private unsafe void IntersectWithEnumerable(IEnumerable other) // If anything unmarked, remove it. Perf can be optimized here if BitHelper had a // FindFirstUnmarked method. - for (int i = 0; i < originalCount; i++) + for (var i = 0; i < originalCount; i++) { - ref Entry entry = ref _entries![i]; + ref var entry = ref _entries![i]; if (entry.Next >= -1 && !bitHelper.IsMarked(i)) { Remove(entry.Value); @@ -1216,7 +1217,7 @@ private unsafe void IntersectWithEnumerable(IEnumerable other) /// private void SymmetricExceptWithUniqueHashSet(SegmentedHashSet other) { - foreach (T item in other) + foreach (var item in other) { if (!Remove(item)) { @@ -1244,23 +1245,22 @@ private void SymmetricExceptWithUniqueHashSet(SegmentedHashSet other) /// private unsafe void SymmetricExceptWithEnumerable(IEnumerable other) { - int originalCount = _count; + var originalCount = _count; int intArrayLength = BitHelper.ToIntArrayLength(originalCount); Span itemsToRemoveSpan = stackalloc int[StackAllocThreshold / 2]; - BitHelper itemsToRemove = intArrayLength <= StackAllocThreshold / 2 ? + var itemsToRemove = intArrayLength <= StackAllocThreshold / 2 ? new BitHelper(itemsToRemoveSpan.Slice(0, intArrayLength), clear: true) : new BitHelper(new int[intArrayLength], clear: false); Span itemsAddedFromOtherSpan = stackalloc int[StackAllocThreshold / 2]; - BitHelper itemsAddedFromOther = intArrayLength <= StackAllocThreshold / 2 ? + var itemsAddedFromOther = intArrayLength <= StackAllocThreshold / 2 ? new BitHelper(itemsAddedFromOtherSpan.Slice(0, intArrayLength), clear: true) : new BitHelper(new int[intArrayLength], clear: false); - foreach (T item in other) + foreach (var item in other) { - int location; - if (AddIfNotPresent(item, out location)) + if (AddIfNotPresent(item, out var location)) { // wasn't already present in collection; flag it as something not to remove // *NOTE* if location is out of range, we should ignore. BitHelper will @@ -1283,7 +1283,7 @@ private unsafe void SymmetricExceptWithEnumerable(IEnumerable other) } // if anything marked, remove it - for (int i = 0; i < originalCount; i++) + for (var i = 0; i < originalCount; i++) { if (itemsToRemove.IsMarked(i)) { @@ -1320,8 +1320,8 @@ private unsafe (int UniqueCount, int UnfoundCount) CheckUniqueAndUnfoundElements // Need special case in case this has no elements. 
if (_count == 0) { - int numElementsInOther = 0; - foreach (T item in other) + var numElementsInOther = 0; + foreach (var item in other) { numElementsInOther++; break; // break right away, all we want to know is whether other has 0 or 1 elements @@ -1332,20 +1332,20 @@ private unsafe (int UniqueCount, int UnfoundCount) CheckUniqueAndUnfoundElements Debug.Assert((_buckets != null) && (_count > 0), "_buckets was null but count greater than 0"); - int originalCount = _count; + var originalCount = _count; int intArrayLength = BitHelper.ToIntArrayLength(originalCount); Span span = stackalloc int[StackAllocThreshold]; - BitHelper bitHelper = intArrayLength <= StackAllocThreshold ? + var bitHelper = intArrayLength <= StackAllocThreshold ? new BitHelper(span.Slice(0, intArrayLength), clear: true) : new BitHelper(new int[intArrayLength], clear: false); - int unfoundCount = 0; // count of items in other not found in this - int uniqueFoundCount = 0; // count of unique items in other found in this + var unfoundCount = 0; // count of items in other not found in this + var uniqueFoundCount = 0; // count of unique items in other found in this - foreach (T item in other) + foreach (var item in other) { - int index = FindItemIndex(item); + var index = FindItemIndex(item); if (index >= 0) { if (!bitHelper.IsMarked(index)) @@ -1375,7 +1375,7 @@ private unsafe (int UniqueCount, int UnfoundCount) CheckUniqueAndUnfoundElements /// internal static bool EqualityComparersAreEqual(SegmentedHashSet set1, SegmentedHashSet set2) => set1.Comparer.Equals(set2.Comparer); -#endregion + #endregion private struct Entry { @@ -1415,7 +1415,7 @@ public bool MoveNext() // dictionary.count+1 could be negative if dictionary.count is int.MaxValue while ((uint)_index < (uint)_hashSet._count) { - ref Entry entry = ref _hashSet._entries![_index++]; + ref var entry = ref _hashSet._entries![_index++]; if (entry.Next >= -1) { _current = entry.Value; From 57f35617e71ae643f6608431d8ad3b7e8af2567d Mon Sep 17 00:00:00 2001 From: Sam Harwell Date: Fri, 2 Jul 2021 09:53:43 -0700 Subject: [PATCH 008/413] Fix naming code style diagnostics --- .../Collections/SegmentedHashSet`1.cs | 92 +++++++++---------- 1 file changed, 46 insertions(+), 46 deletions(-) diff --git a/src/Dependencies/Collections/SegmentedHashSet`1.cs b/src/Dependencies/Collections/SegmentedHashSet`1.cs index 9e525fded282d..bc4518e8d4362 100644 --- a/src/Dependencies/Collections/SegmentedHashSet`1.cs +++ b/src/Dependencies/Collections/SegmentedHashSet`1.cs @@ -162,9 +162,9 @@ private void ConstructFrom(SegmentedHashSet source) for (var i = 0; i < source._count; i++) { ref var entry = ref entries![i]; - if (entry.Next >= -1) + if (entry._next >= -1) { - AddIfNotPresent(entry.Value, out _); + AddIfNotPresent(entry._value, out _); } } } @@ -222,11 +222,11 @@ private int FindItemIndex(T item) while (i >= 0) { ref var entry = ref entries[i]; - if (entry.HashCode == hashCode && EqualityComparer.Default.Equals(entry.Value, item)) + if (entry._hashCode == hashCode && EqualityComparer.Default.Equals(entry._value, item)) { return i; } - i = entry.Next; + i = entry._next; collisionCount++; if (collisionCount > (uint)entries.Length) @@ -245,11 +245,11 @@ private int FindItemIndex(T item) while (i >= 0) { ref var entry = ref entries[i]; - if (entry.HashCode == hashCode && defaultComparer.Equals(entry.Value, item)) + if (entry._hashCode == hashCode && defaultComparer.Equals(entry._value, item)) { return i; } - i = entry.Next; + i = entry._next; collisionCount++; if (collisionCount > 
(uint)entries.Length) @@ -267,11 +267,11 @@ private int FindItemIndex(T item) while (i >= 0) { ref var entry = ref entries[i]; - if (entry.HashCode == hashCode && comparer.Equals(entry.Value, item)) + if (entry._hashCode == hashCode && comparer.Equals(entry._value, item)) { return i; } - i = entry.Next; + i = entry._next; collisionCount++; if (collisionCount > (uint)entries.Length) @@ -316,23 +316,23 @@ public bool Remove(T item) { ref var entry = ref entries[i]; - if (entry.HashCode == hashCode && (_comparer?.Equals(entry.Value, item) ?? EqualityComparer.Default.Equals(entry.Value, item))) + if (entry._hashCode == hashCode && (_comparer?.Equals(entry._value, item) ?? EqualityComparer.Default.Equals(entry._value, item))) { if (last < 0) { - bucket = entry.Next + 1; // Value in buckets is 1-based + bucket = entry._next + 1; // Value in buckets is 1-based } else { - entries[last].Next = entry.Next; + entries[last]._next = entry._next; } Debug.Assert((StartOfFreeList - _freeList) < 0, "shouldn't underflow because max hashtable length is MaxPrimeArrayLength = 0x7FEFFFFD(2146435069) _freelist underflow threshold 2147483646"); - entry.Next = StartOfFreeList - _freeList; + entry._next = StartOfFreeList - _freeList; if (RuntimeHelpers.IsReferenceOrContainsReferences()) { - entry.Value = default!; + entry._value = default!; } _freeList = i; @@ -341,7 +341,7 @@ public bool Remove(T item) } last = i; - i = entry.Next; + i = entry._next; collisionCount++; if (collisionCount > (uint)entries.Length) @@ -397,7 +397,7 @@ public bool TryGetValue(T equalValue, [MaybeNullWhen(false)] out T actualValue) var index = FindItemIndex(equalValue); if (index >= 0) { - actualValue = _entries![index].Value; + actualValue = _entries![index]._value; return true; } } @@ -804,9 +804,9 @@ public void CopyTo(T[] array, int arrayIndex, int count) for (var i = 0; i < _count && count != 0; i++) { ref var entry = ref entries![i]; - if (entry.Next >= -1) + if (entry._next >= -1) { - array[arrayIndex++] = entry.Value; + array[arrayIndex++] = entry._value; count--; } } @@ -825,10 +825,10 @@ public int RemoveWhere(Predicate match) for (var i = 0; i < _count; i++) { ref var entry = ref entries![i]; - if (entry.Next >= -1) + if (entry._next >= -1) { // Cache value in case delegate removes it - var value = entry.Value; + var value = entry._value; if (match(value)) { // Check again that remove actually removed it. @@ -896,10 +896,10 @@ private void Resize(int newSize) for (var i = 0; i < count; i++) { ref var entry = ref entries[i]; - if (entry.Next >= -1) + if (entry._next >= -1) { - ref var bucket = ref GetBucketRef(entry.HashCode); - entry.Next = bucket - 1; // Value in _buckets is 1-based + ref var bucket = ref GetBucketRef(entry._hashCode); + entry._next = bucket - 1; // Value in _buckets is 1-based bucket = i + 1; } } @@ -930,13 +930,13 @@ public void TrimExcess() var count = 0; for (var i = 0; i < oldCount; i++) { - var hashCode = oldEntries![i].HashCode; // At this point, we know we have entries. - if (oldEntries[i].Next >= -1) + var hashCode = oldEntries![i]._hashCode; // At this point, we know we have entries. 
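
A detail worth calling out in the lookup and removal hunks above: values stored in _buckets are 1-based, so the default 0 of a freshly cleared bucket array already means "empty" with no separate initialization pass, and a bucket value b refers to entry index b - 1. A simplified lookup in that style, as a sketch that assumes a plain array and a single comparer path rather than the devirtualized variants in FindItemIndex:

    private struct EntrySketch<T>
    {
        public int _hashCode;
        public int _next;     // 0-based index of the next entry in the collision chain, or -1
        public T _value;
    }

    private static int FindIndexSketch<T>(int[] buckets, EntrySketch<T>[] entries, T item, IEqualityComparer<T> comparer)
    {
        int hashCode = item is null ? 0 : comparer.GetHashCode(item);
        int i = buckets[(uint)hashCode % (uint)buckets.Length] - 1;   // bucket holds index + 1, so 0 means empty
        uint collisionCount = 0;
        while (i >= 0)
        {
            ref EntrySketch<T> entry = ref entries[i];
            if (entry._hashCode == hashCode && comparer.Equals(entry._value, item))
                return i;
            i = entry._next;
            // A chain longer than the table itself can only mean concurrent mutation.
            if (++collisionCount > (uint)entries.Length)
                throw new InvalidOperationException("Concurrent operations are not supported.");
        }
        return -1;
    }
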
+ if (oldEntries[i]._next >= -1) { ref var entry = ref entries![count]; entry = oldEntries[i]; ref var bucket = ref GetBucketRef(hashCode); - entry.Next = bucket - 1; // Value in _buckets is 1-based + entry._next = bucket - 1; // Value in _buckets is 1-based bucket = count + 1; count++; } @@ -1006,12 +1006,12 @@ private bool AddIfNotPresent(T value, out int location) while (i >= 0) { ref var entry = ref entries[i]; - if (entry.HashCode == hashCode && EqualityComparer.Default.Equals(entry.Value, value)) + if (entry._hashCode == hashCode && EqualityComparer.Default.Equals(entry._value, value)) { location = i; return false; } - i = entry.Next; + i = entry._next; collisionCount++; if (collisionCount > (uint)entries.Length) @@ -1029,12 +1029,12 @@ private bool AddIfNotPresent(T value, out int location) while (i >= 0) { ref var entry = ref entries[i]; - if (entry.HashCode == hashCode && defaultComparer.Equals(entry.Value, value)) + if (entry._hashCode == hashCode && defaultComparer.Equals(entry._value, value)) { location = i; return false; } - i = entry.Next; + i = entry._next; collisionCount++; if (collisionCount > (uint)entries.Length) @@ -1053,12 +1053,12 @@ private bool AddIfNotPresent(T value, out int location) while (i >= 0) { ref var entry = ref entries[i]; - if (entry.HashCode == hashCode && comparer.Equals(entry.Value, value)) + if (entry._hashCode == hashCode && comparer.Equals(entry._value, value)) { location = i; return false; } - i = entry.Next; + i = entry._next; collisionCount++; if (collisionCount > (uint)entries.Length) @@ -1074,8 +1074,8 @@ private bool AddIfNotPresent(T value, out int location) { index = _freeList; _freeCount--; - Debug.Assert((StartOfFreeList - entries![_freeList].Next) >= -1, "shouldn't overflow because `next` cannot underflow"); - _freeList = StartOfFreeList - entries[_freeList].Next; + Debug.Assert((StartOfFreeList - entries![_freeList]._next) >= -1, "shouldn't overflow because `next` cannot underflow"); + _freeList = StartOfFreeList - entries[_freeList]._next; } else { @@ -1092,9 +1092,9 @@ private bool AddIfNotPresent(T value, out int location) { ref var entry = ref entries![index]; - entry.HashCode = hashCode; - entry.Next = bucket - 1; // Value in _buckets is 1-based - entry.Value = value; + entry._hashCode = hashCode; + entry._next = bucket - 1; // Value in _buckets is 1-based + entry._value = value; bucket = index + 1; _version++; location = index; @@ -1154,9 +1154,9 @@ private void IntersectWithHashSetWithSameComparer(SegmentedHashSet other) for (var i = 0; i < _count; i++) { ref var entry = ref entries![i]; - if (entry.Next >= -1) + if (entry._next >= -1) { - var item = entry.Value; + var item = entry._value; if (!other.Contains(item)) { Remove(item); @@ -1200,9 +1200,9 @@ private unsafe void IntersectWithEnumerable(IEnumerable other) for (var i = 0; i < originalCount; i++) { ref var entry = ref _entries![i]; - if (entry.Next >= -1 && !bitHelper.IsMarked(i)) + if (entry._next >= -1 && !bitHelper.IsMarked(i)) { - Remove(entry.Value); + Remove(entry._value); } } } @@ -1287,7 +1287,7 @@ private unsafe void SymmetricExceptWithEnumerable(IEnumerable other) { if (itemsToRemove.IsMarked(i)) { - Remove(_entries![i].Value); + Remove(_entries![i]._value); } } } @@ -1379,14 +1379,14 @@ private unsafe (int UniqueCount, int UnfoundCount) CheckUniqueAndUnfoundElements private struct Entry { - public int HashCode; + public int _hashCode; /// /// 0-based index of next entry in chain: -1 means end of chain /// also encodes whether this entry _itself_ is part of the 
free list by changing sign and subtracting 3, /// so -2 means end of free list, -3 means index 0 but on free list, -4 means index 1 but on free list, etc. /// - public int Next; - public T Value; + public int _next; + public T _value; } public struct Enumerator : IEnumerator @@ -1416,9 +1416,9 @@ public bool MoveNext() while ((uint)_index < (uint)_hashSet._count) { ref var entry = ref _hashSet._entries![_index++]; - if (entry.Next >= -1) + if (entry._next >= -1) { - _current = entry.Value; + _current = entry._value; return true; } } From 88fb7f4301fe3bbdc0c8e689291d9f9dfcf3488e Mon Sep 17 00:00:00 2001 From: Sam Harwell Date: Fri, 2 Jul 2021 09:58:12 -0700 Subject: [PATCH 009/413] Clean up remaining build errors --- .../Collections/Internal/BitHelper.cs | 2 ++ .../SegmentedHashSetEqualityComparer`1.cs | 2 ++ .../Collections/Internal/ThrowHelper.cs | 6 ++++++ .../Collections/SegmentedHashSet`1.cs | 16 +++++++++++----- 4 files changed, 21 insertions(+), 5 deletions(-) diff --git a/src/Dependencies/Collections/Internal/BitHelper.cs b/src/Dependencies/Collections/Internal/BitHelper.cs index 079f8b6738cff..bbe971cf0f5d2 100644 --- a/src/Dependencies/Collections/Internal/BitHelper.cs +++ b/src/Dependencies/Collections/Internal/BitHelper.cs @@ -8,6 +8,8 @@ // See the commentary in https://github.com/dotnet/roslyn/pull/50156 for notes on incorporating changes made to the // reference implementation. +using System; + namespace Microsoft.CodeAnalysis.Collections.Internal { internal ref struct BitHelper diff --git a/src/Dependencies/Collections/Internal/SegmentedHashSetEqualityComparer`1.cs b/src/Dependencies/Collections/Internal/SegmentedHashSetEqualityComparer`1.cs index af2d74b8e1498..1dfbce88e090a 100644 --- a/src/Dependencies/Collections/Internal/SegmentedHashSetEqualityComparer`1.cs +++ b/src/Dependencies/Collections/Internal/SegmentedHashSetEqualityComparer`1.cs @@ -8,6 +8,8 @@ // See the commentary in https://github.com/dotnet/roslyn/pull/50156 for notes on incorporating changes made to the // reference implementation. 
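
The _next field renamed above does double duty, as its doc comment describes: for a live entry it is the 0-based index of the next entry in the collision chain (or -1 at the end), while for a freed entry it encodes the free list through StartOfFreeList - next with StartOfFreeList == -3, so -2 marks the end of the free list and -3, -4, ... point at indices 0, 1, .... Two sketch helpers (not actual members of the type) showing how Remove and AddIfNotPresent use that encoding:

    private const int StartOfFreeListSketch = -3;

    // Push freed entry 'i' onto the free list (mirrors the Remove hunk above).
    private void FreeEntrySketch(int i)
    {
        // _freeList == -1 (empty list)  ->  _next == -3 - (-1) == -2
        // _freeList ==  0               ->  _next == -3 -   0  == -3
        _entries[i]._next = StartOfFreeListSketch - _freeList;
        _freeList = i;
        _freeCount++;
    }

    // Pop a reusable slot back off the free list (mirrors AddIfNotPresent above).
    private int TakeFreeEntrySketch()
    {
        int index = _freeList;
        _freeList = StartOfFreeListSketch - _entries[index]._next;   // -2 decodes back to -1 (empty)
        _freeCount--;
        return index;
    }
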
+using System.Collections.Generic; + namespace Microsoft.CodeAnalysis.Collections.Internal { /// Equality comparer for hashsets of hashsets diff --git a/src/Dependencies/Collections/Internal/ThrowHelper.cs b/src/Dependencies/Collections/Internal/ThrowHelper.cs index ffd0217983087..9c5a3cceae509 100644 --- a/src/Dependencies/Collections/Internal/ThrowHelper.cs +++ b/src/Dependencies/Collections/Internal/ThrowHelper.cs @@ -239,6 +239,8 @@ private static string GetArgumentName(ExceptionArgument argument) return "dictionary"; case ExceptionArgument.array: return "array"; + case ExceptionArgument.info: + return "info"; case ExceptionArgument.key: return "key"; case ExceptionArgument.value: @@ -269,6 +271,8 @@ private static string GetArgumentName(ExceptionArgument argument) return "length"; case ExceptionArgument.destinationArray: return "destinationArray"; + case ExceptionArgument.other: + return "other"; default: Debug.Fail("The enum value is not defined, please check the ExceptionArgument Enum."); return ""; @@ -319,6 +323,7 @@ internal enum ExceptionArgument { dictionary, array, + info, key, value, startIndex, @@ -334,6 +339,7 @@ internal enum ExceptionArgument source, length, destinationArray, + other, } // diff --git a/src/Dependencies/Collections/SegmentedHashSet`1.cs b/src/Dependencies/Collections/SegmentedHashSet`1.cs index bc4518e8d4362..3c9f5c113fdb0 100644 --- a/src/Dependencies/Collections/SegmentedHashSet`1.cs +++ b/src/Dependencies/Collections/SegmentedHashSet`1.cs @@ -8,18 +8,22 @@ // See the commentary in https://github.com/dotnet/roslyn/pull/50156 for notes on incorporating changes made to the // reference implementation. +using System; +using System.Collections; +using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Runtime.CompilerServices; - -using Internal.Runtime.CompilerServices; +using Microsoft.CodeAnalysis.Collections.Internal; namespace Microsoft.CodeAnalysis.Collections { [DebuggerTypeProxy(typeof(ICollectionDebugView<>))] [DebuggerDisplay("Count = {Count}")] - [TypeForwardedFrom("System.Core, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")] - internal class SegmentedHashSet : ICollection, ISet, IReadOnlyCollection, IReadOnlySet + internal class SegmentedHashSet : ICollection, ISet, IReadOnlyCollection +#if NET5_0_OR_GREATER + , IReadOnlySet +#endif { private const bool SupportsComparerDevirtualization #if NETCOREAPP @@ -330,7 +334,9 @@ public bool Remove(T item) Debug.Assert((StartOfFreeList - _freeList) < 0, "shouldn't underflow because max hashtable length is MaxPrimeArrayLength = 0x7FEFFFFD(2146435069) _freelist underflow threshold 2147483646"); entry._next = StartOfFreeList - _freeList; +#if NETCOREAPP if (RuntimeHelpers.IsReferenceOrContainsReferences()) +#endif { entry._value = default!; } @@ -993,7 +999,7 @@ private bool AddIfNotPresent(T value, out int location) int hashCode; uint collisionCount = 0; - ref var bucket = ref Unsafe.NullRef(); + ref var bucket = ref RoslynUnsafe.NullRef(); if (SupportsComparerDevirtualization && comparer == null) { From 677b2bec99a3d92691955ca77406603e2e18cca9 Mon Sep 17 00:00:00 2001 From: Sam Harwell Date: Fri, 2 Jul 2021 10:17:36 -0700 Subject: [PATCH 010/413] Assume TARGET_64BIT for performance --- src/Dependencies/Collections/SegmentedHashSet`1.cs | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/src/Dependencies/Collections/SegmentedHashSet`1.cs b/src/Dependencies/Collections/SegmentedHashSet`1.cs index 3c9f5c113fdb0..6f395076584c0 
100644 --- a/src/Dependencies/Collections/SegmentedHashSet`1.cs +++ b/src/Dependencies/Collections/SegmentedHashSet`1.cs @@ -49,9 +49,7 @@ private const bool SupportsComparerDevirtualization private int[]? _buckets; private Entry[]? _entries; -#if TARGET_64BIT private ulong _fastModMultiplier; -#endif private int _count; private int _freeList; private int _freeCount; @@ -154,9 +152,7 @@ private void ConstructFrom(SegmentedHashSet source) _freeList = source._freeList; _freeCount = source._freeCount; _count = source._count; -#if TARGET_64BIT _fastModMultiplier = source._fastModMultiplier; -#endif } else { @@ -295,11 +291,7 @@ private int FindItemIndex(T item) private ref int GetBucketRef(int hashCode) { var buckets = _buckets!; -#if TARGET_64BIT return ref buckets[HashHelpers.FastMod((uint)hashCode, (uint)buckets.Length, _fastModMultiplier)]; -#else - return ref buckets[(uint)hashCode % (uint)buckets.Length]; -#endif } public bool Remove(T item) @@ -896,9 +888,7 @@ private void Resize(int newSize) // Assign member variables after both arrays allocated to guard against corruption from OOM if second fails _buckets = new int[newSize]; -#if TARGET_64BIT _fastModMultiplier = HashHelpers.GetFastModMultiplier((uint)newSize); -#endif for (var i = 0; i < count; i++) { ref var entry = ref entries[i]; @@ -973,9 +963,7 @@ private int Initialize(int capacity) _freeList = -1; _buckets = buckets; _entries = entries; -#if TARGET_64BIT _fastModMultiplier = HashHelpers.GetFastModMultiplier((uint)size); -#endif return size; } From c0c39b1656746f4f35699b5fbc1c32afa03bedc8 Mon Sep 17 00:00:00 2001 From: Sam Harwell Date: Fri, 2 Jul 2021 10:18:01 -0700 Subject: [PATCH 011/413] Use segmented arrays internally --- .../Collections/SegmentedHashSet`1.cs | 86 +++++++++---------- 1 file changed, 43 insertions(+), 43 deletions(-) diff --git a/src/Dependencies/Collections/SegmentedHashSet`1.cs b/src/Dependencies/Collections/SegmentedHashSet`1.cs index 6f395076584c0..6eb2a0c96e485 100644 --- a/src/Dependencies/Collections/SegmentedHashSet`1.cs +++ b/src/Dependencies/Collections/SegmentedHashSet`1.cs @@ -47,8 +47,8 @@ private const bool SupportsComparerDevirtualization private const int ShrinkThreshold = 3; private const int StartOfFreeList = -3; - private int[]? _buckets; - private Entry[]? _entries; + private SegmentedArray _buckets; + private SegmentedArray _entries; private ulong _fastModMultiplier; private int _count; private int _freeList; @@ -111,7 +111,7 @@ public SegmentedHashSet(IEnumerable collection, IEqualityComparer? 
compare UnionWith(collection); - if (_count > 0 && _entries!.Length / _count > ShrinkThreshold) + if (_count > 0 && _entries.Length / _count > ShrinkThreshold) { TrimExcess(); } @@ -142,13 +142,13 @@ private void ConstructFrom(SegmentedHashSet source) return; } - var capacity = source._buckets!.Length; + var capacity = source._buckets.Length; var threshold = HashHelpers.ExpandPrime(source.Count + 1); if (threshold >= capacity) { - _buckets = (int[])source._buckets.Clone(); - _entries = (Entry[])source._entries!.Clone(); + _buckets = (SegmentedArray)source._buckets.Clone(); + _entries = (SegmentedArray)source._entries.Clone(); _freeList = source._freeList; _freeCount = source._freeCount; _count = source._count; @@ -161,7 +161,7 @@ private void ConstructFrom(SegmentedHashSet source) var entries = source._entries; for (var i = 0; i < source._count; i++) { - ref var entry = ref entries![i]; + ref var entry = ref entries[i]; if (entry._next >= -1) { AddIfNotPresent(entry._value, out _); @@ -184,14 +184,14 @@ public void Clear() var count = _count; if (count > 0) { - Debug.Assert(_buckets != null, "_buckets should be non-null"); - Debug.Assert(_entries != null, "_entries should be non-null"); + Debug.Assert(_buckets.Length > 0, "_buckets should be non-empty"); + Debug.Assert(_entries.Length > 0, "_entries should be non-empty"); - Array.Clear(_buckets, 0, _buckets.Length); + SegmentedArray.Clear(_buckets, 0, _buckets.Length); _count = 0; _freeList = -1; _freeCount = 0; - Array.Clear(_entries, 0, count); + SegmentedArray.Clear(_entries, 0, count); } } @@ -204,10 +204,10 @@ public void Clear() private int FindItemIndex(T item) { var buckets = _buckets; - if (buckets != null) + if (buckets.Length > 0) { var entries = _entries; - Debug.Assert(entries != null, "Expected _entries to be initialized"); + Debug.Assert(entries.Length > 0, "Expected _entries to be initialized"); uint collisionCount = 0; var comparer = _comparer; @@ -290,16 +290,16 @@ private int FindItemIndex(T item) [MethodImpl(MethodImplOptions.AggressiveInlining)] private ref int GetBucketRef(int hashCode) { - var buckets = _buckets!; - return ref buckets[HashHelpers.FastMod((uint)hashCode, (uint)buckets.Length, _fastModMultiplier)]; + var buckets = _buckets; + return ref buckets[(int)HashHelpers.FastMod((uint)hashCode, (uint)buckets.Length, _fastModMultiplier)]; } public bool Remove(T item) { - if (_buckets != null) + if (_buckets.Length > 0) { var entries = _entries; - Debug.Assert(entries != null, "entries should be non-null"); + Debug.Assert(entries.Length > 0, "entries should be non-empty"); uint collisionCount = 0; var last = -1; @@ -390,12 +390,12 @@ public bool Remove(T item) /// public bool TryGetValue(T equalValue, [MaybeNullWhen(false)] out T actualValue) { - if (_buckets != null) + if (_buckets.Length > 0) { var index = FindItemIndex(equalValue); if (index >= 0) { - actualValue = _entries![index]._value; + actualValue = _entries[index]._value; return true; } } @@ -801,7 +801,7 @@ public void CopyTo(T[] array, int arrayIndex, int count) var entries = _entries; for (var i = 0; i < _count && count != 0; i++) { - ref var entry = ref entries![i]; + ref var entry = ref entries[i]; if (entry._next >= -1) { array[arrayIndex++] = entry._value; @@ -822,7 +822,7 @@ public int RemoveWhere(Predicate match) var numRemoved = 0; for (var i = 0; i < _count; i++) { - ref var entry = ref entries![i]; + ref var entry = ref entries[i]; if (entry._next >= -1) { // Cache value in case delegate removes it @@ -858,13 +858,13 @@ public int 
EnsureCapacity(int capacity) ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.capacity); } - var currentCapacity = _entries == null ? 0 : _entries.Length; + var currentCapacity = _entries.Length; if (currentCapacity >= capacity) { return currentCapacity; } - if (_buckets == null) + if (_buckets.Length == 0) { return Initialize(capacity); } @@ -878,16 +878,16 @@ public int EnsureCapacity(int capacity) private void Resize(int newSize) { - Debug.Assert(_entries != null, "_entries should be non-null"); + Debug.Assert(_entries.Length > 0, "_entries should be non-empty"); Debug.Assert(newSize >= _entries.Length); - var entries = new Entry[newSize]; + var entries = new SegmentedArray(newSize); var count = _count; - Array.Copy(_entries, entries, count); + SegmentedArray.Copy(_entries, entries, count); // Assign member variables after both arrays allocated to guard against corruption from OOM if second fails - _buckets = new int[newSize]; + _buckets = new SegmentedArray(newSize); _fastModMultiplier = HashHelpers.GetFastModMultiplier((uint)newSize); for (var i = 0; i < count; i++) { @@ -913,7 +913,7 @@ public void TrimExcess() var newSize = HashHelpers.GetPrime(capacity); var oldEntries = _entries; - var currentCapacity = oldEntries == null ? 0 : oldEntries.Length; + var currentCapacity = oldEntries.Length; if (newSize >= currentCapacity) { return; @@ -926,10 +926,10 @@ public void TrimExcess() var count = 0; for (var i = 0; i < oldCount; i++) { - var hashCode = oldEntries![i]._hashCode; // At this point, we know we have entries. + var hashCode = oldEntries[i]._hashCode; // At this point, we know we have entries. if (oldEntries[i]._next >= -1) { - ref var entry = ref entries![count]; + ref var entry = ref entries[count]; entry = oldEntries[i]; ref var bucket = ref GetBucketRef(hashCode); entry._next = bucket - 1; // Value in _buckets is 1-based @@ -956,8 +956,8 @@ public void TrimExcess() private int Initialize(int capacity) { var size = HashHelpers.GetPrime(capacity); - var buckets = new int[size]; - var entries = new Entry[size]; + var buckets = new SegmentedArray(size); + var entries = new SegmentedArray(size); // Assign member variables after both arrays are allocated to guard against corruption from OOM if second fails. _freeList = -1; @@ -974,14 +974,14 @@ private int Initialize(int capacity) /// true if the element is added to the object; false if the element is already present. 
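
With the TARGET_64BIT conditionals dropped in the previous patch, every bucket computation now goes through HashHelpers.FastMod using a multiplier captured in _fastModMultiplier whenever the table is created or resized (the GetFastModMultiplier calls in Initialize and Resize above). The underlying trick is Lemire's division-free modulo for a fixed divisor; roughly, and assuming the divisor is at most int.MaxValue as it is here (table sizes are primes well below that):

    internal static class FastModSketch
    {
        // Precomputed once per table size.
        public static ulong GetMultiplier(uint divisor) => ulong.MaxValue / divisor + 1;

        // Equivalent to value % divisor, with the division replaced by multiplies and shifts.
        public static uint Mod(uint value, uint divisor, ulong multiplier)
            => (uint)(((((multiplier * value) >> 32) + 1) * divisor) >> 32);
    }

    // e.g. FastModSketch.Mod(12345u, 17u, FastModSketch.GetMultiplier(17u)) == 12345u % 17u
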
private bool AddIfNotPresent(T value, out int location) { - if (_buckets == null) + if (_buckets.Length == 0) { Initialize(0); } - Debug.Assert(_buckets != null); + Debug.Assert(_buckets.Length > 0); var entries = _entries; - Debug.Assert(entries != null, "expected entries to be non-null"); + Debug.Assert(entries.Length > 0, "expected entries to be non-empty"); var comparer = _comparer; int hashCode; @@ -1068,7 +1068,7 @@ private bool AddIfNotPresent(T value, out int location) { index = _freeList; _freeCount--; - Debug.Assert((StartOfFreeList - entries![_freeList]._next) >= -1, "shouldn't overflow because `next` cannot underflow"); + Debug.Assert((StartOfFreeList - entries[_freeList]._next) >= -1, "shouldn't overflow because `next` cannot underflow"); _freeList = StartOfFreeList - entries[_freeList]._next; } else @@ -1085,7 +1085,7 @@ private bool AddIfNotPresent(T value, out int location) } { - ref var entry = ref entries![index]; + ref var entry = ref entries[index]; entry._hashCode = hashCode; entry._next = bucket - 1; // Value in _buckets is 1-based entry._value = value; @@ -1147,7 +1147,7 @@ private void IntersectWithHashSetWithSameComparer(SegmentedHashSet other) var entries = _entries; for (var i = 0; i < _count; i++) { - ref var entry = ref entries![i]; + ref var entry = ref entries[i]; if (entry._next >= -1) { var item = entry._value; @@ -1167,7 +1167,7 @@ private void IntersectWithHashSetWithSameComparer(SegmentedHashSet other) /// private unsafe void IntersectWithEnumerable(IEnumerable other) { - Debug.Assert(_buckets != null, "_buckets shouldn't be null; callers should check first"); + Debug.Assert(_buckets.Length > 0, "_buckets shouldn't be empty; callers should check first"); // Keep track of current last index; don't want to move past the end of our bit array // (could happen if another thread is modifying the collection). @@ -1193,7 +1193,7 @@ private unsafe void IntersectWithEnumerable(IEnumerable other) // FindFirstUnmarked method. 
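
AddIfNotPresent and FindItemIndex above deliberately fan out into three nearly identical loops. When no custom comparer is supplied and T is a value type, EqualityComparer<T>.Default.Equals is called directly so the JIT can devirtualize and inline it per instantiation; for reference types the default comparer is hoisted into a local to avoid re-reading the static property on every probe; only otherwise is the stored comparer invoked (and the SupportsComparerDevirtualization constant turns the first case off outside NETCOREAPP builds). The same decision, distilled into a small sketch:

    using System.Collections.Generic;

    internal static class ComparerDispatchSketch
    {
        public static bool AreEqual<T>(T x, T y, IEqualityComparer<T>? comparer)
        {
            if (comparer is not null)
                return comparer.Equals(x, y);

            if (typeof(T).IsValueType)
            {
                // Specialized per value type; the interface call is devirtualized away.
                return EqualityComparer<T>.Default.Equals(x, y);
            }

            // Reference types: cache the comparer in a local before the (conceptual) probe loop.
            IEqualityComparer<T> defaultComparer = EqualityComparer<T>.Default;
            return defaultComparer.Equals(x, y);
        }
    }
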
for (var i = 0; i < originalCount; i++) { - ref var entry = ref _entries![i]; + ref var entry = ref _entries[i]; if (entry._next >= -1 && !bitHelper.IsMarked(i)) { Remove(entry._value); @@ -1281,7 +1281,7 @@ private unsafe void SymmetricExceptWithEnumerable(IEnumerable other) { if (itemsToRemove.IsMarked(i)) { - Remove(_entries![i]._value); + Remove(_entries[i]._value); } } } @@ -1324,7 +1324,7 @@ private unsafe (int UniqueCount, int UnfoundCount) CheckUniqueAndUnfoundElements return (UniqueCount: 0, UnfoundCount: numElementsInOther); } - Debug.Assert((_buckets != null) && (_count > 0), "_buckets was null but count greater than 0"); + Debug.Assert((_buckets.Length > 0) && (_count > 0), "_buckets was empty but count greater than 0"); var originalCount = _count; int intArrayLength = BitHelper.ToIntArrayLength(originalCount); @@ -1409,7 +1409,7 @@ public bool MoveNext() // dictionary.count+1 could be negative if dictionary.count is int.MaxValue while ((uint)_index < (uint)_hashSet._count) { - ref var entry = ref _hashSet._entries![_index++]; + ref var entry = ref _hashSet._entries[_index++]; if (entry._next >= -1) { _current = entry._value; From 0bbcabb12b1a1c5f82278bd6148dca25ecfbd68a Mon Sep 17 00:00:00 2001 From: Sam Harwell Date: Fri, 2 Jul 2021 14:29:57 -0700 Subject: [PATCH 012/413] Update tests for SegmentedHashSet --- .../HashSet/ISet_Generic_Tests`1.cs | 41 ++- .../HashSet/SegmentedHashSet_Generic_Tests.cs | 48 ++-- .../SegmentedHashSet_Generic_Tests`1.cs | 249 +++++++----------- ...tedHashSet_IEnumerable_NonGeneric_Tests.cs | 18 +- .../Collections/HashSet/TestingTypes.cs | 71 ++--- .../Collections/List/TestBase.Generic.cs | 13 +- .../Collections/List/TestBase.NonGeneric.cs | 2 +- 7 files changed, 198 insertions(+), 244 deletions(-) diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/ISet_Generic_Tests`1.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/ISet_Generic_Tests`1.cs index f1e9dbcae03a7..5e770c6ce0170 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/ISet_Generic_Tests`1.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/ISet_Generic_Tests`1.cs @@ -1,5 +1,6 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. // NOTE: This code is derived from an implementation originally in dotnet/runtime: // https://github.com/dotnet/runtime/blob/v5.0.7/src/libraries/Common/tests/System/Collections/ISet.Generic.Tests.cs @@ -7,12 +8,14 @@ // See the commentary in https://github.com/dotnet/roslyn/pull/50156 for notes on incorporating changes made to the // reference implementation. +using System; using System.Collections.Generic; using System.Diagnostics; using System.Linq; +using Roslyn.Test.Utilities; using Xunit; -namespace System.Collections.Tests +namespace Microsoft.CodeAnalysis.UnitTests.Collections { /// /// Contains tests that ensure the correctness of any class that implements the generic @@ -25,6 +28,7 @@ namespace System.Collections.Tests /// compares it to the actual result of the set operation. 
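
CheckUniqueAndUnfoundElements above boils the whole subset/superset family down to two counters: how many distinct members of this set were found in other (UniqueCount) and how many elements of other had no match here (UnfoundCount). The public predicates then reduce to comparisons on that pair. A rough sketch of how such a result is typically consumed, assuming the same (other, returnIfUnfound) shape as the reference HashSet implementation (the callers themselves are outside this diff):

    private bool IsProperSubsetOfSketch(IEnumerable<T> other)
    {
        (int uniqueCount, int unfoundCount) = CheckUniqueAndUnfoundElements(other, returnIfUnfound: false);
        // Every element of this set was matched, and 'other' had at least one extra.
        return uniqueCount == Count && unfoundCount > 0;
    }

    // Likewise (approximately):
    //   IsSubsetOf:         uniqueCount == Count && unfoundCount >= 0
    //   SetEquals:          uniqueCount == Count && unfoundCount == 0
    //   IsProperSupersetOf: uniqueCount <  Count && unfoundCount == 0   (scanned with returnIfUnfound: true)
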
/// public abstract class ISet_Generic_Tests : ICollection_Generic_Tests + where T : notnull { #region ISet Helper methods @@ -139,7 +143,7 @@ private void Validate_ExceptWith(ISet set, IEnumerable enumerable) private void Validate_IntersectWith(ISet set, IEnumerable enumerable) { - if (set.Count == 0 || Enumerable.Count(enumerable) == 0) + if (set.Count == 0 || !Enumerable.Any(enumerable)) { set.IntersectWith(enumerable); Assert.Equal(0, set.Count); @@ -308,16 +312,16 @@ private void Validate_UnionWith(ISet set, IEnumerable enumerable) public void ISet_Generic_NullEnumerableArgument(int count) { ISet set = GenericISetFactory(count); - Assert.Throws(() => set.ExceptWith(null)); - Assert.Throws(() => set.IntersectWith(null)); - Assert.Throws(() => set.IsProperSubsetOf(null)); - Assert.Throws(() => set.IsProperSupersetOf(null)); - Assert.Throws(() => set.IsSubsetOf(null)); - Assert.Throws(() => set.IsSupersetOf(null)); - Assert.Throws(() => set.Overlaps(null)); - Assert.Throws(() => set.SetEquals(null)); - Assert.Throws(() => set.SymmetricExceptWith(null)); - Assert.Throws(() => set.UnionWith(null)); + Assert.Throws(() => set.ExceptWith(null!)); + Assert.Throws(() => set.IntersectWith(null!)); + Assert.Throws(() => set.IsProperSubsetOf(null!)); + Assert.Throws(() => set.IsProperSupersetOf(null!)); + Assert.Throws(() => set.IsSubsetOf(null!)); + Assert.Throws(() => set.IsSupersetOf(null!)); + Assert.Throws(() => set.Overlaps(null!)); + Assert.Throws(() => set.SetEquals(null!)); + Assert.Throws(() => set.SymmetricExceptWith(null!)); + Assert.Throws(() => set.UnionWith(null!)); } [Theory] @@ -422,9 +426,8 @@ public void ISet_Generic_ExceptWith_Itself(int setLength) Validate_ExceptWith(set, set); } - [Theory] + [ConditionalTheory(typeof(CoreClrOnly))] [MemberData(nameof(ValidCollectionSizes))] - [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, ".NET Framework throws InvalidOperationException")] public void ISet_Generic_IntersectWith_Itself(int setLength) { ISet set = GenericISetFactory(setLength); @@ -500,7 +503,6 @@ public void ISet_Generic_UnionWith_Itself(int setLength) #region Set Function tests on a large Set [Fact] - [OuterLoop] public void ISet_Generic_ExceptWith_LargeSet() { ISet set = GenericISetFactory(ISet_Large_Capacity); @@ -509,7 +511,6 @@ public void ISet_Generic_ExceptWith_LargeSet() } [Fact] - [OuterLoop] public void ISet_Generic_IntersectWith_LargeSet() { ISet set = GenericISetFactory(ISet_Large_Capacity); @@ -518,7 +519,6 @@ public void ISet_Generic_IntersectWith_LargeSet() } [Fact] - [OuterLoop] public void ISet_Generic_IsProperSubsetOf_LargeSet() { ISet set = GenericISetFactory(ISet_Large_Capacity); @@ -527,7 +527,6 @@ public void ISet_Generic_IsProperSubsetOf_LargeSet() } [Fact] - [OuterLoop] public void ISet_Generic_IsProperSupersetOf_LargeSet() { ISet set = GenericISetFactory(ISet_Large_Capacity); @@ -536,7 +535,6 @@ public void ISet_Generic_IsProperSupersetOf_LargeSet() } [Fact] - [OuterLoop] public void ISet_Generic_IsSubsetOf_LargeSet() { ISet set = GenericISetFactory(ISet_Large_Capacity); @@ -545,7 +543,6 @@ public void ISet_Generic_IsSubsetOf_LargeSet() } [Fact] - [OuterLoop] public void ISet_Generic_IsSupersetOf_LargeSet() { ISet set = GenericISetFactory(ISet_Large_Capacity); @@ -554,7 +551,6 @@ public void ISet_Generic_IsSupersetOf_LargeSet() } [Fact] - [OuterLoop] public void ISet_Generic_Overlaps_LargeSet() { ISet set = GenericISetFactory(ISet_Large_Capacity); @@ -563,7 +559,6 @@ public void ISet_Generic_Overlaps_LargeSet() } [Fact] - [OuterLoop] 
public void ISet_Generic_SetEquals_LargeSet() { ISet set = GenericISetFactory(ISet_Large_Capacity); @@ -572,7 +567,6 @@ public void ISet_Generic_SetEquals_LargeSet() } [Fact] - [OuterLoop] public void ISet_Generic_SymmetricExceptWith_LargeSet() { ISet set = GenericISetFactory(ISet_Large_Capacity); @@ -581,7 +575,6 @@ public void ISet_Generic_SymmetricExceptWith_LargeSet() } [Fact] - [OuterLoop] public void ISet_Generic_UnionWith_LargeSet() { ISet set = GenericISetFactory(ISet_Large_Capacity); diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests.cs index 253941240153e..3068f0fff5e8a 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests.cs @@ -1,5 +1,6 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. // NOTE: This code is derived from an implementation originally in dotnet/runtime: // https://github.com/dotnet/runtime/blob/v5.0.7/src/libraries/System.Collections/tests/Generic/HashSet/HashSet.Generic.cs @@ -7,12 +8,13 @@ // See the commentary in https://github.com/dotnet/roslyn/pull/50156 for notes on incorporating changes made to the // reference implementation. +using System; using System.Collections.Generic; -using Xunit; +using Microsoft.CodeAnalysis.Collections; -namespace System.Collections.Tests +namespace Microsoft.CodeAnalysis.UnitTests.Collections { - public class HashSet_Generic_Tests_string : HashSet_Generic_Tests + public class SegmentedHashSet_Generic_Tests_string : SegmentedHashSet_Generic_Tests { protected override string CreateT(int seed) { @@ -24,7 +26,7 @@ protected override string CreateT(int seed) } } - public class HashSet_Generic_Tests_int : HashSet_Generic_Tests + public class SegmentedHashSet_Generic_Tests_int : SegmentedHashSet_Generic_Tests { protected override int CreateT(int seed) { @@ -35,7 +37,7 @@ protected override int CreateT(int seed) protected override bool DefaultValueAllowed => true; } - public class HashSet_Generic_Tests_int_With_Comparer_WrapStructural_Int : HashSet_Generic_Tests + public class SegmentedHashSet_Generic_Tests_int_With_Comparer_WrapStructural_Int : SegmentedHashSet_Generic_Tests { protected override IEqualityComparer GetIEqualityComparer() { @@ -55,11 +57,11 @@ protected override int CreateT(int seed) protected override ISet GenericISetFactory() { - return new HashSet(new WrapStructural_Int()); + return new SegmentedHashSet(new WrapStructural_Int()); } } - public class HashSet_Generic_Tests_int_With_Comparer_WrapStructural_SimpleInt : HashSet_Generic_Tests + public class SegmentedHashSet_Generic_Tests_int_With_Comparer_WrapStructural_SimpleInt : SegmentedHashSet_Generic_Tests { protected override IEqualityComparer GetIEqualityComparer() { @@ -79,12 +81,11 @@ protected override SimpleInt CreateT(int seed) protected override ISet GenericISetFactory() { - return new HashSet(new WrapStructural_SimpleInt()); + return new SegmentedHashSet(new WrapStructural_SimpleInt()); } } - [OuterLoop] - public class HashSet_Generic_Tests_EquatableBackwardsOrder : HashSet_Generic_Tests + public class SegmentedHashSet_Generic_Tests_EquatableBackwardsOrder : SegmentedHashSet_Generic_Tests { protected override 
EquatableBackwardsOrder CreateT(int seed) { @@ -94,12 +95,11 @@ protected override EquatableBackwardsOrder CreateT(int seed) protected override ISet GenericISetFactory() { - return new HashSet(); + return new SegmentedHashSet(); } } - [OuterLoop] - public class HashSet_Generic_Tests_int_With_Comparer_SameAsDefaultComparer : HashSet_Generic_Tests + public class SegmentedHashSet_Generic_Tests_int_With_Comparer_SameAsDefaultComparer : SegmentedHashSet_Generic_Tests { protected override IEqualityComparer GetIEqualityComparer() { @@ -114,12 +114,11 @@ protected override int CreateT(int seed) protected override ISet GenericISetFactory() { - return new HashSet(new Comparer_SameAsDefaultComparer()); + return new SegmentedHashSet(new Comparer_SameAsDefaultComparer()); } } - [OuterLoop] - public class HashSet_Generic_Tests_int_With_Comparer_HashCodeAlwaysReturnsZero : HashSet_Generic_Tests + public class SegmentedHashSet_Generic_Tests_int_With_Comparer_HashCodeAlwaysReturnsZero : SegmentedHashSet_Generic_Tests { protected override IEqualityComparer GetIEqualityComparer() { @@ -134,12 +133,11 @@ protected override int CreateT(int seed) protected override ISet GenericISetFactory() { - return new HashSet(new Comparer_HashCodeAlwaysReturnsZero()); + return new SegmentedHashSet(new Comparer_HashCodeAlwaysReturnsZero()); } } - [OuterLoop] - public class HashSet_Generic_Tests_int_With_Comparer_ModOfInt : HashSet_Generic_Tests + public class SegmentedHashSet_Generic_Tests_int_With_Comparer_ModOfInt : SegmentedHashSet_Generic_Tests { protected override IEqualityComparer GetIEqualityComparer() { @@ -159,12 +157,11 @@ protected override int CreateT(int seed) protected override ISet GenericISetFactory() { - return new HashSet(new Comparer_ModOfInt(15000)); + return new SegmentedHashSet(new Comparer_ModOfInt(15000)); } } - [OuterLoop] - public class HashSet_Generic_Tests_int_With_Comparer_AbsOfInt : HashSet_Generic_Tests + public class SegmentedHashSet_Generic_Tests_int_With_Comparer_AbsOfInt : SegmentedHashSet_Generic_Tests { protected override IEqualityComparer GetIEqualityComparer() { @@ -179,12 +176,11 @@ protected override int CreateT(int seed) protected override ISet GenericISetFactory() { - return new HashSet(new Comparer_AbsOfInt()); + return new SegmentedHashSet(new Comparer_AbsOfInt()); } } - [OuterLoop] - public class HashSet_Generic_Tests_int_With_Comparer_BadIntEqualityComparer : HashSet_Generic_Tests + public class SegmentedHashSet_Generic_Tests_int_With_Comparer_BadIntEqualityComparer : SegmentedHashSet_Generic_Tests { protected override IEqualityComparer GetIEqualityComparer() { @@ -199,7 +195,7 @@ protected override int CreateT(int seed) protected override ISet GenericISetFactory() { - return new HashSet(new BadIntEqualityComparer()); + return new SegmentedHashSet(new BadIntEqualityComparer()); } } } diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests`1.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests`1.cs index 0c98e521ed4f3..7ad7b9238e273 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests`1.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_Generic_Tests`1.cs @@ -1,5 +1,6 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. 
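
Each concrete test class in the file above contributes only two things: a deterministic CreateT factory that turns a seed into a distinct element, and a GenericISetFactory that says which set (and comparer) to construct; everything else is inherited from the shared ISet/ICollection suites. Plugging in another element type would look roughly like this (hypothetical example for illustration, using the same usings as the surrounding file):

    public class SegmentedHashSet_Generic_Tests_double : SegmentedHashSet_Generic_Tests<double>
    {
        protected override double CreateT(int seed)
        {
            // Deterministic per seed, like the int and string factories above.
            var rand = new Random(seed);
            return rand.NextDouble();
        }

        protected override ISet<double> GenericISetFactory()
        {
            return new SegmentedHashSet<double>();
        }
    }
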
// NOTE: This code is derived from an implementation originally in dotnet/runtime: // https://github.com/dotnet/runtime/blob/v5.0.7/src/libraries/System.Collections/tests/Generic/HashSet/HashSet.Generic.Tests.cs @@ -7,30 +8,32 @@ // See the commentary in https://github.com/dotnet/roslyn/pull/50156 for notes on incorporating changes made to the // reference implementation. +using System; +using System.Collections; using System.Collections.Generic; -using System.IO; using System.Linq; -using System.Runtime.Serialization.Formatters.Binary; +using Microsoft.CodeAnalysis.Collections; using Xunit; -namespace System.Collections.Tests +namespace Microsoft.CodeAnalysis.UnitTests.Collections { /// - /// Contains tests that ensure the correctness of the HashSet class. + /// Contains tests that ensure the correctness of the SegmentedHashSet class. /// - public abstract class HashSet_Generic_Tests : ISet_Generic_Tests + public abstract class SegmentedHashSet_Generic_Tests : ISet_Generic_Tests + where T : notnull { #region ISet Helper Methods protected override bool ResetImplemented => true; - protected override ModifyOperation ModifyEnumeratorThrows => PlatformDetection.IsNetFramework ? base.ModifyEnumeratorThrows : (base.ModifyEnumeratorAllowed & ~(ModifyOperation.Remove | ModifyOperation.Clear)); + protected override ModifyOperation ModifyEnumeratorThrows => base.ModifyEnumeratorAllowed & ~(ModifyOperation.Remove | ModifyOperation.Clear); - protected override ModifyOperation ModifyEnumeratorAllowed => PlatformDetection.IsNetFramework ? base.ModifyEnumeratorAllowed : ModifyOperation.Overwrite | ModifyOperation.Remove | ModifyOperation.Clear; + protected override ModifyOperation ModifyEnumeratorAllowed => ModifyOperation.Overwrite | ModifyOperation.Remove | ModifyOperation.Clear; protected override ISet GenericISetFactory() { - return new HashSet(); + return new SegmentedHashSet(); } #endregion @@ -50,7 +53,7 @@ private static IEnumerable NonSquares(int limit) [Fact] public void HashSet_Generic_Constructor() { - HashSet set = new HashSet(); + SegmentedHashSet set = new SegmentedHashSet(); Assert.Empty(set); } @@ -58,7 +61,7 @@ public void HashSet_Generic_Constructor() public void HashSet_Generic_Constructor_IEqualityComparer() { IEqualityComparer comparer = GetIEqualityComparer(); - HashSet set = new HashSet(comparer); + SegmentedHashSet set = new SegmentedHashSet(comparer); if (comparer == null) Assert.Equal(EqualityComparer.Default, set.Comparer); else @@ -68,8 +71,8 @@ public void HashSet_Generic_Constructor_IEqualityComparer() [Fact] public void HashSet_Generic_Constructor_NullIEqualityComparer() { - IEqualityComparer comparer = null; - HashSet set = new HashSet(comparer); + IEqualityComparer? 
comparer = null; + SegmentedHashSet set = new SegmentedHashSet(comparer); if (comparer == null) Assert.Equal(EqualityComparer.Default, set.Comparer); else @@ -83,7 +86,7 @@ public void HashSet_Generic_Constructor_IEnumerable(EnumerableType enumerableTyp _ = setLength; _ = numberOfMatchingElements; IEnumerable enumerable = CreateEnumerable(enumerableType, null, enumerableLength, 0, numberOfDuplicateElements); - HashSet set = new HashSet(enumerable); + SegmentedHashSet set = new SegmentedHashSet(enumerable); Assert.True(set.SetEquals(enumerable)); } @@ -92,8 +95,8 @@ public void HashSet_Generic_Constructor_IEnumerable(EnumerableType enumerableTyp public void HashSet_Generic_Constructor_IEnumerable_WithManyDuplicates(int count) { IEnumerable items = CreateEnumerable(EnumerableType.List, null, count, 0, 0); - HashSet hashSetFromDuplicates = new HashSet(Enumerable.Range(0, 40).SelectMany(i => items).ToArray()); - HashSet hashSetFromNoDuplicates = new HashSet(items); + SegmentedHashSet hashSetFromDuplicates = new SegmentedHashSet(Enumerable.Range(0, 40).SelectMany(i => items).ToArray()); + SegmentedHashSet hashSetFromNoDuplicates = new SegmentedHashSet(items); Assert.True(hashSetFromNoDuplicates.SetEquals(hashSetFromDuplicates)); } @@ -101,21 +104,21 @@ public void HashSet_Generic_Constructor_IEnumerable_WithManyDuplicates(int count [MemberData(nameof(ValidCollectionSizes))] public void HashSet_Generic_Constructor_HashSet_SparselyFilled(int count) { - HashSet source = (HashSet)CreateEnumerable(EnumerableType.HashSet, null, count, 0, 0); + SegmentedHashSet source = (SegmentedHashSet)CreateEnumerable(EnumerableType.SegmentedHashSet, null, count, 0, 0); List sourceElements = source.ToList(); foreach (int i in NonSquares(count)) source.Remove(sourceElements[i]);// Unevenly spaced survivors increases chance of catching any spacing-related bugs. 
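
HashSet_Generic_Constructor_HashSet_SparselyFilled above knocks out the survivors at the NonSquares(count) positions so the source set has unevenly spaced holes before it is copied into a new SegmentedHashSet. The helper's body is only unchanged context in this diff; presumably it yields every index below the limit that is not a perfect square, along these lines:

    private static IEnumerable<int> NonSquares(int limit)
    {
        for (int i = 0; i < limit; i++)
        {
            int root = (int)Math.Sqrt(i);
            if (root * root != i)     // skips 0, 1, 4, 9, ...
                yield return i;
        }
    }
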
- HashSet set = new HashSet(source, GetIEqualityComparer()); + SegmentedHashSet set = new SegmentedHashSet(source, GetIEqualityComparer()); Assert.True(set.SetEquals(source)); } [Fact] public void HashSet_Generic_Constructor_IEnumerable_Null() { - Assert.Throws(() => new HashSet((IEnumerable)null)); - Assert.Throws(() => new HashSet((IEnumerable)null, EqualityComparer.Default)); + Assert.Throws(() => new SegmentedHashSet((IEnumerable)null!)); + Assert.Throws(() => new SegmentedHashSet((IEnumerable)null!, EqualityComparer.Default)); } [Theory] @@ -126,7 +129,7 @@ public void HashSet_Generic_Constructor_IEnumerable_IEqualityComparer(Enumerable _ = numberOfMatchingElements; _ = numberOfDuplicateElements; IEnumerable enumerable = CreateEnumerable(enumerableType, null, enumerableLength, 0, 0); - HashSet set = new HashSet(enumerable, GetIEqualityComparer()); + SegmentedHashSet set = new SegmentedHashSet(enumerable, GetIEqualityComparer()); Assert.True(set.SetEquals(enumerable)); } @@ -138,7 +141,7 @@ public void HashSet_Generic_Constructor_IEnumerable_IEqualityComparer(Enumerable [MemberData(nameof(ValidCollectionSizes))] public void HashSet_Generic_RemoveWhere_AllElements(int setLength) { - HashSet set = (HashSet)GenericISetFactory(setLength); + SegmentedHashSet set = (SegmentedHashSet)GenericISetFactory(setLength); int removedCount = set.RemoveWhere((value) => { return true; }); Assert.Equal(setLength, removedCount); } @@ -147,7 +150,7 @@ public void HashSet_Generic_RemoveWhere_AllElements(int setLength) [MemberData(nameof(ValidCollectionSizes))] public void HashSet_Generic_RemoveWhere_NoElements(int setLength) { - HashSet set = (HashSet)GenericISetFactory(setLength); + SegmentedHashSet set = (SegmentedHashSet)GenericISetFactory(setLength); int removedCount = set.RemoveWhere((value) => { return false; }); Assert.Equal(0, removedCount); Assert.Equal(setLength, set.Count); @@ -157,8 +160,8 @@ public void HashSet_Generic_RemoveWhere_NoElements(int setLength) public void HashSet_Generic_RemoveWhere_NewObject() // Regression Dev10_624201 { object[] array = new object[2]; - object obj = new object(); - HashSet set = new HashSet(); + object obj = new(); + SegmentedHashSet set = new SegmentedHashSet(); set.Add(obj); set.Remove(obj); @@ -171,8 +174,8 @@ public void HashSet_Generic_RemoveWhere_NewObject() // Regression Dev10_624201 [MemberData(nameof(ValidCollectionSizes))] public void HashSet_Generic_RemoveWhere_NullMatchPredicate(int setLength) { - HashSet set = (HashSet)GenericISetFactory(setLength); - Assert.Throws(() => set.RemoveWhere(null)); + SegmentedHashSet set = (SegmentedHashSet)GenericISetFactory(setLength); + Assert.Throws(() => set.RemoveWhere(null!)); } #endregion @@ -183,7 +186,7 @@ public void HashSet_Generic_RemoveWhere_NullMatchPredicate(int setLength) [MemberData(nameof(ValidCollectionSizes))] public void HashSet_Generic_TrimExcess_OnValidSetThatHasntBeenRemovedFrom(int setLength) { - HashSet set = (HashSet)GenericISetFactory(setLength); + SegmentedHashSet set = (SegmentedHashSet)GenericISetFactory(setLength); set.TrimExcess(); } @@ -191,7 +194,7 @@ public void HashSet_Generic_TrimExcess_OnValidSetThatHasntBeenRemovedFrom(int se [MemberData(nameof(ValidCollectionSizes))] public void HashSet_Generic_TrimExcess_Repeatedly(int setLength) { - HashSet set = (HashSet)GenericISetFactory(setLength); + SegmentedHashSet set = (SegmentedHashSet)GenericISetFactory(setLength); List expected = set.ToList(); set.TrimExcess(); set.TrimExcess(); @@ -205,7 +208,7 @@ public void 
HashSet_Generic_TrimExcess_AfterRemovingOneElement(int setLength) { if (setLength > 0) { - HashSet set = (HashSet)GenericISetFactory(setLength); + SegmentedHashSet set = (SegmentedHashSet)GenericISetFactory(setLength); List expected = set.ToList(); T elementToRemove = set.ElementAt(0); @@ -224,7 +227,7 @@ public void HashSet_Generic_TrimExcess_AfterClearingAndAddingSomeElementsBack(in { if (setLength > 0) { - HashSet set = (HashSet)GenericISetFactory(setLength); + SegmentedHashSet set = (SegmentedHashSet)GenericISetFactory(setLength); set.TrimExcess(); set.Clear(); set.TrimExcess(); @@ -242,7 +245,7 @@ public void HashSet_Generic_TrimExcess_AfterClearingAndAddingAllElementsBack(int { if (setLength > 0) { - HashSet set = (HashSet)GenericISetFactory(setLength); + SegmentedHashSet set = (SegmentedHashSet)GenericISetFactory(setLength); set.TrimExcess(); set.Clear(); set.TrimExcess(); @@ -262,7 +265,7 @@ public void HashSet_Generic_TrimExcess_AfterClearingAndAddingAllElementsBack(int [MemberData(nameof(ValidCollectionSizes))] public void HashSet_Generic_CopyTo_NegativeCount_ThrowsArgumentOutOfRangeException(int count) { - HashSet set = (HashSet)GenericISetFactory(count); + SegmentedHashSet set = (SegmentedHashSet)GenericISetFactory(count); T[] arr = new T[count]; Assert.Throws(() => set.CopyTo(arr, 0, -1)); Assert.Throws(() => set.CopyTo(arr, 0, int.MinValue)); @@ -272,7 +275,7 @@ public void HashSet_Generic_CopyTo_NegativeCount_ThrowsArgumentOutOfRangeExcepti [MemberData(nameof(ValidCollectionSizes))] public void HashSet_Generic_CopyTo_NoIndexDefaultsToZero(int count) { - HashSet set = (HashSet)GenericISetFactory(count); + SegmentedHashSet set = (SegmentedHashSet)GenericISetFactory(count); T[] arr1 = new T[count]; T[] arr2 = new T[count]; set.CopyTo(arr1); @@ -289,28 +292,28 @@ public void SetComparer_SetEqualsTests() { List objects = new List() { CreateT(1), CreateT(2), CreateT(3), CreateT(4), CreateT(5), CreateT(6) }; - var set = new HashSet>() + var set = new SegmentedHashSet>() { - new HashSet { objects[0], objects[1], objects[2] }, - new HashSet { objects[3], objects[4], objects[5] } + new SegmentedHashSet { objects[0], objects[1], objects[2] }, + new SegmentedHashSet { objects[3], objects[4], objects[5] } }; - var noComparerSet = new HashSet>() + var noComparerSet = new SegmentedHashSet>() { - new HashSet { objects[0], objects[1], objects[2] }, - new HashSet { objects[3], objects[4], objects[5] } + new SegmentedHashSet { objects[0], objects[1], objects[2] }, + new SegmentedHashSet { objects[3], objects[4], objects[5] } }; - var comparerSet1 = new HashSet>(HashSet.CreateSetComparer()) + var comparerSet1 = new SegmentedHashSet>(SegmentedHashSet.CreateSetComparer()) { - new HashSet { objects[0], objects[1], objects[2] }, - new HashSet { objects[3], objects[4], objects[5] } + new SegmentedHashSet { objects[0], objects[1], objects[2] }, + new SegmentedHashSet { objects[3], objects[4], objects[5] } }; - var comparerSet2 = new HashSet>(HashSet.CreateSetComparer()) + var comparerSet2 = new SegmentedHashSet>(SegmentedHashSet.CreateSetComparer()) { - new HashSet { objects[3], objects[4], objects[5] }, - new HashSet { objects[0], objects[1], objects[2] } + new SegmentedHashSet { objects[3], objects[4], objects[5] }, + new SegmentedHashSet { objects[0], objects[1], objects[2] } }; Assert.False(noComparerSet.SetEquals(set)); @@ -323,26 +326,26 @@ public void SetComparer_SequenceEqualTests() { List objects = new List() { CreateT(1), CreateT(2), CreateT(3), CreateT(4), CreateT(5), CreateT(6) }; - var 
set = new HashSet>() + var set = new SegmentedHashSet>() { - new HashSet { objects[0], objects[1], objects[2] }, - new HashSet { objects[3], objects[4], objects[5] } + new SegmentedHashSet { objects[0], objects[1], objects[2] }, + new SegmentedHashSet { objects[3], objects[4], objects[5] } }; - var noComparerSet = new HashSet>() + var noComparerSet = new SegmentedHashSet>() { - new HashSet { objects[0], objects[1], objects[2] }, - new HashSet { objects[3], objects[4], objects[5] } + new SegmentedHashSet { objects[0], objects[1], objects[2] }, + new SegmentedHashSet { objects[3], objects[4], objects[5] } }; - var comparerSet = new HashSet>(HashSet.CreateSetComparer()) + var comparerSet = new SegmentedHashSet>(SegmentedHashSet.CreateSetComparer()) { - new HashSet { objects[0], objects[1], objects[2] }, - new HashSet { objects[3], objects[4], objects[5] } + new SegmentedHashSet { objects[0], objects[1], objects[2] }, + new SegmentedHashSet { objects[3], objects[4], objects[5] } }; Assert.False(noComparerSet.SequenceEqual(set)); - Assert.True(noComparerSet.SequenceEqual(set, HashSet.CreateSetComparer())); + Assert.True(noComparerSet.SequenceEqual(set, SegmentedHashSet.CreateSetComparer())); Assert.False(comparerSet.SequenceEqual(set)); } @@ -351,7 +354,7 @@ public void SetComparer_SequenceEqualTests() [Fact] public void CanBeCastedToISet() { - HashSet set = new HashSet(); + SegmentedHashSet set = new SegmentedHashSet(); ISet iset = (set as ISet); Assert.NotNull(iset); } @@ -360,7 +363,7 @@ public void CanBeCastedToISet() [MemberData(nameof(ValidCollectionSizes))] public void HashSet_Generic_Constructor_int(int capacity) { - HashSet set = new HashSet(capacity); + SegmentedHashSet set = new SegmentedHashSet(capacity); Assert.Equal(0, set.Count); } @@ -368,7 +371,7 @@ public void HashSet_Generic_Constructor_int(int capacity) [MemberData(nameof(ValidCollectionSizes))] public void HashSet_Generic_Constructor_int_AddUpToAndBeyondCapacity(int capacity) { - HashSet set = new HashSet(capacity); + SegmentedHashSet set = new SegmentedHashSet(capacity); AddToCollection(set, capacity); Assert.Equal(capacity, set.Count); @@ -382,7 +385,7 @@ public void HashSet_Generic_Constructor_Capacity_ToNextPrimeNumber() { // Highest pre-computed number + 1. const int Capacity = 7199370; - var set = new HashSet(Capacity); + var set = new SegmentedHashSet(Capacity); // Assert that the HashTable's capacity is set to the descendant prime number of the given one. 
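
The SetComparer tests above exercise SegmentedHashSet<T>.CreateSetComparer(), which compares whole sets structurally: two inner sets are equal when they hold the same elements regardless of insertion order, and they hash consistently, so sets can themselves serve as set elements or dictionary keys. A small usage sketch:

    var comparer = SegmentedHashSet<int>.CreateSetComparer();

    var a = new SegmentedHashSet<int> { 1, 2, 3 };
    var b = new SegmentedHashSet<int> { 3, 2, 1 };

    bool structurallyEqual = comparer.Equals(a, b);                        // true: same members
    bool sameHash = comparer.GetHashCode(a) == comparer.GetHashCode(b);    // also true

    // Without the comparer, an outer set falls back to reference equality for the inner sets:
    var outer = new SegmentedHashSet<SegmentedHashSet<int>>(comparer) { a };
    bool containsB = outer.Contains(b);                                    // true only because of the comparer
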
const int NextPrime = 7199371; @@ -392,8 +395,8 @@ public void HashSet_Generic_Constructor_Capacity_ToNextPrimeNumber() [Fact] public void HashSet_Generic_Constructor_int_Negative_ThrowsArgumentOutOfRangeException() { - AssertExtensions.Throws("capacity", () => new HashSet(-1)); - AssertExtensions.Throws("capacity", () => new HashSet(int.MinValue)); + Assert.Throws("capacity", () => new SegmentedHashSet(-1)); + Assert.Throws("capacity", () => new SegmentedHashSet(int.MinValue)); } [Theory] @@ -401,7 +404,7 @@ public void HashSet_Generic_Constructor_int_Negative_ThrowsArgumentOutOfRangeExc public void HashSet_Generic_Constructor_int_IEqualityComparer(int capacity) { IEqualityComparer comparer = GetIEqualityComparer(); - HashSet set = new HashSet(capacity, comparer); + SegmentedHashSet set = new SegmentedHashSet(capacity, comparer); Assert.Equal(0, set.Count); if (comparer == null) Assert.Equal(EqualityComparer.Default, set.Comparer); @@ -414,7 +417,7 @@ public void HashSet_Generic_Constructor_int_IEqualityComparer(int capacity) public void HashSet_Generic_Constructor_int_IEqualityComparer_AddUpToAndBeyondCapacity(int capacity) { IEqualityComparer comparer = GetIEqualityComparer(); - HashSet set = new HashSet(capacity, comparer); + SegmentedHashSet set = new SegmentedHashSet(capacity, comparer); AddToCollection(set, capacity); Assert.Equal(capacity, set.Count); @@ -427,8 +430,8 @@ public void HashSet_Generic_Constructor_int_IEqualityComparer_AddUpToAndBeyondCa public void HashSet_Generic_Constructor_int_IEqualityComparer_Negative_ThrowsArgumentOutOfRangeException() { IEqualityComparer comparer = GetIEqualityComparer(); - AssertExtensions.Throws("capacity", () => new HashSet(-1, comparer)); - AssertExtensions.Throws("capacity", () => new HashSet(int.MinValue, comparer)); + Assert.Throws("capacity", () => new SegmentedHashSet(-1, comparer)); + Assert.Throws("capacity", () => new SegmentedHashSet(int.MinValue, comparer)); } #region TryGetValue @@ -437,14 +440,13 @@ public void HashSet_Generic_Constructor_int_IEqualityComparer_Negative_ThrowsArg public void HashSet_Generic_TryGetValue_Contains() { T value = CreateT(1); - HashSet set = new HashSet { value }; + SegmentedHashSet set = new SegmentedHashSet { value }; T equalValue = CreateT(1); - T actualValue; - Assert.True(set.TryGetValue(equalValue, out actualValue)); + Assert.True(set.TryGetValue(equalValue, out T? actualValue)); Assert.Equal(value, actualValue); if (!typeof(T).IsValueType) { - Assert.Same((object)value, (object)actualValue); + Assert.Same((object)value, (object?)actualValue); } } @@ -452,14 +454,16 @@ public void HashSet_Generic_TryGetValue_Contains() public void HashSet_Generic_TryGetValue_Contains_OverwriteOutputParam() { T value = CreateT(1); - HashSet set = new HashSet { value }; + SegmentedHashSet set = new SegmentedHashSet { value }; T equalValue = CreateT(1); - T actualValue = CreateT(2); +#pragma warning disable IDE0059 // Unnecessary assignment of a value (intentional for the test) + T? 
actualValue = CreateT(2); +#pragma warning restore IDE0059 // Unnecessary assignment of a value Assert.True(set.TryGetValue(equalValue, out actualValue)); Assert.Equal(value, actualValue); if (!typeof(T).IsValueType) { - Assert.Same((object)value, (object)actualValue); + Assert.Same((object)value, (object?)actualValue); } } @@ -467,10 +471,9 @@ public void HashSet_Generic_TryGetValue_Contains_OverwriteOutputParam() public void HashSet_Generic_TryGetValue_NotContains() { T value = CreateT(1); - HashSet set = new HashSet { value }; + SegmentedHashSet set = new SegmentedHashSet { value }; T equalValue = CreateT(2); - T actualValue; - Assert.False(set.TryGetValue(equalValue, out actualValue)); + Assert.False(set.TryGetValue(equalValue, out T? actualValue)); Assert.Equal(default(T), actualValue); } @@ -478,9 +481,11 @@ public void HashSet_Generic_TryGetValue_NotContains() public void HashSet_Generic_TryGetValue_NotContains_OverwriteOutputParam() { T value = CreateT(1); - HashSet set = new HashSet { value }; + SegmentedHashSet set = new SegmentedHashSet { value }; T equalValue = CreateT(2); - T actualValue = equalValue; +#pragma warning disable IDE0059 // Unnecessary assignment of a value (intentional for the test) + T? actualValue = equalValue; +#pragma warning restore IDE0059 // Unnecessary assignment of a value Assert.False(set.TryGetValue(equalValue, out actualValue)); Assert.Equal(default(T), actualValue); } @@ -493,7 +498,7 @@ public void HashSet_Generic_TryGetValue_NotContains_OverwriteOutputParam() [MemberData(nameof(ValidCollectionSizes))] public void EnsureCapacity_Generic_RequestingLargerCapacity_DoesNotInvalidateEnumeration(int setLength) { - HashSet set = (HashSet)(GenericISetFactory(setLength)); + SegmentedHashSet set = (SegmentedHashSet)(GenericISetFactory(setLength)); var capacity = set.EnsureCapacity(0); IEnumerator valuesEnum = set.GetEnumerator(); IEnumerator valuesListEnum = new List(set).GetEnumerator(); @@ -510,14 +515,14 @@ public void EnsureCapacity_Generic_RequestingLargerCapacity_DoesNotInvalidateEnu [Fact] public void EnsureCapacity_Generic_NegativeCapacityRequested_Throws() { - var set = new HashSet(); - AssertExtensions.Throws("capacity", () => set.EnsureCapacity(-1)); + var set = new SegmentedHashSet(); + Assert.Throws("capacity", () => set.EnsureCapacity(-1)); } [Fact] public void EnsureCapacity_Generic_HashsetNotInitialized_RequestedZero_ReturnsZero() { - var set = new HashSet(); + var set = new SegmentedHashSet(); Assert.Equal(0, set.EnsureCapacity(0)); } @@ -528,7 +533,7 @@ public void EnsureCapacity_Generic_HashsetNotInitialized_RequestedZero_ReturnsZe [InlineData(4)] public void EnsureCapacity_Generic_HashsetNotInitialized_RequestedNonZero_CapacityIsSetToAtLeastTheRequested(int requestedCapacity) { - var set = new HashSet(); + var set = new SegmentedHashSet(); Assert.InRange(set.EnsureCapacity(requestedCapacity), requestedCapacity, int.MaxValue); } @@ -537,12 +542,12 @@ public void EnsureCapacity_Generic_HashsetNotInitialized_RequestedNonZero_Capaci [InlineData(7)] public void EnsureCapacity_Generic_RequestedCapacitySmallerThanCurrent_CapacityUnchanged(int currentCapacity) { - HashSet set; + SegmentedHashSet set; // assert capacity remains the same when ensuring a capacity smaller or equal than existing for (int i = 0; i <= currentCapacity; i++) { - set = new HashSet(currentCapacity); + set = new SegmentedHashSet(currentCapacity); Assert.Equal(currentCapacity, set.EnsureCapacity(i)); } } @@ -552,10 +557,10 @@ public void 
EnsureCapacity_Generic_RequestedCapacitySmallerThanCurrent_CapacityU [InlineData(89)] public void EnsureCapacity_Generic_ExistingCapacityRequested_SameValueReturned(int capacity) { - var set = new HashSet(capacity); + var set = new SegmentedHashSet(capacity); Assert.Equal(capacity, set.EnsureCapacity(capacity)); - set = (HashSet)GenericISetFactory(capacity); + set = (SegmentedHashSet)GenericISetFactory(capacity); Assert.Equal(capacity, set.EnsureCapacity(capacity)); } @@ -567,15 +572,15 @@ public void EnsureCapacity_Generic_ExistingCapacityRequested_SameValueReturned(i [InlineData(4)] public void EnsureCapacity_Generic_EnsureCapacityCalledTwice_ReturnsSameValue(int setLength) { - HashSet set = (HashSet)GenericISetFactory(setLength); + SegmentedHashSet set = (SegmentedHashSet)GenericISetFactory(setLength); int capacity = set.EnsureCapacity(0); Assert.Equal(capacity, set.EnsureCapacity(0)); - set = (HashSet)GenericISetFactory(setLength); + set = (SegmentedHashSet)GenericISetFactory(setLength); capacity = set.EnsureCapacity(setLength); Assert.Equal(capacity, set.EnsureCapacity(setLength)); - set = (HashSet)GenericISetFactory(setLength); + set = (SegmentedHashSet)GenericISetFactory(setLength); capacity = set.EnsureCapacity(setLength + 1); Assert.Equal(capacity, set.EnsureCapacity(setLength + 1)); } @@ -587,7 +592,7 @@ public void EnsureCapacity_Generic_EnsureCapacityCalledTwice_ReturnsSameValue(in [InlineData(8)] public void EnsureCapacity_Generic_HashsetNotEmpty_RequestedSmallerThanCount_ReturnsAtLeastSizeOfCount(int setLength) { - HashSet set = (HashSet)GenericISetFactory(setLength); + SegmentedHashSet set = (SegmentedHashSet)GenericISetFactory(setLength); Assert.InRange(set.EnsureCapacity(setLength - 1), setLength, int.MaxValue); } @@ -596,7 +601,7 @@ public void EnsureCapacity_Generic_HashsetNotEmpty_RequestedSmallerThanCount_Ret [InlineData(20)] public void EnsureCapacity_Generic_HashsetNotEmpty_SetsToAtLeastTheRequested(int setLength) { - HashSet set = (HashSet)GenericISetFactory(setLength); + SegmentedHashSet set = (SegmentedHashSet)GenericISetFactory(setLength); // get current capacity int currentCapacity = set.EnsureCapacity(0); @@ -609,13 +614,13 @@ public void EnsureCapacity_Generic_HashsetNotEmpty_SetsToAtLeastTheRequested(int [Fact] public void EnsureCapacity_Generic_CapacityIsSetToPrimeNumberLargerOrEqualToRequested() { - var set = new HashSet(); + var set = new SegmentedHashSet(); Assert.Equal(17, set.EnsureCapacity(17)); - set = new HashSet(); + set = new SegmentedHashSet(); Assert.Equal(17, set.EnsureCapacity(15)); - set = new HashSet(); + set = new SegmentedHashSet(); Assert.Equal(17, set.EnsureCapacity(13)); } @@ -624,7 +629,7 @@ public void EnsureCapacity_Generic_CapacityIsSetToPrimeNumberLargerOrEqualToRequ [InlineData(10)] public void EnsureCapacity_Generic_GrowCapacityWithFreeList(int setLength) { - HashSet set = (HashSet)GenericISetFactory(setLength); + SegmentedHashSet set = (SegmentedHashSet)GenericISetFactory(setLength); // Remove the first element to ensure we have a free list. 
Assert.True(set.Remove(set.ElementAt(0))); @@ -645,7 +650,7 @@ public void EnsureCapacity_Generic_GrowCapacityWithFreeList(int setLength) public void Remove_NonDefaultComparer_ComparerUsed(int capacity) { var c = new TrackingEqualityComparer(); - var set = new HashSet(capacity, c); + var set = new SegmentedHashSet(capacity, c); AddToCollection(set, capacity); T first = set.First(); @@ -661,57 +666,5 @@ public void Remove_NonDefaultComparer_ComparerUsed(int capacity) } #endregion - - #region Serialization - - [ConditionalFact(typeof(PlatformDetection), nameof(PlatformDetection.IsBinaryFormatterSupported))] - public void ComparerSerialization() - { - // Strings switch between randomized and non-randomized comparers, - // however this should never be observable externally. - TestComparerSerialization(EqualityComparer.Default); - - // OrdinalCaseSensitiveComparer is internal and (de)serializes as OrdinalComparer - TestComparerSerialization(StringComparer.Ordinal, "System.OrdinalComparer"); - - // OrdinalIgnoreCaseComparer is internal and (de)serializes as OrdinalComparer - TestComparerSerialization(StringComparer.OrdinalIgnoreCase, "System.OrdinalComparer"); - TestComparerSerialization(StringComparer.CurrentCulture); - TestComparerSerialization(StringComparer.CurrentCultureIgnoreCase); - TestComparerSerialization(StringComparer.InvariantCulture); - TestComparerSerialization(StringComparer.InvariantCultureIgnoreCase); - - // Check other types while here, IEquatable valuetype, nullable valuetype, and non IEquatable object - TestComparerSerialization(EqualityComparer.Default); - TestComparerSerialization(EqualityComparer.Default); - TestComparerSerialization(EqualityComparer.Default); - - static void TestComparerSerialization(IEqualityComparer equalityComparer, string internalTypeName = null) - { - var bf = new BinaryFormatter(); - var s = new MemoryStream(); - - var dict = new HashSet(equalityComparer); - - Assert.Same(equalityComparer, dict.Comparer); - - bf.Serialize(s, dict); - s.Position = 0; - dict = (HashSet)bf.Deserialize(s); - - if (internalTypeName == null) - { - Assert.IsType(equalityComparer.GetType(), dict.Comparer); - } - else - { - Assert.Equal(internalTypeName, dict.Comparer.GetType().ToString()); - } - - Assert.True(equalityComparer.Equals(dict.Comparer)); - } - } - - #endregion } } diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_IEnumerable_NonGeneric_Tests.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_IEnumerable_NonGeneric_Tests.cs index 54dbae1fbd74d..0eb9c23b473a9 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_IEnumerable_NonGeneric_Tests.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/SegmentedHashSet_IEnumerable_NonGeneric_Tests.cs @@ -1,5 +1,6 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. // NOTE: This code is derived from an implementation originally in dotnet/runtime: // https://github.com/dotnet/runtime/blob/v5.0.7/src/libraries/System.Collections/tests/Generic/HashSet/HashSet.Generic.Tests.AsNonGenericIEnumerable.cs @@ -7,15 +8,18 @@ // See the commentary in https://github.com/dotnet/roslyn/pull/50156 for notes on incorporating changes made to the // reference implementation. 
+using System; +using System.Collections; using System.Collections.Generic; +using Microsoft.CodeAnalysis.Collections; -namespace System.Collections.Tests +namespace Microsoft.CodeAnalysis.UnitTests.Collections { - public class HashSet_IEnumerable_NonGeneric_Tests : IEnumerable_NonGeneric_Tests + public class SegmentedHashSet_IEnumerable_NonGeneric_Tests : IEnumerable_NonGeneric_Tests { protected override IEnumerable NonGenericIEnumerableFactory(int count) { - var set = new HashSet(); + var set = new SegmentedHashSet(); int seed = 12354; while (set.Count < count) set.Add(CreateT(set, seed++)); @@ -24,9 +28,9 @@ protected override IEnumerable NonGenericIEnumerableFactory(int count) protected override bool Enumerator_Current_UndefinedOperation_Throws => true; - protected override ModifyOperation ModifyEnumeratorThrows => PlatformDetection.IsNetFramework ? base.ModifyEnumeratorThrows : (base.ModifyEnumeratorAllowed & ~ModifyOperation.Remove); + protected override ModifyOperation ModifyEnumeratorThrows => base.ModifyEnumeratorAllowed & ~ModifyOperation.Remove; - protected override ModifyOperation ModifyEnumeratorAllowed => PlatformDetection.IsNetFramework ? base.ModifyEnumeratorAllowed : ModifyOperation.Overwrite | ModifyOperation.Remove; + protected override ModifyOperation ModifyEnumeratorAllowed => ModifyOperation.Overwrite | ModifyOperation.Remove; /// /// Returns a set of ModifyEnumerable delegates that modify the enumerable passed to them. @@ -37,7 +41,7 @@ protected override IEnumerable GetModifyEnumerables(ModifyOper { yield return (IEnumerable enumerable) => { - HashSet casted = ((HashSet)enumerable); + SegmentedHashSet casted = ((SegmentedHashSet)enumerable); if (casted.Count > 0) { casted.Clear(); @@ -48,7 +52,7 @@ protected override IEnumerable GetModifyEnumerables(ModifyOper } } - protected string CreateT(HashSet set, int seed) + private protected static string CreateT(SegmentedHashSet set, int seed) { int stringLength = seed % 10 + 5; Random rand = new Random(seed); diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/TestingTypes.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/TestingTypes.cs index 8ce466ee32b35..0df05d92e5ab0 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/TestingTypes.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/HashSet/TestingTypes.cs @@ -1,5 +1,6 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. // NOTE: This code is derived from an implementation originally in dotnet/runtime: // https://github.com/dotnet/runtime/blob/v5.0.7/src/libraries/Common/tests/System/Collections/TestingTypes.cs @@ -7,9 +8,13 @@ // See the commentary in https://github.com/dotnet/roslyn/pull/50156 for notes on incorporating changes made to the // reference implementation. +#pragma warning disable CA1067 // Override Object.Equals(object) when implementing IEquatable + +using System; +using System.Collections; using System.Collections.Generic; -namespace System.Collections.Tests +namespace Microsoft.CodeAnalysis.UnitTests.Collections { #region Comparers and Equatables @@ -27,7 +32,7 @@ public int GetHashCode(int obj) return obj % 2; } - public override bool Equals(object obj) + public override bool Equals(object? obj) { return obj is BadIntEqualityComparer; // Equal to all other instances of this type, not to anything else. 
} @@ -39,34 +44,36 @@ public override int GetHashCode() } [Serializable] - public class EquatableBackwardsOrder : IEquatable, IComparable, IComparable + public class EquatableBackwardsOrder : IEquatable, IComparable, IComparable { - private int _value; + private readonly int _value; public EquatableBackwardsOrder(int value) { _value = value; } - public int CompareTo(EquatableBackwardsOrder other) //backwards from the usual integer ordering + public int CompareTo(EquatableBackwardsOrder? other) //backwards from the usual integer ordering { + if (other is null) + return -1; + return other._value - _value; } public override int GetHashCode() => _value; - public override bool Equals(object obj) + public override bool Equals(object? obj) { - EquatableBackwardsOrder other = obj as EquatableBackwardsOrder; - return other != null && Equals(other); + return obj is EquatableBackwardsOrder other && Equals(other); } - public bool Equals(EquatableBackwardsOrder other) + public bool Equals(EquatableBackwardsOrder? other) { - return _value == other._value; + return _value == other?._value; } - int IComparable.CompareTo(object obj) + int IComparable.CompareTo(object? obj) { if (obj != null && obj.GetType() == typeof(EquatableBackwardsOrder)) return ((EquatableBackwardsOrder)obj)._value - _value; @@ -115,7 +122,7 @@ public int GetHashCode(int obj) [Serializable] public class Comparer_ModOfInt : IEqualityComparer, IComparer { - private int _mod; + private readonly int _mod; public Comparer_ModOfInt(int mod) { @@ -185,25 +192,25 @@ public int CompareTo(SimpleInt other) return other.Val - _val; } - public int CompareTo(object obj) + public int CompareTo(object? obj) { - if (obj.GetType() == typeof(SimpleInt)) + if (obj?.GetType() == typeof(SimpleInt)) { return ((SimpleInt)obj).Val - _val; } return -1; } - public int CompareTo(object other, IComparer comparer) + public int CompareTo(object? other, IComparer comparer) { - if (other.GetType() == typeof(SimpleInt)) + if (other?.GetType() == typeof(SimpleInt)) return ((SimpleInt)other).Val - _val; return -1; } - public bool Equals(object other, IEqualityComparer comparer) + public bool Equals(object? other, IEqualityComparer comparer) { - if (other.GetType() == typeof(SimpleInt)) + if (other?.GetType() == typeof(SimpleInt)) return ((SimpleInt)other).Val == _val; return false; } @@ -261,7 +268,7 @@ public GenericComparable(int value) _value = value; } - public int CompareTo(GenericComparable other) => _value.CompareTo(other._value); + public int CompareTo(GenericComparable? other) => _value.CompareTo(other?._value); } public class NonGenericComparable : IComparable @@ -273,15 +280,15 @@ public NonGenericComparable(int value) _inner = new GenericComparable(value); } - public int CompareTo(object other) => - _inner.CompareTo(((NonGenericComparable)other)._inner); + public int CompareTo(object? other) => + _inner.CompareTo(((NonGenericComparable?)other)?._inner); } public class BadlyBehavingComparable : IComparable, IComparable { - public int CompareTo(BadlyBehavingComparable other) => 1; + public int CompareTo(BadlyBehavingComparable? other) => 1; - public int CompareTo(object other) => -1; + public int CompareTo(object? other) => -1; } public class MutatingComparable : IComparable, IComparable @@ -295,9 +302,9 @@ public MutatingComparable(int initialState) public int State => _state; - public int CompareTo(object other) => _state++; + public int CompareTo(object? 
other) => _state++; - public int CompareTo(MutatingComparable other) => _state++; + public int CompareTo(MutatingComparable? other) => _state++; } public static class ValueComparable @@ -334,7 +341,7 @@ public Equatable(int value) // Equals(object) is not implemented on purpose. // EqualityComparer is only supposed to call through to the strongly-typed Equals since we implement IEquatable. - public bool Equals(Equatable other) + public bool Equals(Equatable? other) { return other != null && Value == other.Value; } @@ -359,9 +366,9 @@ public DelegateEquatable() EqualsWorker = _ => false; } - public Func EqualsWorker { get; set; } + public Func EqualsWorker { get; set; } - public bool Equals(DelegateEquatable other) => EqualsWorker(other); + public bool Equals(DelegateEquatable? other) => EqualsWorker(other); } public struct ValueDelegateEquatable : IEquatable @@ -373,10 +380,10 @@ public struct ValueDelegateEquatable : IEquatable public sealed class TrackingEqualityComparer : IEqualityComparer { - public int EqualsCalls; - public int GetHashCodeCalls; + public int EqualsCalls { get; set; } + public int GetHashCodeCalls { get; set; } - public bool Equals(T x, T y) + public bool Equals(T? x, T? y) { EqualsCalls++; return EqualityComparer.Default.Equals(x, y); @@ -385,7 +392,7 @@ public bool Equals(T x, T y) public int GetHashCode(T obj) { GetHashCodeCalls++; - return EqualityComparer.Default.GetHashCode(obj); + return EqualityComparer.Default.GetHashCode(obj!); } } diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/List/TestBase.Generic.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/List/TestBase.Generic.cs index 191ebdf4b9121..9dd55bb59224e 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/List/TestBase.Generic.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/List/TestBase.Generic.cs @@ -12,6 +12,7 @@ using System.Collections.Generic; using System.Diagnostics; using System.Linq; +using Microsoft.CodeAnalysis.Collections; using Xunit; namespace Microsoft.CodeAnalysis.UnitTests.Collections @@ -99,9 +100,9 @@ protected IEnumerable CreateEnumerable(EnumerableType type, IEnumerable? e switch (type) { - case EnumerableType.HashSet: + case EnumerableType.SegmentedHashSet: Debug.Assert(numberOfDuplicateElements == 0, "Can not create a HashSet with duplicate elements - numberOfDuplicateElements must be zero"); - return CreateHashSet(enumerableToMatchTo, count, numberOfMatchingElements); + return CreateSegmentedHashSet(enumerableToMatchTo, count, numberOfMatchingElements); case EnumerableType.List: return CreateList(enumerableToMatchTo, count, numberOfMatchingElements, numberOfDuplicateElements); case EnumerableType.SortedSet: @@ -221,16 +222,16 @@ protected IEnumerable CreateList(IEnumerable? enumerableToMatchTo, int cou /// to it until it is full. It will begin by adding the desired number of matching, /// followed by random (deterministic) elements until the desired count is reached. /// - protected IEnumerable CreateHashSet(IEnumerable? enumerableToMatchTo, int count, int numberOfMatchingElements) + protected IEnumerable CreateSegmentedHashSet(IEnumerable? enumerableToMatchTo, int count, int numberOfMatchingElements) { - HashSet set = new HashSet(GetIEqualityComparer()); + SegmentedHashSet set = new SegmentedHashSet(GetIEqualityComparer()); int seed = 528; - List? match = null; + SegmentedList? 
match = null; // Add Matching elements if (enumerableToMatchTo != null) { - match = enumerableToMatchTo.ToList(); + match = enumerableToMatchTo.ToSegmentedList(); for (int i = 0; i < numberOfMatchingElements; i++) set.Add(match[i]); } diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/List/TestBase.NonGeneric.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/List/TestBase.NonGeneric.cs index c7d92adbb7006..eb71b35efed54 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/List/TestBase.NonGeneric.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/List/TestBase.NonGeneric.cs @@ -35,7 +35,7 @@ public static IEnumerable ValidPositiveCollectionSizes() public enum EnumerableType { - HashSet, + SegmentedHashSet, SortedSet, List, Queue, From b5d0f66bfb64079e1990d6e255bc2e99eeb61c51 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Wed, 24 Nov 2021 13:44:19 +0000 Subject: [PATCH 013/413] Update dependencies from https://github.com/dotnet/arcade build 20211123.3 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 7.0.0-beta.21569.2 -> To Version 7.0.0-beta.21573.3 --- eng/Version.Details.xml | 8 ++++---- eng/common/build.sh | 4 ---- eng/common/native/init-compiler.sh | 2 +- eng/common/sdk-task.ps1 | 3 --- eng/common/templates/job/job.yml | 1 + eng/common/tools.ps1 | 21 --------------------- eng/common/tools.sh | 7 ------- global.json | 4 ++-- 8 files changed, 8 insertions(+), 42 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 0f6ddaaf08536..33edf83e68f7c 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -13,18 +13,18 @@ - + https://github.com/dotnet/arcade - 97463777ee9a8445d4a4c5911ede0f0cd71fa8aa + 927f8d4d5036f68a5fc6d042f336bc9458027208 https://github.com/dotnet/roslyn c1d8c6f043bc80425c6828455eb57f8a404759c6 - + https://github.com/dotnet/arcade - 97463777ee9a8445d4a4c5911ede0f0cd71fa8aa + 927f8d4d5036f68a5fc6d042f336bc9458027208 diff --git a/eng/common/build.sh b/eng/common/build.sh index bc07a1c684824..55b298f16ccd1 100755 --- a/eng/common/build.sh +++ b/eng/common/build.sh @@ -187,10 +187,6 @@ function InitializeCustomToolset { } function Build { - - if [[ "$ci" == true ]]; then - TryLogClientIpAddress - fi InitializeToolset InitializeCustomToolset diff --git a/eng/common/native/init-compiler.sh b/eng/common/native/init-compiler.sh index 8c944f30b2864..03a996062a796 100644 --- a/eng/common/native/init-compiler.sh +++ b/eng/common/native/init-compiler.sh @@ -112,7 +112,7 @@ if [[ -z "$CC" ]]; then fi if [[ "$compiler" == "clang" ]]; then - if command -v "lld$desired_version" > /dev/null; then + if "$CC" -fuse-ld=lld -Wl,--version 2>&1; then # Only lld version >= 9 can be considered stable if [[ "$majorVersion" -ge 9 ]]; then LDFLAGS="-fuse-ld=lld" diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1 index 7ab9baac5c8d9..b1bca63ab1d82 100644 --- a/eng/common/sdk-task.ps1 +++ b/eng/common/sdk-task.ps1 @@ -83,9 +83,6 @@ try { } if ($restore) { - if ($ci) { - Try-LogClientIpAddress - } Build 'Restore' } diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml index 37dceb1bab0a9..7678b94ce740c 100644 --- a/eng/common/templates/job/job.yml +++ b/eng/common/templates/job/job.yml @@ -114,6 +114,7 @@ jobs: continueOnError: ${{ parameters.continueOnError }} condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) + - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 
'internal')) }}: - task: NuGetAuthenticate@0 - ${{ if or(eq(parameters.artifacts.download, 'true'), ne(parameters.artifacts.download, '')) }}: diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1 index 90b1f9fdcdb19..f1e1cb53953bc 100644 --- a/eng/common/tools.ps1 +++ b/eng/common/tools.ps1 @@ -163,9 +163,6 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) { # Disable telemetry on CI. if ($ci) { $env:DOTNET_CLI_TELEMETRY_OPTOUT=1 - - # In case of network error, try to log the current IP for reference - Try-LogClientIpAddress } # Source Build uses DotNetCoreSdkDir variable @@ -895,24 +892,6 @@ if (!$disableConfigureToolsetImport) { } } -function Try-LogClientIpAddress() -{ - Write-Host "Attempting to log this client's IP for Azure Package feed telemetry purposes" - try - { - $result = Invoke-WebRequest -Uri "http://co1r5a.msedge.net/fdv2/diagnostics.aspx" -UseBasicParsing - $lines = $result.Content.Split([Environment]::NewLine) - $socketIp = $lines | Select-String -Pattern "^Socket IP:.*" - Write-Host $socketIp - $clientIp = $lines | Select-String -Pattern "^Client IP:.*" - Write-Host $clientIp - } - catch - { - Write-Host "Unable to get this machine's effective IP address for logging: $_" - } -} - # # If $ci flag is set, turn on (and log that we did) special environment variables for improved Nuget client retry logic. # diff --git a/eng/common/tools.sh b/eng/common/tools.sh index dd7030ff5385e..e555c34269f6e 100755 --- a/eng/common/tools.sh +++ b/eng/common/tools.sh @@ -405,13 +405,6 @@ function StopProcesses { return 0 } -function TryLogClientIpAddress () { - echo 'Attempting to log this client''s IP for Azure Package feed telemetry purposes' - if command -v curl > /dev/null; then - curl -s 'http://co1r5a.msedge.net/fdv2/diagnostics.aspx' | grep ' IP: ' || true - fi -} - function MSBuild { local args=$@ if [[ "$pipelines_log" == true ]]; then diff --git a/global.json b/global.json index 7534a89b9c1dd..135e0e2d654df 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ "xcopy-msbuild": "16.10.0-preview2" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21569.2", - "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21569.2" + "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21573.3", + "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21573.3" } } From 2bcbcc89fd8e0846367d866fa29eb941552ac847 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Thu, 25 Nov 2021 13:38:42 +0000 Subject: [PATCH 014/413] Update dependencies from https://github.com/dotnet/arcade build 20211124.3 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 7.0.0-beta.21569.2 -> To Version 7.0.0-beta.21574.3 --- eng/Version.Details.xml | 8 ++++---- global.json | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 33edf83e68f7c..79ef472afcdfd 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -13,18 +13,18 @@ - + https://github.com/dotnet/arcade - 927f8d4d5036f68a5fc6d042f336bc9458027208 + 11fb2931671a47924e29b92e5fe06043fb3c6bbb https://github.com/dotnet/roslyn c1d8c6f043bc80425c6828455eb57f8a404759c6 - + https://github.com/dotnet/arcade - 927f8d4d5036f68a5fc6d042f336bc9458027208 + 11fb2931671a47924e29b92e5fe06043fb3c6bbb diff --git a/global.json b/global.json index 135e0e2d654df..b44bb1037b63a 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ "xcopy-msbuild": "16.10.0-preview2" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21573.3", - 
"Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21573.3" + "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21574.3", + "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21574.3" } } From 9693ec52d055eaeab54d5b4ce3f576407fe1a25d Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Fri, 26 Nov 2021 13:39:31 +0000 Subject: [PATCH 015/413] Update dependencies from https://github.com/dotnet/arcade build 20211126.2 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 7.0.0-beta.21569.2 -> To Version 7.0.0-beta.21576.2 --- eng/Version.Details.xml | 8 ++++---- eng/common/native/init-compiler.sh | 10 ++++------ global.json | 4 ++-- 3 files changed, 10 insertions(+), 12 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 79ef472afcdfd..43ebf703ba45b 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -13,18 +13,18 @@ - + https://github.com/dotnet/arcade - 11fb2931671a47924e29b92e5fe06043fb3c6bbb + 9c578f701e92c055ed752c3869a0f36c60630cea https://github.com/dotnet/roslyn c1d8c6f043bc80425c6828455eb57f8a404759c6 - + https://github.com/dotnet/arcade - 11fb2931671a47924e29b92e5fe06043fb3c6bbb + 9c578f701e92c055ed752c3869a0f36c60630cea diff --git a/eng/common/native/init-compiler.sh b/eng/common/native/init-compiler.sh index 03a996062a796..fd1d080e20435 100644 --- a/eng/common/native/init-compiler.sh +++ b/eng/common/native/init-compiler.sh @@ -111,12 +111,10 @@ if [[ -z "$CC" ]]; then exit 1 fi -if [[ "$compiler" == "clang" ]]; then - if "$CC" -fuse-ld=lld -Wl,--version 2>&1; then - # Only lld version >= 9 can be considered stable - if [[ "$majorVersion" -ge 9 ]]; then - LDFLAGS="-fuse-ld=lld" - fi +# Only lld version >= 9 can be considered stable +if [[ "$compiler" == "clang" && "$majorVersion" -ge 9 ]]; then + if "$CC" -fuse-ld=lld -Wl,--version 2>/dev/null; then + LDFLAGS="-fuse-ld=lld" fi fi diff --git a/global.json b/global.json index b44bb1037b63a..813759b425f87 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ "xcopy-msbuild": "16.10.0-preview2" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21574.3", - "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21574.3" + "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21576.2", + "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21576.2" } } From 215717961285fba8c3a7019811dc2c8dec9f5436 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Sat, 27 Nov 2021 13:35:18 +0000 Subject: [PATCH 016/413] Update dependencies from https://github.com/dotnet/arcade build 20211126.4 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 7.0.0-beta.21569.2 -> To Version 7.0.0-beta.21576.4 --- eng/Version.Details.xml | 8 ++++---- eng/common/native/init-compiler.sh | 3 ++- global.json | 4 ++-- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 43ebf703ba45b..3240b55a86132 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -13,18 +13,18 @@ - + https://github.com/dotnet/arcade - 9c578f701e92c055ed752c3869a0f36c60630cea + 427c05909067bb2e484116ae2239456bb45adb85 https://github.com/dotnet/roslyn c1d8c6f043bc80425c6828455eb57f8a404759c6 - + https://github.com/dotnet/arcade - 9c578f701e92c055ed752c3869a0f36c60630cea + 427c05909067bb2e484116ae2239456bb45adb85 diff --git a/eng/common/native/init-compiler.sh b/eng/common/native/init-compiler.sh index fd1d080e20435..e361e03fabdd2 100644 --- a/eng/common/native/init-compiler.sh +++ b/eng/common/native/init-compiler.sh @@ -2,6 +2,7 @@ # # This file detects the C/C++ 
compiler and exports it to the CC/CXX environment variables # +# NOTE: some scripts source this file and rely on stdout being empty, make sure to not output anything here! if [[ "$#" -lt 3 ]]; then echo "Usage..." @@ -113,7 +114,7 @@ fi # Only lld version >= 9 can be considered stable if [[ "$compiler" == "clang" && "$majorVersion" -ge 9 ]]; then - if "$CC" -fuse-ld=lld -Wl,--version 2>/dev/null; then + if "$CC" -fuse-ld=lld -Wl,--version >/dev/null 2>&1; then LDFLAGS="-fuse-ld=lld" fi fi diff --git a/global.json b/global.json index 813759b425f87..3960ed57eddd5 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ "xcopy-msbuild": "16.10.0-preview2" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21576.2", - "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21576.2" + "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21576.4", + "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21576.4" } } From 524a44d57ab8c118abdae528c20df0c8010ceb82 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Fri, 3 Dec 2021 13:40:16 +0000 Subject: [PATCH 017/413] Update dependencies from https://github.com/dotnet/arcade build 20211202.3 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 7.0.0-beta.21569.2 -> To Version 7.0.0-beta.21602.3 --- eng/Version.Details.xml | 8 ++++---- eng/common/sdl/packages.config | 2 +- eng/common/templates/job/execute-sdl.yml | 2 +- eng/common/tools.sh | 2 +- global.json | 4 ++-- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 3240b55a86132..9cfdd7f035ebe 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -13,18 +13,18 @@ - + https://github.com/dotnet/arcade - 427c05909067bb2e484116ae2239456bb45adb85 + 59775387deb609d7c62f9e713d133c34ba28ffcd https://github.com/dotnet/roslyn c1d8c6f043bc80425c6828455eb57f8a404759c6 - + https://github.com/dotnet/arcade - 427c05909067bb2e484116ae2239456bb45adb85 + 59775387deb609d7c62f9e713d133c34ba28ffcd diff --git a/eng/common/sdl/packages.config b/eng/common/sdl/packages.config index 3bd8b29ebd721..4585cfd6bba1e 100644 --- a/eng/common/sdl/packages.config +++ b/eng/common/sdl/packages.config @@ -1,4 +1,4 @@ - + diff --git a/eng/common/templates/job/execute-sdl.yml b/eng/common/templates/job/execute-sdl.yml index 3aafc82e4171a..8128f2c357052 100644 --- a/eng/common/templates/job/execute-sdl.yml +++ b/eng/common/templates/job/execute-sdl.yml @@ -54,7 +54,7 @@ jobs: # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in # sync with the packages.config file. 
- name: DefaultGuardianVersion - value: 0.53.3 + value: 0.109.0 - name: GuardianVersion value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }} - name: GuardianPackagesConfigFile diff --git a/eng/common/tools.sh b/eng/common/tools.sh index e555c34269f6e..17f0a365805d5 100755 --- a/eng/common/tools.sh +++ b/eng/common/tools.sh @@ -178,7 +178,7 @@ function InstallDotNetSdk { if [[ $# -ge 3 ]]; then architecture=$3 fi - InstallDotNet "$root" "$version" $architecture 'sdk' 'false' $runtime_source_feed $runtime_source_feed_key + InstallDotNet "$root" "$version" $architecture 'sdk' 'true' $runtime_source_feed $runtime_source_feed_key } function InstallDotNet { diff --git a/global.json b/global.json index 3960ed57eddd5..f74139efc66b9 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ "xcopy-msbuild": "16.10.0-preview2" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21576.4", - "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21576.4" + "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21602.3", + "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21602.3" } } From d4f6be4772a2e260daf82de8ce476ddb0c1f5002 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Sat, 4 Dec 2021 13:44:56 +0000 Subject: [PATCH 018/413] Update dependencies from https://github.com/dotnet/arcade build 20211203.6 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 7.0.0-beta.21569.2 -> To Version 7.0.0-beta.21603.6 --- eng/Version.Details.xml | 8 ++++---- global.json | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 9cfdd7f035ebe..2920898296228 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -13,18 +13,18 @@ - + https://github.com/dotnet/arcade - 59775387deb609d7c62f9e713d133c34ba28ffcd + b3e949192067c8acdaaae35015534f76e92d79d4 https://github.com/dotnet/roslyn c1d8c6f043bc80425c6828455eb57f8a404759c6 - + https://github.com/dotnet/arcade - 59775387deb609d7c62f9e713d133c34ba28ffcd + b3e949192067c8acdaaae35015534f76e92d79d4 diff --git a/global.json b/global.json index f74139efc66b9..14aa2fe8b0e09 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ "xcopy-msbuild": "16.10.0-preview2" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21602.3", - "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21602.3" + "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21603.6", + "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21603.6" } } From c46aa8a690f5b12f10fe72ad81ae6aefb7f128fc Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Tue, 7 Dec 2021 13:53:39 +0000 Subject: [PATCH 019/413] Update dependencies from https://github.com/dotnet/arcade build 20211206.6 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 7.0.0-beta.21569.2 -> To Version 7.0.0-beta.21606.6 --- eng/Version.Details.xml | 8 ++++---- global.json | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 2920898296228..86bf63e1e2f5e 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -13,18 +13,18 @@ - + https://github.com/dotnet/arcade - b3e949192067c8acdaaae35015534f76e92d79d4 + 5d969787afb2fd87f642458687e3ad41094ac3ab https://github.com/dotnet/roslyn c1d8c6f043bc80425c6828455eb57f8a404759c6 - + https://github.com/dotnet/arcade - b3e949192067c8acdaaae35015534f76e92d79d4 + 5d969787afb2fd87f642458687e3ad41094ac3ab diff --git a/global.json b/global.json index 14aa2fe8b0e09..a78ea4ad4aad6 100644 --- 
a/global.json +++ b/global.json @@ -12,7 +12,7 @@ "xcopy-msbuild": "16.10.0-preview2" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21603.6", - "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21603.6" + "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21606.6", + "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21606.6" } } From 98d070a790ce80cdc49dbcc0cc2932992b247d5d Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Thu, 9 Dec 2021 13:53:13 +0000 Subject: [PATCH 020/413] Update dependencies from https://github.com/dotnet/arcade build 20211208.1 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 7.0.0-beta.21569.2 -> To Version 7.0.0-beta.21608.1 --- eng/Version.Details.xml | 8 +-- eng/common/cross/armv6/sources.list.buster | 2 + eng/common/cross/build-rootfs.sh | 17 ++++- eng/common/templates/job/execute-sdl.yml | 69 +++---------------- eng/common/templates/steps/execute-sdl.yml | 68 ++++++++++++++++++ .../templates/variables/sdl-variables.yml | 7 ++ global.json | 4 +- 7 files changed, 108 insertions(+), 67 deletions(-) create mode 100644 eng/common/cross/armv6/sources.list.buster create mode 100644 eng/common/templates/steps/execute-sdl.yml create mode 100644 eng/common/templates/variables/sdl-variables.yml diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 86bf63e1e2f5e..e40a6fcabb38d 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -13,18 +13,18 @@ - + https://github.com/dotnet/arcade - 5d969787afb2fd87f642458687e3ad41094ac3ab + 200adbc809c4451973d1929a53a75502b7cada01 https://github.com/dotnet/roslyn c1d8c6f043bc80425c6828455eb57f8a404759c6 - + https://github.com/dotnet/arcade - 5d969787afb2fd87f642458687e3ad41094ac3ab + 200adbc809c4451973d1929a53a75502b7cada01 diff --git a/eng/common/cross/armv6/sources.list.buster b/eng/common/cross/armv6/sources.list.buster new file mode 100644 index 0000000000000..f27fc4fb346b6 --- /dev/null +++ b/eng/common/cross/armv6/sources.list.buster @@ -0,0 +1,2 @@ +deb http://raspbian.raspberrypi.org/raspbian/ buster main contrib non-free rpi +deb-src http://raspbian.raspberrypi.org/raspbian/ buster main contrib non-free rpi diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh index 6fa2c8aa5511d..5102245b7b5e3 100755 --- a/eng/common/cross/build-rootfs.sh +++ b/eng/common/cross/build-rootfs.sh @@ -99,6 +99,15 @@ while :; do __AlpineArch=armv7 __QEMUArch=arm ;; + armv6) + __BuildArch=armv6 + __UbuntuArch=armhf + __QEMUArch=arm + __UbuntuRepo="http://raspbian.raspberrypi.org/raspbian/" + __CodeName=buster + __LLDB_Package="liblldb-6.0-dev" + __Keyring="/usr/share/keyrings/raspbian-archive-keyring.gpg" + ;; arm64) __BuildArch=arm64 __UbuntuArch=arm64 @@ -236,6 +245,12 @@ while :; do shift done +if [ -e "$__Keyring" ]; then + __Keyring="--keyring=$__Keyring" +else + __Keyring="" +fi + if [ "$__BuildArch" == "armel" ]; then __LLDB_Package="lldb-3.5-dev" fi @@ -337,7 +352,7 @@ elif [[ "$__CodeName" == "illumos" ]]; then wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h elif [[ -n $__CodeName ]]; then - qemu-debootstrap --arch $__UbuntuArch $__CodeName $__RootfsDir $__UbuntuRepo + qemu-debootstrap $__Keyring --arch $__UbuntuArch $__CodeName $__RootfsDir $__UbuntuRepo cp $__CrossDir/$__BuildArch/sources.list.$__CodeName 
$__RootfsDir/etc/apt/sources.list chroot $__RootfsDir apt-get update chroot $__RootfsDir apt-get -f -y install diff --git a/eng/common/templates/job/execute-sdl.yml b/eng/common/templates/job/execute-sdl.yml index 8128f2c357052..8cf772b3cbf81 100644 --- a/eng/common/templates/job/execute-sdl.yml +++ b/eng/common/templates/job/execute-sdl.yml @@ -51,14 +51,9 @@ jobs: value: ${{ parameters.AzDOPipelineId }} - name: AzDOBuildId value: ${{ parameters.AzDOBuildId }} - # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in - # sync with the packages.config file. - - name: DefaultGuardianVersion - value: 0.109.0 + - template: /eng/common/templates/variables/sdl-variables.yml - name: GuardianVersion value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }} - - name: GuardianPackagesConfigFile - value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config pool: vmImage: windows-2019 steps: @@ -125,57 +120,11 @@ jobs: displayName: Extract Archive Artifacts continueOnError: ${{ parameters.sdlContinueOnError }} - - ${{ if ne(parameters.overrideGuardianVersion, '') }}: - - powershell: | - $content = Get-Content $(GuardianPackagesConfigFile) - - Write-Host "packages.config content was:`n$content" - - $content = $content.Replace('$(DefaultGuardianVersion)', '$(GuardianVersion)') - $content | Set-Content $(GuardianPackagesConfigFile) - - Write-Host "packages.config content updated to:`n$content" - displayName: Use overridden Guardian version ${{ parameters.overrideGuardianVersion }} - - - task: NuGetToolInstaller@1 - displayName: 'Install NuGet.exe' - - task: NuGetCommand@2 - displayName: 'Install Guardian' - inputs: - restoreSolution: $(Build.SourcesDirectory)\eng\common\sdl\packages.config - feedsToUse: config - nugetConfigPath: $(Build.SourcesDirectory)\eng\common\sdl\NuGet.config - externalFeedCredentials: GuardianConnect - restoreDirectory: $(Build.SourcesDirectory)\.packages - - - ${{ if ne(parameters.overrideParameters, '') }}: - - powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }} - displayName: Execute SDL - continueOnError: ${{ parameters.sdlContinueOnError }} - - ${{ if eq(parameters.overrideParameters, '') }}: - - powershell: ${{ parameters.executeAllSdlToolsScript }} - -GuardianPackageName Microsoft.Guardian.Cli.$(GuardianVersion) - -NugetPackageDirectory $(Build.SourcesDirectory)\.packages - -AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw) - ${{ parameters.additionalParameters }} - displayName: Execute SDL - continueOnError: ${{ parameters.sdlContinueOnError }} - - - ${{ if ne(parameters.publishGuardianDirectoryToPipeline, 'false') }}: - # We want to publish the Guardian results and configuration for easy diagnosis. However, the - # '.gdn' dir is a mix of configuration, results, extracted dependencies, and Guardian default - # tooling files. Some of these files are large and aren't useful during an investigation, so - # exclude them by simply deleting them before publishing. (As of writing, there is no documented - # way to selectively exclude a dir from the pipeline artifact publish task.) 
- - task: DeleteFiles@1 - displayName: Delete Guardian dependencies to avoid uploading - inputs: - SourceFolder: $(Agent.BuildDirectory)/.gdn - Contents: | - c - i - condition: succeededOrFailed() - - publish: $(Agent.BuildDirectory)/.gdn - artifact: GuardianConfiguration - displayName: Publish GuardianConfiguration - condition: succeededOrFailed() + - template: /eng/common/templates/steps/execute-sdl.yml + parameters: + overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }} + executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }} + overrideParameters: ${{ parameters.overrideParameters }} + additionalParameters: ${{ parameters.additionalParameters }} + publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }} + sdlContinueOnError: ${{ parameters.sdlContinueOnError }} diff --git a/eng/common/templates/steps/execute-sdl.yml b/eng/common/templates/steps/execute-sdl.yml new file mode 100644 index 0000000000000..7b8ee18a28d7e --- /dev/null +++ b/eng/common/templates/steps/execute-sdl.yml @@ -0,0 +1,68 @@ +parameters: + overrideGuardianVersion: '' + executeAllSdlToolsScript: '' + overrideParameters: '' + additionalParameters: '' + publishGuardianDirectoryToPipeline: false + sdlContinueOnError: false + condition: '' + +steps: +- ${{ if ne(parameters.overrideGuardianVersion, '') }}: + - powershell: | + $content = Get-Content $(GuardianPackagesConfigFile) + + Write-Host "packages.config content was:`n$content" + + $content = $content.Replace('$(DefaultGuardianVersion)', '$(GuardianVersion)') + $content | Set-Content $(GuardianPackagesConfigFile) + + Write-Host "packages.config content updated to:`n$content" + displayName: Use overridden Guardian version ${{ parameters.overrideGuardianVersion }} + +- task: NuGetToolInstaller@1 + displayName: 'Install NuGet.exe' + +- task: NuGetCommand@2 + displayName: 'Install Guardian' + inputs: + restoreSolution: $(Build.SourcesDirectory)\eng\common\sdl\packages.config + feedsToUse: config + nugetConfigPath: $(Build.SourcesDirectory)\eng\common\sdl\NuGet.config + externalFeedCredentials: GuardianConnect + restoreDirectory: $(Build.SourcesDirectory)\.packages + +- ${{ if ne(parameters.overrideParameters, '') }}: + - powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }} + displayName: Execute SDL + continueOnError: ${{ parameters.sdlContinueOnError }} + condition: ${{ parameters.condition }} + +- ${{ if eq(parameters.overrideParameters, '') }}: + - powershell: ${{ parameters.executeAllSdlToolsScript }} + -GuardianPackageName Microsoft.Guardian.Cli.$(GuardianVersion) + -NugetPackageDirectory $(Build.SourcesDirectory)\.packages + -AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw) + ${{ parameters.additionalParameters }} + displayName: Execute SDL + continueOnError: ${{ parameters.sdlContinueOnError }} + condition: ${{ parameters.condition }} + +- ${{ if ne(parameters.publishGuardianDirectoryToPipeline, 'false') }}: + # We want to publish the Guardian results and configuration for easy diagnosis. However, the + # '.gdn' dir is a mix of configuration, results, extracted dependencies, and Guardian default + # tooling files. Some of these files are large and aren't useful during an investigation, so + # exclude them by simply deleting them before publishing. (As of writing, there is no documented + # way to selectively exclude a dir from the pipeline artifact publish task.) 
+ - task: DeleteFiles@1 + displayName: Delete Guardian dependencies to avoid uploading + inputs: + SourceFolder: $(Agent.BuildDirectory)/.gdn + Contents: | + c + i + condition: succeededOrFailed() + - publish: $(Agent.BuildDirectory)/.gdn + artifact: GuardianConfiguration + displayName: Publish GuardianConfiguration + condition: succeededOrFailed() \ No newline at end of file diff --git a/eng/common/templates/variables/sdl-variables.yml b/eng/common/templates/variables/sdl-variables.yml new file mode 100644 index 0000000000000..dbdd66d4a4b3a --- /dev/null +++ b/eng/common/templates/variables/sdl-variables.yml @@ -0,0 +1,7 @@ +variables: +# The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in +# sync with the packages.config file. +- name: DefaultGuardianVersion + value: 0.109.0 +- name: GuardianPackagesConfigFile + value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config \ No newline at end of file diff --git a/global.json b/global.json index a78ea4ad4aad6..071ecc6c608bb 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ "xcopy-msbuild": "16.10.0-preview2" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21606.6", - "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21606.6" + "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21608.1", + "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21608.1" } } From 0322eabf76f6404df01f10ee10041eec3fc613d8 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Fri, 10 Dec 2021 13:49:18 +0000 Subject: [PATCH 021/413] Update dependencies from https://github.com/dotnet/arcade build 20211209.2 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 7.0.0-beta.21569.2 -> To Version 7.0.0-beta.21609.2 --- eng/Version.Details.xml | 8 ++++---- global.json | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index e40a6fcabb38d..f23a3b1429404 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -13,18 +13,18 @@ - + https://github.com/dotnet/arcade - 200adbc809c4451973d1929a53a75502b7cada01 + 05a63c6bae31f97583d35f5a16e1bd8f41a1d094 https://github.com/dotnet/roslyn c1d8c6f043bc80425c6828455eb57f8a404759c6 - + https://github.com/dotnet/arcade - 200adbc809c4451973d1929a53a75502b7cada01 + 05a63c6bae31f97583d35f5a16e1bd8f41a1d094 diff --git a/global.json b/global.json index 071ecc6c608bb..34a8a46ea03d3 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ "xcopy-msbuild": "16.10.0-preview2" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21608.1", - "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21608.1" + "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21609.2", + "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21609.2" } } From 0e19bb375f27b7396ddd6ad327dee4f78113f795 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Sat, 11 Dec 2021 13:44:53 +0000 Subject: [PATCH 022/413] Update dependencies from https://github.com/dotnet/arcade build 20211210.4 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 7.0.0-beta.21569.2 -> To Version 7.0.0-beta.21610.4 --- eng/Version.Details.xml | 8 ++++---- global.json | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index f23a3b1429404..5a33a1bcf5adc 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -13,18 +13,18 @@ - + https://github.com/dotnet/arcade - 05a63c6bae31f97583d35f5a16e1bd8f41a1d094 + 18adc5b47acce8bb03948baf578fca442d1029d4 
https://github.com/dotnet/roslyn c1d8c6f043bc80425c6828455eb57f8a404759c6 - + https://github.com/dotnet/arcade - 05a63c6bae31f97583d35f5a16e1bd8f41a1d094 + 18adc5b47acce8bb03948baf578fca442d1029d4 diff --git a/global.json b/global.json index 34a8a46ea03d3..1c7d412b7817d 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ "xcopy-msbuild": "16.10.0-preview2" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21609.2", - "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21609.2" + "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21610.4", + "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21610.4" } } From 8b74def7cb54ec648bdc955b243646a30500d7b3 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Tue, 14 Dec 2021 13:44:43 +0000 Subject: [PATCH 023/413] Update dependencies from https://github.com/dotnet/arcade build 20211213.2 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 7.0.0-beta.21569.2 -> To Version 7.0.0-beta.21613.2 --- eng/Version.Details.xml | 8 ++++---- global.json | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 5a33a1bcf5adc..02eb8c1e90083 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -13,18 +13,18 @@ - + https://github.com/dotnet/arcade - 18adc5b47acce8bb03948baf578fca442d1029d4 + bcd6e007b9f53be0a7aff804d5c17ea7e179317b https://github.com/dotnet/roslyn c1d8c6f043bc80425c6828455eb57f8a404759c6 - + https://github.com/dotnet/arcade - 18adc5b47acce8bb03948baf578fca442d1029d4 + bcd6e007b9f53be0a7aff804d5c17ea7e179317b diff --git a/global.json b/global.json index 1c7d412b7817d..1fbcd02737cf9 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ "xcopy-msbuild": "16.10.0-preview2" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21610.4", - "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21610.4" + "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21613.2", + "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21613.2" } } From 79dec64b6c528f9d8ac53547a26eab7e8fa4aa87 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Wed, 15 Dec 2021 13:42:48 +0000 Subject: [PATCH 024/413] Update dependencies from https://github.com/dotnet/arcade build 20211214.1 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 7.0.0-beta.21569.2 -> To Version 7.0.0-beta.21614.1 --- eng/Version.Details.xml | 8 ++++---- global.json | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 02eb8c1e90083..00e897e15a33d 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -13,18 +13,18 @@ - + https://github.com/dotnet/arcade - bcd6e007b9f53be0a7aff804d5c17ea7e179317b + cc0fa942bf43c2814af778868d4e7ddf21146b96 https://github.com/dotnet/roslyn c1d8c6f043bc80425c6828455eb57f8a404759c6 - + https://github.com/dotnet/arcade - bcd6e007b9f53be0a7aff804d5c17ea7e179317b + cc0fa942bf43c2814af778868d4e7ddf21146b96 diff --git a/global.json b/global.json index 1fbcd02737cf9..4a9419d56feba 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ "xcopy-msbuild": "16.10.0-preview2" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21613.2", - "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21613.2" + "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21614.1", + "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21614.1" } } From 83203be612ac601dcc7a276a31415244a5fff9dd Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Thu, 16 Dec 2021 13:41:55 +0000 Subject: [PATCH 025/413] Update 
dependencies from https://github.com/dotnet/arcade build 20211215.1 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 7.0.0-beta.21569.2 -> To Version 7.0.0-beta.21615.1 --- eng/Version.Details.xml | 8 ++++---- global.json | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 00e897e15a33d..043d063747f6a 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -13,18 +13,18 @@ - + https://github.com/dotnet/arcade - cc0fa942bf43c2814af778868d4e7ddf21146b96 + 943d03f62955c771825dfa1f1bdeb8f853a2d7dd https://github.com/dotnet/roslyn c1d8c6f043bc80425c6828455eb57f8a404759c6 - + https://github.com/dotnet/arcade - cc0fa942bf43c2814af778868d4e7ddf21146b96 + 943d03f62955c771825dfa1f1bdeb8f853a2d7dd diff --git a/global.json b/global.json index 4a9419d56feba..fd86f0793bb3e 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ "xcopy-msbuild": "16.10.0-preview2" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21614.1", - "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21614.1" + "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21615.1", + "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21615.1" } } From 525fcd5ace5a6687f78e464931f4baf5e37e15cc Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Sat, 18 Dec 2021 13:40:40 +0000 Subject: [PATCH 026/413] Update dependencies from https://github.com/dotnet/arcade build 20211217.4 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 7.0.0-beta.21569.2 -> To Version 7.0.0-beta.21617.4 --- eng/Version.Details.xml | 8 ++++---- global.json | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 043d063747f6a..120cd44cb52c5 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -13,18 +13,18 @@ - + https://github.com/dotnet/arcade - 943d03f62955c771825dfa1f1bdeb8f853a2d7dd + 78659a1d4831ce9d62ea817fe13e4e2e70a52961 https://github.com/dotnet/roslyn c1d8c6f043bc80425c6828455eb57f8a404759c6 - + https://github.com/dotnet/arcade - 943d03f62955c771825dfa1f1bdeb8f853a2d7dd + 78659a1d4831ce9d62ea817fe13e4e2e70a52961 diff --git a/global.json b/global.json index fd86f0793bb3e..ff549c0402cf6 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ "xcopy-msbuild": "16.10.0-preview2" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21615.1", - "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21615.1" + "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21617.4", + "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21617.4" } } From 134357994257d5df6273457d600608d7b5e4f7a4 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Tue, 21 Dec 2021 13:44:41 +0000 Subject: [PATCH 027/413] Update dependencies from https://github.com/dotnet/arcade build 20211220.2 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 7.0.0-beta.21569.2 -> To Version 7.0.0-beta.21620.2 --- eng/Version.Details.xml | 8 ++++---- eng/common/dotnet-install.sh | 3 +++ eng/common/native/CommonLibrary.psm1 | 3 ++- global.json | 4 ++-- 4 files changed, 11 insertions(+), 7 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 120cd44cb52c5..105bc8d60c04d 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -13,18 +13,18 @@ - + https://github.com/dotnet/arcade - 78659a1d4831ce9d62ea817fe13e4e2e70a52961 + 2af5dda2d48417982a6b90bf28e8b9a9b57f5ad4 https://github.com/dotnet/roslyn c1d8c6f043bc80425c6828455eb57f8a404759c6 - + 
https://github.com/dotnet/arcade - 78659a1d4831ce9d62ea817fe13e4e2e70a52961 + 2af5dda2d48417982a6b90bf28e8b9a9b57f5ad4 diff --git a/eng/common/dotnet-install.sh b/eng/common/dotnet-install.sh index fdfeea66e7d43..5c94e98632a0a 100755 --- a/eng/common/dotnet-install.sh +++ b/eng/common/dotnet-install.sh @@ -55,6 +55,9 @@ case $cpuname in aarch64) buildarch=arm64 ;; + loongarch64) + buildarch=loongarch64 + ;; amd64|x86_64) buildarch=x64 ;; diff --git a/eng/common/native/CommonLibrary.psm1 b/eng/common/native/CommonLibrary.psm1 index adf707c8fe700..ca38268c44d83 100644 --- a/eng/common/native/CommonLibrary.psm1 +++ b/eng/common/native/CommonLibrary.psm1 @@ -276,7 +276,8 @@ function Get-MachineArchitecture { } if (($ProcessorArchitecture -Eq "AMD64") -Or ($ProcessorArchitecture -Eq "IA64") -Or - ($ProcessorArchitecture -Eq "ARM64")) { + ($ProcessorArchitecture -Eq "ARM64") -Or + ($ProcessorArchitecture -Eq "LOONGARCH64")) { return "x64" } return "x86" diff --git a/global.json b/global.json index ff549c0402cf6..d66acb0b9072e 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ "xcopy-msbuild": "16.10.0-preview2" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21617.4", - "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21617.4" + "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21620.2", + "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21620.2" } } From 18b0daf47473b5481b1b72f76a755efb1a99d55e Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Wed, 22 Dec 2021 13:42:09 +0000 Subject: [PATCH 028/413] Update dependencies from https://github.com/dotnet/arcade build 20211221.3 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 7.0.0-beta.21569.2 -> To Version 7.0.0-beta.21621.3 --- eng/Version.Details.xml | 8 ++++---- global.json | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 105bc8d60c04d..4955b81ab4c56 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -13,18 +13,18 @@ - + https://github.com/dotnet/arcade - 2af5dda2d48417982a6b90bf28e8b9a9b57f5ad4 + 0cd94b1d02c03377d99f3739beb191591f6abee5 https://github.com/dotnet/roslyn c1d8c6f043bc80425c6828455eb57f8a404759c6 - + https://github.com/dotnet/arcade - 2af5dda2d48417982a6b90bf28e8b9a9b57f5ad4 + 0cd94b1d02c03377d99f3739beb191591f6abee5 diff --git a/global.json b/global.json index d66acb0b9072e..02b28209e68ee 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ "xcopy-msbuild": "16.10.0-preview2" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21620.2", - "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21620.2" + "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21621.3", + "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21621.3" } } From accc20ae4e4678c6ac23f14958815ef2a0802c9d Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Thu, 23 Dec 2021 13:43:28 +0000 Subject: [PATCH 029/413] Update dependencies from https://github.com/dotnet/arcade build 20211223.1 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 7.0.0-beta.21569.2 -> To Version 7.0.0-beta.21623.1 --- eng/Version.Details.xml | 8 ++++---- global.json | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 4955b81ab4c56..17cc44be3501b 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -13,18 +13,18 @@ - + https://github.com/dotnet/arcade - 0cd94b1d02c03377d99f3739beb191591f6abee5 + 4abaab2bf44d06638abeb23fc96c4f6eef58a2f0 
https://github.com/dotnet/roslyn c1d8c6f043bc80425c6828455eb57f8a404759c6 - + https://github.com/dotnet/arcade - 0cd94b1d02c03377d99f3739beb191591f6abee5 + 4abaab2bf44d06638abeb23fc96c4f6eef58a2f0 diff --git a/global.json b/global.json index 02b28209e68ee..59c01b0b4680d 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ "xcopy-msbuild": "16.10.0-preview2" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21621.3", - "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21621.3" + "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21623.1", + "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21623.1" } } From f0f0bfe8d8b36bc1591309be3825b5a8c68e5cc3 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Fri, 24 Dec 2021 13:42:09 +0000 Subject: [PATCH 030/413] Update dependencies from https://github.com/dotnet/arcade build 20211223.2 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 7.0.0-beta.21569.2 -> To Version 7.0.0-beta.21623.2 --- eng/Version.Details.xml | 8 +- eng/common/cross/arm/tizen-build-rootfs.sh | 35 +++++ eng/common/cross/arm/tizen-fetch.sh | 170 +++++++++++++++++++++ eng/common/cross/arm/tizen/tizen.patch | 9 ++ eng/common/cross/build-rootfs.sh | 4 +- eng/common/cross/toolchain.cmake | 11 +- global.json | 4 +- 7 files changed, 231 insertions(+), 10 deletions(-) create mode 100644 eng/common/cross/arm/tizen-build-rootfs.sh create mode 100644 eng/common/cross/arm/tizen-fetch.sh create mode 100644 eng/common/cross/arm/tizen/tizen.patch diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 17cc44be3501b..2551c405e8fa0 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -13,18 +13,18 @@ - + https://github.com/dotnet/arcade - 4abaab2bf44d06638abeb23fc96c4f6eef58a2f0 + 1a66526b0c1eb068cab89909b7d52fe6f57d64df https://github.com/dotnet/roslyn c1d8c6f043bc80425c6828455eb57f8a404759c6 - + https://github.com/dotnet/arcade - 4abaab2bf44d06638abeb23fc96c4f6eef58a2f0 + 1a66526b0c1eb068cab89909b7d52fe6f57d64df diff --git a/eng/common/cross/arm/tizen-build-rootfs.sh b/eng/common/cross/arm/tizen-build-rootfs.sh new file mode 100644 index 0000000000000..9fdb32e920e22 --- /dev/null +++ b/eng/common/cross/arm/tizen-build-rootfs.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash +set -e + +__ARM_HARDFP_CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) +__TIZEN_CROSSDIR="$__ARM_HARDFP_CrossDir/tizen" + +if [[ -z "$ROOTFS_DIR" ]]; then + echo "ROOTFS_DIR is not defined." 
+ exit 1; +fi + +TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp +mkdir -p $TIZEN_TMP_DIR + +# Download files +echo ">>Start downloading files" +VERBOSE=1 $__ARM_HARDFP_CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR +echo "<>Start constructing Tizen rootfs" +TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm` +cd $ROOTFS_DIR +for f in $TIZEN_RPM_FILES; do + rpm2cpio $f | cpio -idm --quiet +done +echo "<>Start configuring Tizen rootfs" +ln -sfn asm-arm ./usr/include/asm +patch -p1 < $__TIZEN_CROSSDIR/tizen.patch +echo "</dev/null; then + VERBOSE=0 +fi + +Log() +{ + if [ $VERBOSE -ge $1 ]; then + echo ${@:2} + fi +} + +Inform() +{ + Log 1 -e "\x1B[0;34m$@\x1B[m" +} + +Debug() +{ + Log 2 -e "\x1B[0;32m$@\x1B[m" +} + +Error() +{ + >&2 Log 0 -e "\x1B[0;31m$@\x1B[m" +} + +Fetch() +{ + URL=$1 + FILE=$2 + PROGRESS=$3 + if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then + CURL_OPT="--progress-bar" + else + CURL_OPT="--silent" + fi + curl $CURL_OPT $URL > $FILE +} + +hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; } +hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; } +hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; } + +TMPDIR=$1 +if [ ! -d $TMPDIR ]; then + TMPDIR=./tizen_tmp + Debug "Create temporary directory : $TMPDIR" + mkdir -p $TMPDIR +fi + +TIZEN_URL=http://download.tizen.org/snapshots/tizen +BUILD_XML=build.xml +REPOMD_XML=repomd.xml +PRIMARY_XML=primary.xml +TARGET_URL="http://__not_initialized" + +Xpath_get() +{ + XPATH_RESULT='' + XPATH=$1 + XML_FILE=$2 + RESULT=$(xmllint --xpath $XPATH $XML_FILE) + if [[ -z ${RESULT// } ]]; then + Error "Can not find target from $XML_FILE" + Debug "Xpath = $XPATH" + exit 1 + fi + XPATH_RESULT=$RESULT +} + +fetch_tizen_pkgs_init() +{ + TARGET=$1 + PROFILE=$2 + Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE" + + TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs + if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi + mkdir -p $TMP_PKG_DIR + + PKG_URL=$TIZEN_URL/$PROFILE/latest + + BUILD_XML_URL=$PKG_URL/$BUILD_XML + TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML + TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML + TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML + TMP_PRIMARYGZ=${TMP_PRIMARY}.gz + + Fetch $BUILD_XML_URL $TMP_BUILD + + Debug "fetch $BUILD_XML_URL to $TMP_BUILD" + + TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()" + Xpath_get $TARGET_XPATH $TMP_BUILD + TARGET_PATH=$XPATH_RESULT + TARGET_URL=$PKG_URL/$TARGET_PATH + + REPOMD_URL=$TARGET_URL/repodata/repomd.xml + PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)' + + Fetch $REPOMD_URL $TMP_REPOMD + + Debug "fetch $REPOMD_URL to $TMP_REPOMD" + + Xpath_get $PRIMARY_XPATH $TMP_REPOMD + PRIMARY_XML_PATH=$XPATH_RESULT + PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH + + Fetch $PRIMARY_URL $TMP_PRIMARYGZ + + Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ" + + gunzip $TMP_PRIMARYGZ + + Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY" +} + +fetch_tizen_pkgs() +{ + ARCH=$1 + PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)' + + PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())' + + for pkg in ${@:2} + do + Inform "Fetching... 
$pkg" + XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg} + XPATH=${XPATH/_ARCH_/$ARCH} + Xpath_get $XPATH $TMP_PRIMARY + PKG_PATH=$XPATH_RESULT + + XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg} + XPATH=${XPATH/_ARCH_/$ARCH} + Xpath_get $XPATH $TMP_PRIMARY + CHECKSUM=$XPATH_RESULT + + PKG_URL=$TARGET_URL/$PKG_PATH + PKG_FILE=$(basename $PKG_PATH) + PKG_PATH=$TMPDIR/$PKG_FILE + + Debug "Download $PKG_URL to $PKG_PATH" + Fetch $PKG_URL $PKG_PATH true + + echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null + if [ $? -ne 0 ]; then + Error "Fail to fetch $PKG_URL to $PKG_PATH" + Debug "Checksum = $CHECKSUM" + exit 1 + fi + done +} + +Inform "Initialize arm base" +fetch_tizen_pkgs_init standard base +Inform "fetch common packages" +fetch_tizen_pkgs armv7hl gcc gcc-devel-static glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils +Inform "fetch coreclr packages" +fetch_tizen_pkgs armv7hl lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu +Inform "fetch corefx packages" +fetch_tizen_pkgs armv7hl libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel + +Inform "Initialize standard unified" +fetch_tizen_pkgs_init standard unified +Inform "fetch corefx packages" +fetch_tizen_pkgs armv7hl gssdp gssdp-devel tizen-release + diff --git a/eng/common/cross/arm/tizen/tizen.patch b/eng/common/cross/arm/tizen/tizen.patch new file mode 100644 index 0000000000000..fb12ade7250ae --- /dev/null +++ b/eng/common/cross/arm/tizen/tizen.patch @@ -0,0 +1,9 @@ +diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so +--- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900 ++++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900 +@@ -2,4 +2,4 @@ + Use the shared library, but some functions are only in + the static library, so try that secondarily. */ + OUTPUT_FORMAT(elf32-littlearm) +-GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux-armhf.so.3 ) ) ++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-armhf.so.3 ) ) diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh index 5102245b7b5e3..e94d13d62ef64 100755 --- a/eng/common/cross/build-rootfs.sh +++ b/eng/common/cross/build-rootfs.sh @@ -185,8 +185,8 @@ while :; do __LLDB_Package="liblldb-6.0-dev" ;; tizen) - if [ "$__BuildArch" != "armel" ] && [ "$__BuildArch" != "arm64" ]; then - echo "Tizen is available only for armel and arm64." + if [ "$__BuildArch" != "arm" ] && [ "$__BuildArch" != "armel" ] && [ "$__BuildArch" != "arm64" ]; then + echo "Tizen is available only for arm, armel and arm64." 
usage; exit 1; fi diff --git a/eng/common/cross/toolchain.cmake b/eng/common/cross/toolchain.cmake index 6501c3a955f78..8369ae0b431d7 100644 --- a/eng/common/cross/toolchain.cmake +++ b/eng/common/cross/toolchain.cmake @@ -26,6 +26,9 @@ elseif(TARGET_ARCH_NAME STREQUAL "arm") else() set(TOOLCHAIN "arm-linux-gnueabihf") endif() + if("$ENV{__DistroRid}" MATCHES "tizen.*") + set(TIZEN_TOOLCHAIN "armv7hl-tizen-linux-gnueabihf/9.2.0") + endif() elseif(TARGET_ARCH_NAME STREQUAL "arm64") set(CMAKE_SYSTEM_PROCESSOR aarch64) if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/aarch64-alpine-linux-musl) @@ -58,6 +61,10 @@ endif() # Specify include paths if(DEFINED TIZEN_TOOLCHAIN) + if(TARGET_ARCH_NAME STREQUAL "arm") + include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/) + include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7hl-tizen-linux-gnueabihf) + endif() if(TARGET_ARCH_NAME STREQUAL "armel") include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/) include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7l-tizen-linux-gnueabi) @@ -150,7 +157,7 @@ if(CMAKE_SYSTEM_NAME STREQUAL "Linux") add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib/${TOOLCHAIN}") endif() -if(TARGET_ARCH_NAME STREQUAL "armel") +if(TARGET_ARCH_NAME STREQUAL "arm" OR TARGET_ARCH_NAME STREQUAL "armel") if(DEFINED TIZEN_TOOLCHAIN) # For Tizen only add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib") @@ -205,7 +212,7 @@ elseif(TARGET_ARCH_NAME STREQUAL "x86") endif() if(DEFINED TIZEN_TOOLCHAIN) - if(TARGET_ARCH_NAME MATCHES "^(armel|arm64)$") + if(TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64)$") add_compile_options(-Wno-deprecated-declarations) # compile-time option add_compile_options(-D__extern_always_inline=inline) # compile-time option endif() diff --git a/global.json b/global.json index 59c01b0b4680d..d2b1106cc4d2b 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ "xcopy-msbuild": "16.10.0-preview2" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21623.1", - "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21623.1" + "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21623.2", + "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.21623.2" } } From 8211a1d3107263976414448983f6efb2372e164c Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Tue, 28 Dec 2021 13:41:59 +0000 Subject: [PATCH 031/413] Update dependencies from https://github.com/dotnet/arcade build 20211227.1 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 7.0.0-beta.21569.2 -> To Version 7.0.0-beta.21627.1 --- eng/Version.Details.xml | 8 +++---- eng/common/cross/toolchain.cmake | 38 +++++++++++++++++++----------- eng/common/native/init-compiler.sh | 32 ++++++++++++++++++++----- global.json | 4 ++-- 4 files changed, 56 insertions(+), 26 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 2551c405e8fa0..a3c9299433e91 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -13,18 +13,18 @@ - + https://github.com/dotnet/arcade - 1a66526b0c1eb068cab89909b7d52fe6f57d64df + 28ea474e02753fe23295f60e8792bc845b8b6e20 https://github.com/dotnet/roslyn c1d8c6f043bc80425c6828455eb57f8a404759c6 - + https://github.com/dotnet/arcade - 1a66526b0c1eb068cab89909b7d52fe6f57d64df + 28ea474e02753fe23295f60e8792bc845b8b6e20 diff --git a/eng/common/cross/toolchain.cmake 
b/eng/common/cross/toolchain.cmake index 8369ae0b431d7..f7878dddd3921 100644 --- a/eng/common/cross/toolchain.cmake +++ b/eng/common/cross/toolchain.cmake @@ -3,18 +3,26 @@ set(CROSS_ROOTFS $ENV{ROOTFS_DIR}) set(TARGET_ARCH_NAME $ENV{TARGET_BUILD_ARCH}) if(EXISTS ${CROSS_ROOTFS}/bin/freebsd-version) set(CMAKE_SYSTEM_NAME FreeBSD) + set(FREEBSD 1) elseif(EXISTS ${CROSS_ROOTFS}/usr/platform/i86pc) set(CMAKE_SYSTEM_NAME SunOS) set(ILLUMOS 1) else() set(CMAKE_SYSTEM_NAME Linux) + set(LINUX 1) endif() set(CMAKE_SYSTEM_VERSION 1) +if(EXISTS ${CROSS_ROOTFS}/etc/tizen-release) + set(TIZEN 1) +elseif(EXISTS ${CROSS_ROOTFS}/android_platform) + set(ANDROID 1) +endif() + if(TARGET_ARCH_NAME STREQUAL "armel") set(CMAKE_SYSTEM_PROCESSOR armv7l) set(TOOLCHAIN "arm-linux-gnueabi") - if("$ENV{__DistroRid}" MATCHES "tizen.*") + if(TIZEN) set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/9.2.0") endif() elseif(TARGET_ARCH_NAME STREQUAL "arm") @@ -26,7 +34,7 @@ elseif(TARGET_ARCH_NAME STREQUAL "arm") else() set(TOOLCHAIN "arm-linux-gnueabihf") endif() - if("$ENV{__DistroRid}" MATCHES "tizen.*") + if(TIZEN) set(TIZEN_TOOLCHAIN "armv7hl-tizen-linux-gnueabihf/9.2.0") endif() elseif(TARGET_ARCH_NAME STREQUAL "arm64") @@ -36,7 +44,7 @@ elseif(TARGET_ARCH_NAME STREQUAL "arm64") else() set(TOOLCHAIN "aarch64-linux-gnu") endif() - if("$ENV{__DistroRid}" MATCHES "tizen.*") + if(TIZEN) set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu/9.2.0") endif() elseif(TARGET_ARCH_NAME STREQUAL "s390x") @@ -45,7 +53,7 @@ elseif(TARGET_ARCH_NAME STREQUAL "s390x") elseif(TARGET_ARCH_NAME STREQUAL "x86") set(CMAKE_SYSTEM_PROCESSOR i686) set(TOOLCHAIN "i686-linux-gnu") -elseif (CMAKE_SYSTEM_NAME STREQUAL "FreeBSD") +elseif (FREEBSD) set(CMAKE_SYSTEM_PROCESSOR "x86_64") set(triple "x86_64-unknown-freebsd12") elseif (ILLUMOS) @@ -60,7 +68,7 @@ if(DEFINED ENV{TOOLCHAIN}) endif() # Specify include paths -if(DEFINED TIZEN_TOOLCHAIN) +if(TIZEN) if(TARGET_ARCH_NAME STREQUAL "arm") include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/) include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7hl-tizen-linux-gnueabihf) @@ -75,7 +83,7 @@ if(DEFINED TIZEN_TOOLCHAIN) endif() endif() -if("$ENV{__DistroRid}" MATCHES "android.*") +if(ANDROID) if(TARGET_ARCH_NAME STREQUAL "arm") set(ANDROID_ABI armeabi-v7a) elseif(TARGET_ARCH_NAME STREQUAL "arm64") @@ -83,7 +91,9 @@ if("$ENV{__DistroRid}" MATCHES "android.*") endif() # extract platform number required by the NDK's toolchain - string(REGEX REPLACE ".*\\.([0-9]+)-.*" "\\1" ANDROID_PLATFORM "$ENV{__DistroRid}") + file(READ "${CROSS_ROOTFS}/android_platform" RID_FILE_CONTENTS) + string(REPLACE "RID=" "" ANDROID_RID "${RID_FILE_CONTENTS}") + string(REGEX REPLACE ".*\\.([0-9]+)-.*" "\\1" ANDROID_PLATFORM "${ANDROID_RID}") set(ANDROID_TOOLCHAIN clang) set(FEATURE_EVENT_TRACE 0) # disable event trace as there is no lttng-ust package in termux repository @@ -92,7 +102,7 @@ if("$ENV{__DistroRid}" MATCHES "android.*") # include official NDK toolchain script include(${CROSS_ROOTFS}/../build/cmake/android.toolchain.cmake) -elseif(CMAKE_SYSTEM_NAME STREQUAL "FreeBSD") +elseif(FREEBSD) # we cross-compile by instructing clang set(CMAKE_C_COMPILER_TARGET ${triple}) set(CMAKE_CXX_COMPILER_TARGET ${triple}) @@ -152,20 +162,20 @@ function(add_toolchain_linker_flag Flag) set("CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}_INIT" "${CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}_INIT} ${Flag}" PARENT_SCOPE) endfunction() -if(CMAKE_SYSTEM_NAME STREQUAL "Linux") +if(LINUX) 
add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/lib/${TOOLCHAIN}") add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib/${TOOLCHAIN}") endif() -if(TARGET_ARCH_NAME STREQUAL "arm" OR TARGET_ARCH_NAME STREQUAL "armel") - if(DEFINED TIZEN_TOOLCHAIN) # For Tizen only +if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$") + if(TIZEN) add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib") add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib") add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") endif() elseif(TARGET_ARCH_NAME STREQUAL "arm64") - if(DEFINED TIZEN_TOOLCHAIN) # For Tizen only + if(TIZEN) add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}") add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib64") add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib64") @@ -184,7 +194,7 @@ endif() # Specify compile options -if((TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64|s390x)$" AND NOT "$ENV{__DistroRid}" MATCHES "android.*") OR ILLUMOS) +if((TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64|s390x)$" AND NOT ANDROID) OR ILLUMOS) set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN}) set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN}) set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN}) @@ -211,7 +221,7 @@ elseif(TARGET_ARCH_NAME STREQUAL "x86") add_compile_options(-Wno-error=unused-command-line-argument) endif() -if(DEFINED TIZEN_TOOLCHAIN) +if(TIZEN) if(TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64)$") add_compile_options(-Wno-deprecated-declarations) # compile-time option add_compile_options(-D__extern_always_inline=inline) # compile-time option diff --git a/eng/common/native/init-compiler.sh b/eng/common/native/init-compiler.sh index e361e03fabdd2..6d7ba15e5f2b5 100644 --- a/eng/common/native/init-compiler.sh +++ b/eng/common/native/init-compiler.sh @@ -2,25 +2,45 @@ # # This file detects the C/C++ compiler and exports it to the CC/CXX environment variables # -# NOTE: some scripts source this file and rely on stdout being empty, make sure to not output anything here! +# NOTE: some scripts source this file and rely on stdout being empty, make sure to not output anything here! if [[ "$#" -lt 3 ]]; then echo "Usage..." - echo "init-compiler.sh