From d12b383f836ca4e8ba9075b5dc6c5abde4ec418d Mon Sep 17 00:00:00 2001 From: Shay Rojansky Date: Mon, 21 Mar 2022 16:20:26 +0200 Subject: [PATCH] Removed unnecessary batching boundaries from topological sort Closes #20664 --- .../Update/Internal/CommandBatchPreparer.cs | 47 ++- .../SqlServerModificationCommandBatch.cs | 10 +- src/EFCore/Metadata/Internal/EntityType.cs | 3 +- .../Metadata/Internal/ModelExtensions.cs | 27 +- src/Shared/Multigraph.cs | 314 ++++++++---------- .../Update/StoreValueGenerationTestBase.cs | 17 +- .../Internal/MigrationsModelDifferTest.cs | 109 +----- .../Internal/MigrationsModelDifferTestBase.cs | 19 +- .../TestModificationCommandBatch.cs | 2 +- .../Update/CommandBatchPreparerTest.cs | 149 ++++++--- .../BatchingTest.cs | 4 +- .../Query/TPTInheritanceQuerySqlServerTest.cs | 76 ++--- .../SqlServerEndToEndTest.cs | 4 +- ...oreValueGenerationIdentitySqlServerTest.cs | 31 +- ...eGenerationIdentityTriggerSqlServerTest.cs | 9 +- ...oreValueGenerationSequenceSqlServerTest.cs | 8 +- ...eGenerationSequenceTriggerSqlServerTest.cs | 8 +- .../Update/StoreValueGenerationSqliteTest.cs | 8 +- test/EFCore.Tests/Utilities/MultigraphTest.cs | 123 +++++++ 19 files changed, 534 insertions(+), 434 deletions(-) diff --git a/src/EFCore.Relational/Update/Internal/CommandBatchPreparer.cs b/src/EFCore.Relational/Update/Internal/CommandBatchPreparer.cs index 3e58ba607ac..b1e5e065caf 100644 --- a/src/EFCore.Relational/Update/Internal/CommandBatchPreparer.cs +++ b/src/EFCore.Relational/Update/Internal/CommandBatchPreparer.cs @@ -16,7 +16,7 @@ public class CommandBatchPreparer : ICommandBatchPreparer { private readonly int _minBatchSize; private readonly bool _sensitiveLoggingEnabled; - private readonly Multigraph _modificationCommandGraph = new(); + private readonly Multigraph _modificationCommandGraph; /// /// This is an internal API that supports the Entity Framework Core infrastructure and not subject to @@ -29,6 +29,8 @@ public CommandBatchPreparer(CommandBatchPreparerDependencies dependencies) _minBatchSize = dependencies.Options.Extensions.OfType().FirstOrDefault()?.MinBatchSize ?? 
1;
+
+        _modificationCommandGraph = new(dependencies.ModificationCommandComparer);

        Dependencies = dependencies;

        if (dependencies.LoggingOptions.IsSensitiveDataLoggingEnabled)
@@ -57,16 +59,14 @@ public CommandBatchPreparer(CommandBatchPreparerDependencies dependencies)
    {
        var parameterNameGenerator = Dependencies.ParameterNameGeneratorFactory.Create();
        var commands = CreateModificationCommands(entries, updateAdapter, parameterNameGenerator.GenerateNext);
-        var sortedCommandSets = TopologicalSort(commands);
+        var commandSets = TopologicalSort(commands);

-        for (var commandSetIndex = 0; commandSetIndex < sortedCommandSets.Count; commandSetIndex++)
+        for (var commandSetIndex = 0; commandSetIndex < commandSets.Count; commandSetIndex++)
        {
-            var independentCommandSet = sortedCommandSets[commandSetIndex];
-
-            independentCommandSet.Sort(Dependencies.ModificationCommandComparer);
+            var commandSet = commandSets[commandSetIndex];

            var batch = Dependencies.ModificationCommandBatchFactory.Create();
-            foreach (var modificationCommand in independentCommandSet)
+            foreach (var modificationCommand in commandSet)
            {
                (modificationCommand as ModificationCommand)?.AssertColumnsNotInitialized();
                if (modificationCommand.EntityState == EntityState.Modified
@@ -108,7 +108,7 @@ public CommandBatchPreparer(CommandBatchPreparerDependencies dependencies)
                }
            }

-            var hasMoreCommandSets = commandSetIndex < sortedCommandSets.Count - 1;
+            var hasMoreCommandSets = commandSetIndex < commandSets.Count - 1;

            if (batch.ModificationCommands.Count == 1
                || batch.ModificationCommands.Count >= _minBatchSize)
@@ -295,10 +295,10 @@ private string FormatCycle(
        var builder = new StringBuilder();
        for (var i = 0; i < data.Count; i++)
        {
-            var (command1, command2, annotatables) = data[i];
+            var (command1, command2, edges) = data[i];

            Format(command1, builder);

-            switch (annotatables.First())
+            switch (edges.First())
            {
                case IForeignKeyConstraint foreignKey:
                    Format(foreignKey, command1, command2, builder);
@@ -538,8 +538,27 @@ private void AddForeignKeyEdges(
                    .CreateDependentValueIndex(command);

                if (dependentKeyValue != null)
                {
-                    AddMatchingPredecessorEdge(
-                        predecessorsMap, dependentKeyValue, commandGraph, command, foreignKey);
+                    if (predecessorsMap.TryGetValue(dependentKeyValue, out var predecessorCommands))
+                    {
+                        foreach (var predecessor in predecessorCommands)
+                        {
+                            if (predecessor != command)
+                            {
+                                // If we're adding/inserting a dependent where the principal key is being database-generated, then
+                                // the dependency edge represents a batching boundary: fetch the principal database-generated
+                                // property from the database in a separate batch, in order to populate the dependent foreign key
+                                // property in the next one.
+ var requiresBatchingBoundary = + foreignKey.PrincipalColumns.Any( + c => predecessor.Entries.Any( + e => + c.FindColumnMapping(e.EntityType) is IColumnMapping columnMapping + && e.IsStoreGenerated(columnMapping.Property))); + + commandGraph.AddEdge(predecessor, command, foreignKey, requiresBatchingBoundary); + } + } + } } } } @@ -623,7 +642,7 @@ private static void AddMatchingPredecessorEdge( T keyValue, Multigraph commandGraph, IReadOnlyModificationCommand command, - IAnnotatable edge) + IAnnotatable edgeAnnotatable) where T : notnull { if (predecessorsMap.TryGetValue(keyValue, out var predecessorCommands)) @@ -632,7 +651,7 @@ private static void AddMatchingPredecessorEdge( { if (predecessor != command) { - commandGraph.AddEdge(predecessor, command, edge); + commandGraph.AddEdge(predecessor, command, edgeAnnotatable); } } } diff --git a/src/EFCore.SqlServer/Update/Internal/SqlServerModificationCommandBatch.cs b/src/EFCore.SqlServer/Update/Internal/SqlServerModificationCommandBatch.cs index c6d8a9085e4..582252289cb 100644 --- a/src/EFCore.SqlServer/Update/Internal/SqlServerModificationCommandBatch.cs +++ b/src/EFCore.SqlServer/Update/Internal/SqlServerModificationCommandBatch.cs @@ -18,7 +18,6 @@ public class SqlServerModificationCommandBatch : AffectedCountModificationComman private const int DefaultNetworkPacketSizeBytes = 4096; private const int MaxScriptLength = 65536 * DefaultNetworkPacketSizeBytes / 2; private const int MaxParameterCount = 2100; - private readonly int _maxBatchSize; private readonly List _pendingBulkInsertCommands = new(); /// @@ -31,7 +30,7 @@ public SqlServerModificationCommandBatch( ModificationCommandBatchFactoryDependencies dependencies, int maxBatchSize) : base(dependencies) - => _maxBatchSize = maxBatchSize; + => MaxBatchSize = maxBatchSize; /// /// This is an internal API that supports the Entity Framework Core infrastructure and not subject to @@ -48,8 +47,7 @@ public SqlServerModificationCommandBatch( /// /// For SQL Server, this is 42 by default, and cannot exceed 1000. /// - protected override int MaxBatchSize - => _maxBatchSize; + protected override int MaxBatchSize { get; } /// /// This is an internal API that supports the Entity Framework Core infrastructure and not subject to @@ -165,8 +163,8 @@ protected override void AddCommand(IReadOnlyModificationCommand modificationComm private static bool CanBeInsertedInSameStatement( IReadOnlyModificationCommand firstCommand, IReadOnlyModificationCommand secondCommand) - => string.Equals(firstCommand.TableName, secondCommand.TableName, StringComparison.Ordinal) - && string.Equals(firstCommand.Schema, secondCommand.Schema, StringComparison.Ordinal) + => firstCommand.TableName == secondCommand.TableName + && firstCommand.Schema == secondCommand.Schema && firstCommand.ColumnModifications.Where(o => o.IsWrite).Select(o => o.ColumnName).SequenceEqual( secondCommand.ColumnModifications.Where(o => o.IsWrite).Select(o => o.ColumnName)) && firstCommand.ColumnModifications.Where(o => o.IsRead).Select(o => o.ColumnName).SequenceEqual( diff --git a/src/EFCore/Metadata/Internal/EntityType.cs b/src/EFCore/Metadata/Internal/EntityType.cs index e74bbe1fcd6..0449f2c53d7 100644 --- a/src/EFCore/Metadata/Internal/EntityType.cs +++ b/src/EFCore/Metadata/Internal/EntityType.cs @@ -427,9 +427,8 @@ private void UpdateBaseTypeConfigurationSource(ConfigurationSource configuration /// any release. 
You should only use it directly in your code with extreme caution and knowing that
    ///     doing so can result in application failures when updating to a new Entity Framework Core release.
    /// 
-    // Note this is ISet because there is no suitable readonly interface in the profiles we are using
    [DebuggerStepThrough]
-    public virtual ISet GetDirectlyDerivedTypes()
+    public virtual IReadOnlySet GetDirectlyDerivedTypes()
        => _directlyDerivedTypes;

    /// 
diff --git a/src/EFCore/Metadata/Internal/ModelExtensions.cs b/src/EFCore/Metadata/Internal/ModelExtensions.cs
index 172ad28bf18..dbc0370bef7 100644
--- a/src/EFCore/Metadata/Internal/ModelExtensions.cs
+++ b/src/EFCore/Metadata/Internal/ModelExtensions.cs
@@ -1,6 +1,8 @@
 // Licensed to the .NET Foundation under one or more agreements.
 // The .NET Foundation licenses this file to you under the MIT license.

+using System.Collections;
+
 namespace Microsoft.EntityFrameworkCore.Metadata.Internal;

 /// 
@@ -36,18 +38,27 @@ public static IEnumerable GetRootEntityTypes(this IModel model)
 ///     doing so can result in application failures when updating to a new Entity Framework Core release.
 /// 
 public static IEnumerable GetEntityTypesInHierarchicalOrder(this IModel model)
-    => Sort(model.GetEntityTypes());
-
-    private static IEnumerable Sort(IEnumerable entityTypes)
 {
-        var entityTypeGraph = new Multigraph();
-        entityTypeGraph.AddVertices(entityTypes);
-        foreach (var entityType in entityTypes.Where(et => et.BaseType != null))
+    var entityTypes = new Queue();
+
+    foreach (var root in model.GetRootEntityTypes().OrderBy(e => e.Name, StringComparer.Ordinal))
     {
-            entityTypeGraph.AddEdge(entityType.BaseType!, entityType, 0);
+        entityTypes.Enqueue(root);
     }

-        return entityTypeGraph.BatchingTopologicalSort().SelectMany(b => b.OrderBy(et => et.Name));
+    while (entityTypes.Count > 0)
+    {
+        var current = entityTypes.Dequeue();
+
+        yield return current;
+
+        // Note: GetDirectlyDerivedTypes returns the entity types sorted by their name, but that isn't codified anywhere
+        // currently (e.g. by making the guarantee in the docs, or by re-sorting here).
+        foreach (var descendant in current.GetDirectlyDerivedTypes())
+        {
+            entityTypes.Enqueue(descendant);
+        }
+    }
 }

 /// 
diff --git a/src/Shared/Multigraph.cs b/src/Shared/Multigraph.cs
index f86a8317e79..f972766c14f 100644
--- a/src/Shared/Multigraph.cs
+++ b/src/Shared/Multigraph.cs
@@ -8,9 +8,22 @@ namespace Microsoft.EntityFrameworkCore.Utilities;

 internal class Multigraph : Graph
    where TVertex : notnull
 {
+    private readonly IComparer? _secondarySortComparer;
    private readonly HashSet _vertices = new();
    private readonly Dictionary> _successorMap = new();
-    private readonly Dictionary> _predecessorMap = new();
+    private readonly Dictionary> _predecessorMap = new();
+
+    public Multigraph()
+    {
+    }
+
+    public Multigraph(IComparer secondarySortComparer)
+        => _secondarySortComparer = secondarySortComparer;
+
+    public Multigraph(Comparison secondarySortComparer)
+        : this(Comparer.Create(secondarySortComparer))
+    {
+    }

    public IEnumerable GetEdges(TVertex from, TVertex to)
    {
@@ -18,7 +31,7 @@ public IEnumerable GetEdges(TVertex from, TVertex to)
        {
            if (successorSet.TryGetValue(to, out var edges))
            {
-                return edges is IEnumerable edgeList ? edgeList : (new[] { (TEdge)edges! });
+                return edges is IEnumerable edgeList ?
edgeList.Select(e => e.Payload) : (new[] { ((Edge)edges!).Payload }); } } @@ -31,7 +44,7 @@ public void AddVertex(TVertex vertex) public void AddVertices(IEnumerable vertices) => _vertices.UnionWith(vertices); - public void AddEdge(TVertex from, TVertex to, TEdge edge) + public void AddEdge(TVertex from, TVertex to, TEdge payload, bool requiresBatchingBoundary = false) { #if DEBUG if (!_vertices.Contains(from)) @@ -45,6 +58,8 @@ public void AddEdge(TVertex from, TVertex to, TEdge edge) } #endif + var edge = new Edge(payload, requiresBatchingBoundary); + if (!_successorMap.TryGetValue(from, out var successorEdges)) { successorEdges = new Dictionary(); @@ -53,9 +68,9 @@ public void AddEdge(TVertex from, TVertex to, TEdge edge) if (successorEdges.TryGetValue(to, out var edges)) { - if (edges is not List edgeList) + if (edges is not List edgeList) { - edgeList = new List { (TEdge)edges! }; + edgeList = new List { (Edge)edges! }; successorEdges[to] = edgeList; } @@ -66,13 +81,26 @@ public void AddEdge(TVertex from, TVertex to, TEdge edge) successorEdges.Add(to, edge); } - if (!_predecessorMap.TryGetValue(to, out var predecessors)) + if (!_predecessorMap.TryGetValue(to, out var predecessorEdges)) { - predecessors = new HashSet(); - _predecessorMap.Add(to, predecessors); + predecessorEdges = new Dictionary(); + _predecessorMap.Add(to, predecessorEdges); } - predecessors.Add(from); + if (predecessorEdges.TryGetValue(from, out edges)) + { + if (edges is not List edgeList) + { + edgeList = new List { (Edge)edges! }; + predecessorEdges[from] = edgeList; + } + + edgeList.Add(edge); + } + else + { + predecessorEdges.Add(from, edge); + } } public override void Clear() @@ -98,41 +126,121 @@ public IReadOnlyList TopologicalSort( Func>>, string>? formatCycle, Func? formatException = null) { - var queue = new List(); + var batches = TopologicalSortCore(withBatching: false, tryBreakEdge, formatCycle, formatException); + + Check.DebugAssert(batches.Count < 2, "TopologicalSortCore did batching but withBatching was false"); + + return batches.Count == 1 + ? batches[0] + : Array.Empty(); + } + + protected virtual string? ToString(TVertex vertex) + => vertex.ToString(); + + public IReadOnlyList> BatchingTopologicalSort() + => BatchingTopologicalSort(null, null); + + public IReadOnlyList> BatchingTopologicalSort( + Func, bool>? tryBreakEdge, + Func>>, string>? formatCycle, + Func? formatException = null) + => TopologicalSortCore(withBatching: true, tryBreakEdge, formatCycle, formatException); + + private IReadOnlyList> TopologicalSortCore( + bool withBatching, + Func, bool>? tryBreakEdge, + Func>>, string>? formatCycle, + Func? 
formatException = null)
+    {
+        // Performs a breadth-first topological sort (Kahn's algorithm)
+        var result = new List>();
+        var currentRootsQueue = new List();
+        var nextRootsQueue = new List();
+        var vertexesProcessed = 0;
+        var batchBoundaryRequired = false;
+        var currentBatch = new List();
+        var currentBatchSet = new HashSet();
+
        var predecessorCounts = new Dictionary(_predecessorMap.Count);
        foreach (var (vertex, vertices) in _predecessorMap)
        {
            predecessorCounts[vertex] = vertices.Count;
        }

+        // Bootstrap the topological sort by finding all vertexes which have no predecessors
        foreach (var vertex in _vertices)
        {
            if (!predecessorCounts.ContainsKey(vertex))
            {
-                queue.Add(vertex);
+                currentRootsQueue.Add(vertex);
            }
        }

-        var index = 0;
-        while (queue.Count < _vertices.Count)
+        result.Add(currentBatch);
+
+        while (vertexesProcessed < _vertices.Count)
        {
-            while (index < queue.Count)
+            while (currentRootsQueue.Count > 0)
            {
-                var currentRoot = queue[index];
-                index++;
+                // Secondary sorting: after the first topological sorting (according to dependencies between the commands as expressed in
+                // the graph), we apply an optional secondary sort.
+                // When sorting modification commands, this ensures a deterministic ordering and prevents deadlocks between concurrent
+                // transactions locking the same rows in different orders.
+                if (_secondarySortComparer is not null)
+                {
+                    currentRootsQueue.Sort(_secondarySortComparer);
+                }
+
+                // If we detected in the last roots pass that a batch boundary is required, close the current batch and start a new one.
+                if (batchBoundaryRequired)
+                {
+                    currentBatch = new();
+                    result.Add(currentBatch);
+                    currentBatchSet.Clear();
+
+                    batchBoundaryRequired = false;
+                }

-                foreach (var successor in GetOutgoingNeighbors(currentRoot))
+                foreach (var currentRoot in currentRootsQueue)
                {
-                    predecessorCounts[successor]--;
-                    if (predecessorCounts[successor] == 0)
+                    currentBatch.Add(currentRoot);
+                    currentBatchSet.Add(currentRoot);
+                    vertexesProcessed++;
+
+                    foreach (var successor in GetOutgoingNeighbors(currentRoot))
                    {
-                        queue.Add(successor);
+                        predecessorCounts[successor]--;
+
+                        // If the successor has no other predecessors, add it for processing in the next roots pass.
+                        if (predecessorCounts[successor] == 0)
+                        {
+                            nextRootsQueue.Add(successor);
+
+                            // Detect batch boundary (if batching is enabled).
+                            // If the successor has any predecessor where the edge requires a batching boundary, and that predecessor is
+                            // already in the current batch, then the successor will have to be executed in a separate, later batch.
+                            // TODO: Optimization: Instead of currentBatchSet, store a batch counter on each vertex, and check if later
+                            // vertexes have a boundary-requiring dependency on a vertex with the same batch counter.
+                            if (withBatching && _predecessorMap[successor].Any(
+                                kv =>
+                                    (kv.Value is Edge { RequiresBatchingBoundary: true }
+                                        || kv.Value is IEnumerable edges && edges.Any(e => e.RequiresBatchingBoundary))
+                                    && currentBatchSet.Contains(kv.Key)))
+                            {
+                                batchBoundaryRequired = true;
+                            }
+                        }
                    }
                }
+
+                // Finished passing over the current roots, move on to the next set.
+                (currentRootsQueue, nextRootsQueue) = (nextRootsQueue, currentRootsQueue);
+                nextRootsQueue.Clear();
            }

-            // Cycle breaking
-            if (queue.Count < _vertices.Count)
+            // We have no more roots to process.
That either means we're done, or that there's a cycle which we need to break + if (vertexesProcessed < _vertices.Count) { var broken = false; @@ -158,12 +266,14 @@ public IReadOnlyList TopologicalSort( if (tryBreakEdge(incomingNeighbor, candidateVertex, GetEdges(incomingNeighbor, candidateVertex))) { + if (!_successorMap.ContainsKey(incomingNeighbor)) + Debugger.Break(); _successorMap[incomingNeighbor].Remove(candidateVertex); _predecessorMap[candidateVertex].Remove(incomingNeighbor); predecessorCounts[candidateVertex]--; if (predecessorCounts[candidateVertex] == 0) { - queue.Add(candidateVertex); + currentRootsQueue.Add(candidateVertex); broken = true; } @@ -219,7 +329,7 @@ public IReadOnlyList TopologicalSort( } } - return queue; + return result; } private void ThrowCycle( @@ -250,158 +360,6 @@ private void ThrowCycle( throw new InvalidOperationException(message); } - protected virtual string? ToString(TVertex vertex) - => vertex.ToString(); - - public IReadOnlyList> BatchingTopologicalSort() - => BatchingTopologicalSort(null, null); - - public IReadOnlyList> BatchingTopologicalSort( - Func, bool>? tryBreakEdge, - Func>>, string>? formatCycle) - { - var currentRootsQueue = new List(); - var predecessorCounts = new Dictionary(_predecessorMap.Count); - foreach (var (vertex, vertices) in _predecessorMap) - { - predecessorCounts[vertex] = vertices.Count; - } - - foreach (var vertex in _vertices) - { - if (!predecessorCounts.ContainsKey(vertex)) - { - currentRootsQueue.Add(vertex); - } - } - - var result = new List>(); - var nextRootsQueue = new List(); - - while (result.Sum(b => b.Count) != _vertices.Count) - { - var currentRootIndex = 0; - while (currentRootIndex < currentRootsQueue.Count) - { - var currentRoot = currentRootsQueue[currentRootIndex]; - currentRootIndex++; - - foreach (var successor in GetOutgoingNeighbors(currentRoot)) - { - predecessorCounts[successor]--; - if (predecessorCounts[successor] == 0) - { - nextRootsQueue.Add(successor); - } - } - - // Roll lists over for next batch - if (currentRootIndex == currentRootsQueue.Count) - { - result.Add(currentRootsQueue); - - currentRootsQueue = nextRootsQueue; - currentRootIndex = 0; - - if (currentRootsQueue.Count != 0) - { - nextRootsQueue = new List(); - } - } - } - - // Cycle breaking - if (result.Sum(b => b.Count) != _vertices.Count) - { - var broken = false; - - var candidateVertices = predecessorCounts.Keys.ToList(); - var candidateIndex = 0; - - while ((candidateIndex < candidateVertices.Count) - && !broken - && tryBreakEdge != null) - { - var candidateVertex = candidateVertices[candidateIndex]; - if (predecessorCounts[candidateVertex] == 0) - { - candidateIndex++; - continue; - } - - // Find a vertex in the unsorted portion of the graph that has edges to the candidate - var incomingNeighbor = GetIncomingNeighbors(candidateVertex) - .First( - neighbor => predecessorCounts.TryGetValue(neighbor, out var neighborPredecessors) - && neighborPredecessors > 0); - - if (tryBreakEdge(incomingNeighbor, candidateVertex, GetEdges(incomingNeighbor, candidateVertex))) - { - _successorMap[incomingNeighbor].Remove(candidateVertex); - _predecessorMap[candidateVertex].Remove(incomingNeighbor); - predecessorCounts[candidateVertex]--; - if (predecessorCounts[candidateVertex] == 0) - { - currentRootsQueue.Add(candidateVertex); - nextRootsQueue = new List(); - broken = true; - } - - continue; - } - - candidateIndex++; - } - - if (broken) - { - continue; - } - - var currentCycleVertex = _vertices.First( - v => predecessorCounts.TryGetValue(v, out 
var predecessorCount) && predecessorCount != 0); - var cycle = new List { currentCycleVertex }; - var finished = false; - while (!finished) - { - foreach (var predecessor in GetIncomingNeighbors(currentCycleVertex)) - { - if (!predecessorCounts.TryGetValue(predecessor, out var predecessorCount) - || predecessorCount == 0) - { - continue; - } - - predecessorCounts[currentCycleVertex] = -1; - - currentCycleVertex = predecessor; - cycle.Add(currentCycleVertex); - finished = predecessorCounts[predecessor] == -1; - break; - } - } - - cycle.Reverse(); - - // Remove any tail that's not part of the cycle - var startingVertex = cycle[0]; - for (var i = cycle.Count - 1; i >= 0; i--) - { - if (cycle[i].Equals(startingVertex)) - { - break; - } - - cycle.RemoveAt(i); - } - - ThrowCycle(cycle, formatCycle); - } - } - - return result; - } - public override IEnumerable Vertices => _vertices; @@ -412,6 +370,8 @@ public override IEnumerable GetOutgoingNeighbors(TVertex from) public override IEnumerable GetIncomingNeighbors(TVertex to) => _predecessorMap.TryGetValue(to, out var predecessors) - ? predecessors + ? predecessors.Keys : Enumerable.Empty(); + + private record struct Edge(TEdge Payload, bool RequiresBatchingBoundary); } diff --git a/test/EFCore.Relational.Specification.Tests/Update/StoreValueGenerationTestBase.cs b/test/EFCore.Relational.Specification.Tests/Update/StoreValueGenerationTestBase.cs index f790bf1219f..1c4f8431032 100644 --- a/test/EFCore.Relational.Specification.Tests/Update/StoreValueGenerationTestBase.cs +++ b/test/EFCore.Relational.Specification.Tests/Update/StoreValueGenerationTestBase.cs @@ -53,7 +53,7 @@ public virtual Task Delete(bool async) #endregion Single operation - #region Two operations with same entity type + #region Same two operations with same entity type [ConditionalTheory] [MemberData(nameof(IsAsyncData))] @@ -85,9 +85,9 @@ public virtual Task Modify_Modify_with_same_entity_type_and_no_generated_values( public virtual Task Delete_Delete_with_same_entity_type(bool async) => Test(EntityState.Deleted, EntityState.Deleted, GeneratedValues.Some, async, withSameEntityType: true); - #endregion Two operations with same entity type + #endregion Same two operations with same entity type - #region Two operations with different entity types + #region Same two operations with different entity types [ConditionalTheory] [MemberData(nameof(IsAsyncData))] @@ -119,7 +119,16 @@ public virtual Task Modify_Modify_with_different_entity_types_and_no_generated_v public virtual Task Delete_Delete_with_different_entity_types(bool async) => Test(EntityState.Deleted, EntityState.Deleted, GeneratedValues.Some, async, withSameEntityType: false); - #endregion Two operations with different entity types + #endregion Same two operations with different entity types + + #region Different two operations + + [ConditionalTheory] + [MemberData(nameof(IsAsyncData))] + public virtual Task Delete_Add_with_same_entity_types(bool async) + => Test(EntityState.Deleted, EntityState.Added, GeneratedValues.Some, async, withSameEntityType: true); + + #endregion Different two operations protected virtual async Task Test( EntityState firstOperationType, diff --git a/test/EFCore.Relational.Tests/Migrations/Internal/MigrationsModelDifferTest.cs b/test/EFCore.Relational.Tests/Migrations/Internal/MigrationsModelDifferTest.cs index c1cdf2a68e3..9daaca35001 100644 --- a/test/EFCore.Relational.Tests/Migrations/Internal/MigrationsModelDifferTest.cs +++ 
b/test/EFCore.Relational.Tests/Migrations/Internal/MigrationsModelDifferTest.cs @@ -2525,13 +2525,7 @@ public void Alter_key_column_type_with_seed_data() var m = Assert.IsType(o); AssertMultidimensionalArray( m.Values, - v => Assert.Equal(42, v)); - }, - o => - { - var m = Assert.IsType(o); - AssertMultidimensionalArray( - m.Values, + v => Assert.Equal(42, v), v => Assert.Equal(43, v)); })); @@ -5822,15 +5816,7 @@ public void Change_TPH_to_TPT_with_FKs_and_seed_data() AssertMultidimensionalArray( operation.Values, v => Assert.Equal(23, v), - v => Assert.Null(v)); - }, - o => - { - var operation = Assert.IsType(o); - Assert.Equal("Animal", operation.Table); - Assert.Equal(new[] { "Id", "MouseId" }, operation.Columns); - AssertMultidimensionalArray( - operation.Values, + v => Assert.Null(v), v => Assert.Equal(33, v), v => Assert.Null(v)); }, @@ -5856,15 +5842,7 @@ public void Change_TPH_to_TPT_with_FKs_and_seed_data() AssertMultidimensionalArray( operation.Values, v => Assert.Equal(22, v), - v => Assert.Equal(33, v)); - }, - o => - { - var operation = Assert.IsType(o); - Assert.Equal("Dogs", operation.Table); - Assert.Equal(new[] { "Id", "PreyId" }, operation.Columns); - AssertMultidimensionalArray( - operation.Values, + v => Assert.Equal(33, v), v => Assert.Equal(23, v), v => Assert.Null(v)); }, @@ -6461,15 +6439,7 @@ public void Change_TPH_to_TPT_with_FKs_and_seed_data_readonly_discriminator() AssertMultidimensionalArray( operation.Values, v => Assert.Equal(23, v), - v => Assert.Null(v)); - }, - o => - { - var operation = Assert.IsType(o); - Assert.Equal("Animal", operation.Table); - Assert.Equal(new[] { "Id", "MouseId" }, operation.Columns); - AssertMultidimensionalArray( - operation.Values, + v => Assert.Null(v), v => Assert.Equal(33, v), v => Assert.Null(v)); }, @@ -6495,15 +6465,7 @@ public void Change_TPH_to_TPT_with_FKs_and_seed_data_readonly_discriminator() AssertMultidimensionalArray( operation.Values, v => Assert.Equal(22, v), - v => Assert.Equal(33, v)); - }, - o => - { - var operation = Assert.IsType(o); - Assert.Equal("Dogs", operation.Table); - Assert.Equal(new[] { "Id", "PreyId" }, operation.Columns); - AssertMultidimensionalArray( - operation.Values, + v => Assert.Equal(33, v), v => Assert.Equal(23, v), v => Assert.Null(v)); }, @@ -6747,15 +6709,7 @@ public void Change_TPH_to_TPT_with_FKs_and_seed_data_readonly_discriminator() operation.Values, v => Assert.Equal(31, v), v => Assert.Equal("Mouse", v), - v => Assert.Null(v)); - }, - o => - { - var operation = Assert.IsType(o); - Assert.Equal("Animal", operation.Table); - Assert.Equal(new[] { "Id", "Discriminator", "MouseId" }, operation.Columns); - AssertMultidimensionalArray( - operation.Values, + v => Assert.Null(v), v => Assert.Equal(32, v), v => Assert.Equal("Mouse", v), v => Assert.Null(v)); @@ -6771,42 +6725,15 @@ public void Change_TPH_to_TPT_with_FKs_and_seed_data_readonly_discriminator() v => Assert.Equal(11, v), v => Assert.Equal("Cat", v), v => Assert.Equal(31, v), - v => Assert.Null(v)); - }, - o => - { - var operation = Assert.IsType(o); - Assert.Equal("Animal", operation.Table); - Assert.Equal(new[] { "Id", "Discriminator", "MouseId", "PreyId" }, operation.Columns); - Assert.Null(operation.ColumnTypes); - AssertMultidimensionalArray( - operation.Values, + v => Assert.Null(v), v => Assert.Equal(12, v), v => Assert.Equal("Cat", v), v => Assert.Equal(32, v), - v => Assert.Null(v)); - }, - o => - { - var operation = Assert.IsType(o); - Assert.Equal("Animal", operation.Table); - Assert.Equal(new[] { "Id", 
"Discriminator", "MouseId", "PreyId" }, operation.Columns); - Assert.Null(operation.ColumnTypes); - AssertMultidimensionalArray( - operation.Values, + v => Assert.Null(v), v => Assert.Equal(21, v), v => Assert.Equal("Dog", v), v => Assert.Null(v), - v => Assert.Equal(31, v)); - }, - o => - { - var operation = Assert.IsType(o); - Assert.Equal("Animal", operation.Table); - Assert.Equal(new[] { "Id", "Discriminator", "MouseId", "PreyId" }, operation.Columns); - Assert.Null(operation.ColumnTypes); - AssertMultidimensionalArray( - operation.Values, + v => Assert.Equal(31, v), v => Assert.Equal(22, v), v => Assert.Equal("Dog", v), v => Assert.Null(v), @@ -10352,14 +10279,7 @@ public void SeedData_all_operations() m.Values, v => Assert.Equal(11111, v), v => Assert.Equal(0, v), - v => Assert.Equal("", v)); - }, - o => - { - var m = Assert.IsType(o); - Assert.Null(m.ColumnTypes); - AssertMultidimensionalArray( - m.Values, + v => Assert.Equal("", v), v => Assert.Equal(11112, v), v => Assert.Equal(1, v), v => Assert.Equal("new", v)); @@ -10751,14 +10671,7 @@ private void SeedData_with_navigation_properties(Action buildTarge AssertMultidimensionalArray( m.Values, v => Assert.Equal(38, v), - v => Assert.Equal("newblog.url", v)); - }, - o => - { - var m = Assert.IsType(o); - Assert.Equal("Blog", m.Table); - AssertMultidimensionalArray( - m.Values, + v => Assert.Equal("newblog.url", v), v => Assert.Equal(316, v), v => Assert.Equal("nowitexists.blog", v)); }, diff --git a/test/EFCore.Relational.Tests/Migrations/Internal/MigrationsModelDifferTestBase.cs b/test/EFCore.Relational.Tests/Migrations/Internal/MigrationsModelDifferTestBase.cs index ee585e1ab3d..4fe5686ccc7 100644 --- a/test/EFCore.Relational.Tests/Migrations/Internal/MigrationsModelDifferTestBase.cs +++ b/test/EFCore.Relational.Tests/Migrations/Internal/MigrationsModelDifferTestBase.cs @@ -82,24 +82,7 @@ protected void Execute( } protected void AssertMultidimensionalArray(T[,] values, params Action[] assertions) - => Assert.Collection(ToOnedimensionalArray(values), assertions); - - protected static T[] ToOnedimensionalArray(T[,] values, bool firstDimension = false) - { - Check.DebugAssert( - values.GetLength(firstDimension ? 1 : 0) == 1, - $"Length of dimension {(firstDimension ? 1 : 0)} is not 1."); - - var result = new T[values.Length]; - for (var i = 0; i < values.Length; i++) - { - result[i] = firstDimension - ? values[i, 0] - : values[0, i]; - } - - return result; - } + => Assert.Collection(values.Cast(), assertions); protected static T[][] ToJaggedArray(T[,] twoDimensionalArray, bool firstDimension = false) { diff --git a/test/EFCore.Relational.Tests/TestUtilities/TestModificationCommandBatch.cs b/test/EFCore.Relational.Tests/TestUtilities/TestModificationCommandBatch.cs index e27a95b4a16..cbc5157338f 100644 --- a/test/EFCore.Relational.Tests/TestUtilities/TestModificationCommandBatch.cs +++ b/test/EFCore.Relational.Tests/TestUtilities/TestModificationCommandBatch.cs @@ -9,7 +9,7 @@ public TestModificationCommandBatch( ModificationCommandBatchFactoryDependencies dependencies, int? maxBatchSize) : base(dependencies) - => MaxBatchSize = maxBatchSize ?? 1; + => MaxBatchSize = maxBatchSize ?? 
42; protected override int MaxBatchSize { get; } } diff --git a/test/EFCore.Relational.Tests/Update/CommandBatchPreparerTest.cs b/test/EFCore.Relational.Tests/Update/CommandBatchPreparerTest.cs index 4cbd36ae702..b95c064e93b 100644 --- a/test/EFCore.Relational.Tests/Update/CommandBatchPreparerTest.cs +++ b/test/EFCore.Relational.Tests/Update/CommandBatchPreparerTest.cs @@ -126,6 +126,51 @@ public void BatchCommands_creates_valid_batch_for_deleted_entities() Assert.False(columnMod.IsWrite); } + [ConditionalFact] + public void BatchCommands_does_not_create_separate_batch_without_principal_key_database_generation() + { + var configuration = CreateContextServices(CreateFKOneToManyModelWithGeneratedIds()); + var stateManager = configuration.GetRequiredService(); + + var entry = stateManager.GetOrCreateEntry(new FakeEntity { Id = 42, Value = "Test" }); + var relatedEntry1 = stateManager.GetOrCreateEntry(new RelatedFakeEntity { Id = 100, RelatedId = 42 }); + var relatedEntry2 = stateManager.GetOrCreateEntry(new RelatedFakeEntity { Id = 101, RelatedId = 42 }); + entry.SetEntityState(EntityState.Added); + relatedEntry1.SetEntityState(EntityState.Added); + relatedEntry2.SetEntityState(EntityState.Added); + + var batches = CreateBatches(new[] { relatedEntry1, entry, relatedEntry2 }, new UpdateAdapter(stateManager)); + var batch = Assert.Single(batches); + + Assert.Equal( + new[] { entry, relatedEntry1, relatedEntry2 }, + batch.ModificationCommands.Select(c => c.Entries.Single())); + } + + [ConditionalFact] + public void BatchCommands_creates_separate_batch_with_principal_key_database_generation() + { + var configuration = CreateContextServices(CreateFKOneToManyModelWithGeneratedIds()); + var stateManager = configuration.GetRequiredService(); + + var entry = stateManager.GetOrCreateEntry(new FakeEntity { Value = "Test" }); + entry.SetEntityState(EntityState.Added); + + var temporaryIdValue = entry.GetCurrentValue(entry.EntityType.GetProperty(nameof(FakeEntity.Id))); + + var relatedEntry1 = stateManager.GetOrCreateEntry(new RelatedFakeEntity { Id = 1, RelatedId = temporaryIdValue }); + var relatedEntry2 = stateManager.GetOrCreateEntry(new RelatedFakeEntity { Id = 2, RelatedId = temporaryIdValue }); + relatedEntry1.SetEntityState(EntityState.Added); + relatedEntry2.SetEntityState(EntityState.Added); + + var batches = CreateBatches(new[] { relatedEntry1, entry, relatedEntry2 }, new UpdateAdapter(stateManager)); + + Assert.Collection( + batches, + b => Assert.Same(entry, b.ModificationCommands.Single().Entries.Single()), + b => Assert.Equal(new[] { relatedEntry1, relatedEntry2 }, b.ModificationCommands.Select(m => m.Entries.Single()))); + } + [ConditionalFact] public void BatchCommands_sorts_related_added_entities() { @@ -142,11 +187,12 @@ public void BatchCommands_sorts_related_added_entities() new RelatedFakeEntity { Id = 42 }); relatedEntry.SetEntityState(EntityState.Added); - var commandBatches = CreateBatches(new[] { relatedEntry, entry }, modelData); + var batches = CreateBatches(new[] { relatedEntry, entry }, modelData); + var batch = Assert.Single(batches); Assert.Equal( new[] { entry, relatedEntry }, - commandBatches.Select(cb => cb.ModificationCommands.Single()).Select(mc => mc.Entries.Single())); + batch.ModificationCommands.Select(c => c.Entries.Single())); } [ConditionalFact] @@ -165,11 +211,12 @@ public void BatchCommands_sorts_added_and_related_modified_entities() new RelatedFakeEntity { Id = 42 }); relatedEntry.SetEntityState(EntityState.Modified); - var commandBatches = 
CreateBatches(new[] { relatedEntry, entry }, modelData); + var batches = CreateBatches(new[] { relatedEntry, entry }, modelData); + var batch = Assert.Single(batches); Assert.Equal( new[] { entry, relatedEntry }, - commandBatches.Select(cb => cb.ModificationCommands.Single()).Select(mc => mc.Entries.Single())); + batch.ModificationCommands.Select(mc => mc.Entries.Single())); } [ConditionalFact] @@ -188,11 +235,12 @@ public void BatchCommands_sorts_unrelated_entities() var modelData = new UpdateAdapter(stateManager); - var commandBatches = CreateBatches(new[] { secondEntry, firstEntry }, modelData); + var batches = CreateBatches(new[] { secondEntry, firstEntry }, modelData); + var batch = Assert.Single(batches); Assert.Equal( new[] { firstEntry, secondEntry }, - commandBatches.Select(cb => cb.ModificationCommands.Single()).Select(mc => mc.Entries.Single())); + batch.ModificationCommands.Select(c => c.Entries.Single())); } [ConditionalFact] @@ -216,11 +264,12 @@ public void BatchCommands_sorts_entities_when_reparenting() var modelData = new UpdateAdapter(stateManager); - var commandBatches = CreateBatches(new[] { relatedEntry, previousParent, newParent }, modelData); + var batches = CreateBatches(new[] { relatedEntry, previousParent, newParent }, modelData); + var batch = Assert.Single(batches); Assert.Equal( new[] { newParent, relatedEntry, previousParent }, - commandBatches.Select(cb => cb.ModificationCommands.Single()).Select(mc => mc.Entries.Single())); + batch.ModificationCommands.Select(c => c.Entries.Single())); } [ConditionalFact] @@ -243,11 +292,12 @@ public void BatchCommands_sorts_when_reassigning_child() var modelData = new UpdateAdapter(stateManager); - var commandBatches = CreateBatches(new[] { newChild, previousChild }, modelData); + var batches = CreateBatches(new[] { newChild, previousChild }, modelData); + var batch = Assert.Single(batches); Assert.Equal( new[] { previousChild, newChild }, - commandBatches.Select(cb => cb.ModificationCommands.Single()).Select(mc => mc.Entries.Single())); + batch.ModificationCommands.Select(c => c.Entries.Single())); } [ConditionalFact] @@ -278,12 +328,12 @@ public void BatchCommands_sorts_entities_while_reassigning_child_tree() var modelData = new UpdateAdapter(stateManager); - var sortedEntities = CreateBatches(new[] { newEntity, newChildEntity, oldEntity, oldChildEntity }, modelData) - .Select(cb => cb.ModificationCommands.Single()).Select(mc => mc.Entries.Single()).ToArray(); + var batches = CreateBatches(new[] { newEntity, newChildEntity, oldEntity, oldChildEntity }, modelData); + var batch = Assert.Single(batches); Assert.Equal( - new IUpdateEntry[] { oldChildEntity, oldEntity, newEntity, newChildEntity }, - sortedEntities); + new[] { oldChildEntity, oldEntity, newEntity, newChildEntity }, + batch.ModificationCommands.Select(c => c.Entries.Single())); } [ConditionalFact] @@ -291,30 +341,27 @@ public void BatchCommands_creates_batches_lazily() { var configuration = RelationalTestHelpers.Instance.CreateContextServices( new ServiceCollection().AddScoped(), - CreateSimpleFKModel()); + CreateFKOneToManyModelWithGeneratedIds()); var stateManager = configuration.GetRequiredService(); - var fakeEntity = new FakeEntity { Id = 42, Value = "Test" }; - var entry = stateManager.GetOrCreateEntry(fakeEntity); + var entry = stateManager.GetOrCreateEntry(new FakeEntity { Value = "Test" }); entry.SetEntityState(EntityState.Added); - var relatedEntry = stateManager.GetOrCreateEntry( - new RelatedFakeEntity { Id = 42 }); + var temporaryIdValue = 
entry.GetCurrentValue(entry.EntityType.GetProperty(nameof(FakeEntity.Id))); + var relatedEntry = stateManager.GetOrCreateEntry(new RelatedFakeEntity { RelatedId = temporaryIdValue }); relatedEntry.SetEntityState(EntityState.Added); var factory = (TestModificationCommandBatchFactory)configuration.GetService(); - var modelData = new UpdateAdapter(stateManager); - - var commandBatches = CreateCommandBatchPreparer(factory).BatchCommands(new[] { relatedEntry, entry }, modelData); + var batches = CreateCommandBatchPreparer(factory).BatchCommands(new[] { relatedEntry, entry }, new UpdateAdapter(stateManager)); - using var commandBatchesEnumerator = commandBatches.GetEnumerator(); - commandBatchesEnumerator.MoveNext(); + using var commandBatchesEnumerator = batches.GetEnumerator(); + Assert.True(commandBatchesEnumerator.MoveNext()); Assert.Equal(1, factory.CreateCount); - commandBatchesEnumerator.MoveNext(); + Assert.True(commandBatchesEnumerator.MoveNext()); Assert.Equal(2, factory.CreateCount); } @@ -346,13 +393,12 @@ public void Batch_command_does_not_order_non_unique_index_values() var modelData = new UpdateAdapter(stateManager); - var sortedEntities = - CreateBatches(new[] { fakeEntry, fakeEntry2, relatedFakeEntry }, modelData) - .Select(cb => cb.ModificationCommands.Single()).Select(mc => mc.Entries.Single()).ToArray(); + var batches = CreateBatches(new[] { fakeEntry, fakeEntry2, relatedFakeEntry }, modelData); + var batch = Assert.Single(batches); Assert.Equal( new IUpdateEntry[] { fakeEntry, relatedFakeEntry, fakeEntry2 }, - sortedEntities); + batch.ModificationCommands.Select(c => c.Entries.Single())); } [ConditionalFact] @@ -508,9 +554,14 @@ public void BatchCommands_works_with_duplicate_values_for_unique_indexes() var modelData = new UpdateAdapter(stateManager); - var batches = CreateBatches(new[] { fakeEntry, fakeEntry2 }, modelData); + var batches = CreateBatches(new[] { fakeEntry2, fakeEntry }, modelData); + var batch = Assert.Single(batches); - Assert.Equal(2, batches.Count); + // The DELETE must be ordered before the UPDATE, otherwise we'd get a unique constraint violation. 
+ Assert.Collection( + batch.ModificationCommands, + e => Assert.Equal(EntityState.Deleted, e.EntityState), + e => Assert.Equal(EntityState.Modified, e.EntityState)); } [ConditionalFact] @@ -914,15 +965,15 @@ public void BatchCommands_creates_batch_on_incomplete_updates_for_shared_table_n } else { - Assert.Equal(2, commandBatches.Count); - Assert.Equal(1, commandBatches.First().ModificationCommands.Count); + var batch = Assert.Single(commandBatches); + Assert.Equal(2, batch.ModificationCommands.Count); - var command = commandBatches.First().ModificationCommands.Single(); - Assert.Equal(EntityState.Modified, command.EntityState); + var firstCommand = batch.ModificationCommands[0]; + Assert.Equal(EntityState.Modified, firstCommand.EntityState); - Assert.Equal(2, command.ColumnModifications.Count); + Assert.Equal(2, firstCommand.ColumnModifications.Count); - var columnMod = command.ColumnModifications[0]; + var columnMod = firstCommand.ColumnModifications[0]; Assert.Equal(nameof(DerivedRelatedFakeEntity.Id), columnMod.ColumnName); Assert.Equal(first.Id, columnMod.Value); @@ -932,7 +983,7 @@ public void BatchCommands_creates_batch_on_incomplete_updates_for_shared_table_n Assert.False(columnMod.IsRead); Assert.False(columnMod.IsWrite); - columnMod = command.ColumnModifications[1]; + columnMod = firstCommand.ColumnModifications[1]; Assert.Equal(nameof(AnotherFakeEntity.AnotherId), columnMod.ColumnName); Assert.Equal(second.AnotherId, columnMod.Value); @@ -1003,6 +1054,28 @@ private static IModel CreateSimpleFKModel() return modelBuilder.Model.FinalizeModel(); } + private static IModel CreateFKOneToManyModelWithGeneratedIds() + { + var modelBuilder = RelationalTestHelpers.Instance.CreateConventionBuilder(); + + modelBuilder.Entity( + b => + { + b.Property(c => c.Id).ValueGeneratedOnAdd(); + b.Ignore(c => c.UniqueValue); + b.Ignore(c => c.RelatedId); + + b.HasMany() + .WithOne() + .HasForeignKey(c => c.RelatedId); + }); + + modelBuilder.Entity( + b => b.Property(c => c.Id).ValueGeneratedOnAdd()); + + return modelBuilder.Model.FinalizeModel(); + } + private static IModel CreateCyclicFKModel() { var modelBuilder = RelationalTestHelpers.Instance.CreateConventionBuilder(); diff --git a/test/EFCore.SqlServer.FunctionalTests/BatchingTest.cs b/test/EFCore.SqlServer.FunctionalTests/BatchingTest.cs index 921698d36bf..c450fc48f24 100644 --- a/test/EFCore.SqlServer.FunctionalTests/BatchingTest.cs +++ b/test/EFCore.SqlServer.FunctionalTests/BatchingTest.cs @@ -305,7 +305,7 @@ public void Inserts_are_batched_only_when_necessary(int minBatchSize) var owner = new Owner(); context.Owners.Add(owner); - for (var i = 1; i < 4; i++) + for (var i = 1; i < 3; i++) { var blog = new Blog { Id = Guid.NewGuid(), Owner = owner }; @@ -325,7 +325,7 @@ public void Inserts_are_batched_only_when_necessary(int minBatchSize) .GenerateMessage(3, 4), Fixture.TestSqlLoggerFactory.Log.Select(l => l.Message)); - Assert.Equal(minBatchSize <= 3 ? 2 : 4, Fixture.TestSqlLoggerFactory.SqlStatements.Count); + Assert.Equal(minBatchSize <= 3 ? 
1 : 3, Fixture.TestSqlLoggerFactory.SqlStatements.Count); }, context => AssertDatabaseState(context, false, expectedBlogs)); } diff --git a/test/EFCore.SqlServer.FunctionalTests/Query/TPTInheritanceQuerySqlServerTest.cs b/test/EFCore.SqlServer.FunctionalTests/Query/TPTInheritanceQuerySqlServerTest.cs index a31d28b8f50..98b38898ae3 100644 --- a/test/EFCore.SqlServer.FunctionalTests/Query/TPTInheritanceQuerySqlServerTest.cs +++ b/test/EFCore.SqlServer.FunctionalTests/Query/TPTInheritanceQuerySqlServerTest.cs @@ -94,43 +94,34 @@ public override void Can_insert_update_delete() base.Can_insert_update_delete(); AssertSql( - @"SELECT TOP(2) [c].[Id], [c].[Name] + @"SELECT TOP(2) [c].[Id], [c].[Name] FROM [Countries] AS [c] WHERE [c].[Id] = 1", - // - @"@p0='Apteryx owenii' (Nullable = false) (Size = 100) + // + @"@p0='Apteryx owenii' (Nullable = false) (Size = 100) @p1='1' @p2='Little spotted kiwi' (Size = 4000) - -SET IMPLICIT_TRANSACTIONS OFF; -SET NOCOUNT ON; -INSERT INTO [Animals] ([Species], [CountryId], [Name]) -VALUES (@p0, @p1, @p2);", - // - @"@p3='Apteryx owenii' (Nullable = false) (Size = 100) +@p3='Apteryx owenii' (Nullable = false) (Size = 100) @p4=NULL (Size = 100) @p5='True' - -SET IMPLICIT_TRANSACTIONS OFF; -SET NOCOUNT ON; -INSERT INTO [Birds] ([Species], [EagleId], [IsFlightless]) -VALUES (@p3, @p4, @p5);", - // - @"@p6='Apteryx owenii' (Nullable = false) (Size = 100) +@p6='Apteryx owenii' (Nullable = false) (Size = 100) @p7='0' (Size = 1) -SET IMPLICIT_TRANSACTIONS OFF; SET NOCOUNT ON; +INSERT INTO [Animals] ([Species], [CountryId], [Name]) +VALUES (@p0, @p1, @p2); +INSERT INTO [Birds] ([Species], [EagleId], [IsFlightless]) +VALUES (@p3, @p4, @p5); INSERT INTO [Kiwi] ([Species], [FoundOn]) VALUES (@p6, @p7);", - // - @"SELECT TOP(2) [a].[Species], [a].[CountryId], [a].[Name], [b].[EagleId], [b].[IsFlightless], [k].[FoundOn] + // + @"SELECT TOP(2) [a].[Species], [a].[CountryId], [a].[Name], [b].[EagleId], [b].[IsFlightless], [k].[FoundOn] FROM [Animals] AS [a] INNER JOIN [Birds] AS [b] ON [a].[Species] = [b].[Species] INNER JOIN [Kiwi] AS [k] ON [a].[Species] = [k].[Species] WHERE [a].[Species] LIKE N'%owenii'", - // - @"@p1='Apteryx owenii' (Nullable = false) (Size = 100) + // + @"@p1='Apteryx owenii' (Nullable = false) (Size = 100) @p0='Aquila chrysaetos canadensis' (Size = 100) SET IMPLICIT_TRANSACTIONS OFF; @@ -138,38 +129,29 @@ FROM [Animals] AS [a] UPDATE [Birds] SET [EagleId] = @p0 OUTPUT 1 WHERE [Species] = @p1;", - // - @"SELECT TOP(2) [a].[Species], [a].[CountryId], [a].[Name], [b].[EagleId], [b].[IsFlightless], [k].[FoundOn] + // + @"SELECT TOP(2) [a].[Species], [a].[CountryId], [a].[Name], [b].[EagleId], [b].[IsFlightless], [k].[FoundOn] FROM [Animals] AS [a] INNER JOIN [Birds] AS [b] ON [a].[Species] = [b].[Species] INNER JOIN [Kiwi] AS [k] ON [a].[Species] = [k].[Species] WHERE [a].[Species] LIKE N'%owenii'", - // - @"@p0='Apteryx owenii' (Nullable = false) (Size = 100) + // + @"@p0='Apteryx owenii' (Nullable = false) (Size = 100) +@p1='Apteryx owenii' (Nullable = false) (Size = 100) +@p2='Apteryx owenii' (Nullable = false) (Size = 100) -SET IMPLICIT_TRANSACTIONS OFF; SET NOCOUNT ON; DELETE FROM [Kiwi] OUTPUT 1 -WHERE [Species] = @p0;", - // - @"@p1='Apteryx owenii' (Nullable = false) (Size = 100) - -SET IMPLICIT_TRANSACTIONS OFF; -SET NOCOUNT ON; +WHERE [Species] = @p0; DELETE FROM [Birds] OUTPUT 1 -WHERE [Species] = @p1;", - // - @"@p2='Apteryx owenii' (Nullable = false) (Size = 100) - -SET IMPLICIT_TRANSACTIONS OFF; -SET NOCOUNT ON; +WHERE [Species] = @p1; 
DELETE FROM [Animals] OUTPUT 1 WHERE [Species] = @p2;", - // - @"SELECT COUNT(*) + // + @"SELECT COUNT(*) FROM [Animals] AS [a] INNER JOIN [Birds] AS [b] ON [a].[Species] = [b].[Species] INNER JOIN [Kiwi] AS [k] ON [a].[Species] = [k].[Species] @@ -533,11 +515,19 @@ FROM [Animals] AS [a] @"@p0='Haliaeetus leucocephalus' (Nullable = false) (Size = 100) @p1='0' @p2='Bald eagle' (Size = 4000) +@p3='Haliaeetus leucocephalus' (Nullable = false) (Size = 100) +@p4='Apteryx haastii' (Size = 100) +@p5='False' +@p6='Haliaeetus leucocephalus' (Nullable = false) (Size = 100) +@p7='1' -SET IMPLICIT_TRANSACTIONS OFF; SET NOCOUNT ON; INSERT INTO [Animals] ([Species], [CountryId], [Name]) -VALUES (@p0, @p1, @p2);"); +VALUES (@p0, @p1, @p2); +INSERT INTO [Birds] ([Species], [EagleId], [IsFlightless]) +VALUES (@p3, @p4, @p5); +INSERT INTO [Eagle] ([Species], [Group]) +VALUES (@p6, @p7);"); } public override async Task Subquery_OfType(bool async) diff --git a/test/EFCore.SqlServer.FunctionalTests/SqlServerEndToEndTest.cs b/test/EFCore.SqlServer.FunctionalTests/SqlServerEndToEndTest.cs index b41d24206a1..cf8e657b55c 100644 --- a/test/EFCore.SqlServer.FunctionalTests/SqlServerEndToEndTest.cs +++ b/test/EFCore.SqlServer.FunctionalTests/SqlServerEndToEndTest.cs @@ -707,13 +707,13 @@ public async Task Can_save_changes() Assert.Equal(EntityState.Unchanged, db.Entry(toAdd).State); Assert.DoesNotContain(toDelete, db.ChangeTracker.Entries().Select(e => e.Entity)); - Assert.Equal(4, Fixture.TestSqlLoggerFactory.SqlStatements.Count); + Assert.Equal(3, Fixture.TestSqlLoggerFactory.SqlStatements.Count); Assert.Contains("SELECT", Fixture.TestSqlLoggerFactory.SqlStatements[0]); Assert.Contains("SELECT", Fixture.TestSqlLoggerFactory.SqlStatements[1]); Assert.Contains("@p0='" + deletedId, Fixture.TestSqlLoggerFactory.SqlStatements[2]); Assert.Contains("DELETE", Fixture.TestSqlLoggerFactory.SqlStatements[2]); Assert.Contains("UPDATE", Fixture.TestSqlLoggerFactory.SqlStatements[2]); - Assert.Contains("INSERT", Fixture.TestSqlLoggerFactory.SqlStatements[3]); + Assert.Contains("INSERT", Fixture.TestSqlLoggerFactory.SqlStatements[2]); var rows = await testDatabase.ExecuteScalarAsync( $"SELECT Count(*) FROM [dbo].[Blog] WHERE Id = {updatedId} AND Name = 'Blog is Updated'"); diff --git a/test/EFCore.SqlServer.FunctionalTests/Update/StoreValueGenerationIdentitySqlServerTest.cs b/test/EFCore.SqlServer.FunctionalTests/Update/StoreValueGenerationIdentitySqlServerTest.cs index b68da2be60e..6e4af4f06e4 100644 --- a/test/EFCore.SqlServer.FunctionalTests/Update/StoreValueGenerationIdentitySqlServerTest.cs +++ b/test/EFCore.SqlServer.FunctionalTests/Update/StoreValueGenerationIdentitySqlServerTest.cs @@ -127,7 +127,7 @@ OUTPUT 1 #endregion Single operation - #region Two operations with same entity type + #region Same two operations with same entity type public override async Task Add_Add_with_same_entity_type_and_generated_values(bool async) { @@ -238,9 +238,9 @@ OUTPUT 1 WHERE [Id] = @p1;"); } - #endregion Two operations with same entity type + #endregion Same two operations with same entity type - #region Two operations with different entity types + #region Same two operations with different entity types public override async Task Add_Add_with_different_entity_types_and_generated_values(bool async) { @@ -349,7 +349,30 @@ OUTPUT 1 WHERE [Id] = @p1;"); } - #endregion Two operations with different entity types + #endregion Same two operations with different entity types + + #region Different two operations + + [ConditionalTheory] + 
[MemberData(nameof(IsAsyncData))] + public override async Task Delete_Add_with_same_entity_types(bool async) + { + await Test(EntityState.Deleted, EntityState.Added, GeneratedValues.Some, async, withSameEntityType: true); + + AssertSql( + @"@p0='1' +@p1='1001' + +SET NOCOUNT ON; +DELETE FROM [WithSomeDatabaseGenerated] +OUTPUT 1 +WHERE [Id] = @p0; +INSERT INTO [WithSomeDatabaseGenerated] ([Data2]) +OUTPUT INSERTED.[Id], INSERTED.[Data1] +VALUES (@p1);"); + } + + #endregion Different two operations protected override async Task Test( EntityState firstOperationType, diff --git a/test/EFCore.SqlServer.FunctionalTests/Update/StoreValueGenerationIdentityTriggerSqlServerTest.cs b/test/EFCore.SqlServer.FunctionalTests/Update/StoreValueGenerationIdentityTriggerSqlServerTest.cs index 2dffa61a266..cbc19b36929 100644 --- a/test/EFCore.SqlServer.FunctionalTests/Update/StoreValueGenerationIdentityTriggerSqlServerTest.cs +++ b/test/EFCore.SqlServer.FunctionalTests/Update/StoreValueGenerationIdentityTriggerSqlServerTest.cs @@ -133,7 +133,7 @@ DELETE FROM [WithSomeDatabaseGenerated] #endregion Single operation - #region Two operations with same entity type + #region Same two operations with same entity type public override async Task Add_Add_with_same_entity_type_and_generated_values(bool async) { @@ -259,9 +259,9 @@ DELETE FROM [WithSomeDatabaseGenerated] SELECT @@ROWCOUNT;"); } - #endregion Two operations with same entity type + #endregion Same two operations with same entity type - #region Two operations with different entity types + #region Same two operations with different entity types public override async Task Add_Add_with_different_entity_types_and_generated_values(bool async) { @@ -323,7 +323,6 @@ FROM [WithAllDatabaseGenerated2] WHERE @@ROWCOUNT = 1 AND [Id] = scope_identity();"); } - public override async Task Modify_Modify_with_different_entity_types_and_generated_values(bool async) { await base.Modify_Modify_with_different_entity_types_and_generated_values(async); @@ -388,7 +387,7 @@ DELETE FROM [WithSomeDatabaseGenerated2] SELECT @@ROWCOUNT;"); } - #endregion Two operations with different entity types + #endregion Same two operations with different entity types public override async Task Three_Add_use_batched_inserts(bool async) { diff --git a/test/EFCore.SqlServer.FunctionalTests/Update/StoreValueGenerationSequenceSqlServerTest.cs b/test/EFCore.SqlServer.FunctionalTests/Update/StoreValueGenerationSequenceSqlServerTest.cs index b32fade1836..28dcd2bed1d 100644 --- a/test/EFCore.SqlServer.FunctionalTests/Update/StoreValueGenerationSequenceSqlServerTest.cs +++ b/test/EFCore.SqlServer.FunctionalTests/Update/StoreValueGenerationSequenceSqlServerTest.cs @@ -129,7 +129,7 @@ OUTPUT 1 #endregion Single operation - #region Two operations with same entity type + #region Same two operations with same entity type public override async Task Add_Add_with_same_entity_type_and_generated_values(bool async) { @@ -242,9 +242,9 @@ OUTPUT 1 WHERE [Id] = @p1;"); } - #endregion Two operations with same entity type + #endregion Same two operations with same entity type - #region Two operations with different entity types + #region Same two operations with different entity types public override async Task Add_Add_with_different_entity_types_and_generated_values(bool async) { @@ -352,7 +352,7 @@ OUTPUT 1 WHERE [Id] = @p1;"); } - #endregion Two operations with different entity types + #endregion Same two operations with different entity types protected override async Task Test( EntityState firstOperationType, diff 
--git a/test/EFCore.SqlServer.FunctionalTests/Update/StoreValueGenerationSequenceTriggerSqlServerTest.cs b/test/EFCore.SqlServer.FunctionalTests/Update/StoreValueGenerationSequenceTriggerSqlServerTest.cs index 055a0351ca8..2f7ce975834 100644 --- a/test/EFCore.SqlServer.FunctionalTests/Update/StoreValueGenerationSequenceTriggerSqlServerTest.cs +++ b/test/EFCore.SqlServer.FunctionalTests/Update/StoreValueGenerationSequenceTriggerSqlServerTest.cs @@ -141,7 +141,7 @@ DELETE FROM [WithSomeDatabaseGenerated] #endregion Single operation - #region Two operations with same entity type + #region Same two operations with same entity type public override async Task Add_Add_with_same_entity_type_and_generated_values(bool async) { @@ -266,9 +266,9 @@ DELETE FROM [WithSomeDatabaseGenerated] SELECT @@ROWCOUNT;"); } - #endregion Two operations with same entity type + #endregion Same two operations with same entity type - #region Two operations with different entity types + #region Same two operations with different entity types public override async Task Add_Add_with_different_entity_types_and_generated_values(bool async) { @@ -402,7 +402,7 @@ DELETE FROM [WithSomeDatabaseGenerated2] SELECT @@ROWCOUNT;"); } - #endregion Two operations with different entity types + #endregion Same two operations with different entity types public override async Task Three_Add_use_batched_inserts(bool async) { diff --git a/test/EFCore.Sqlite.FunctionalTests/Update/StoreValueGenerationSqliteTest.cs b/test/EFCore.Sqlite.FunctionalTests/Update/StoreValueGenerationSqliteTest.cs index c4ece3530f3..bcae4d97328 100644 --- a/test/EFCore.Sqlite.FunctionalTests/Update/StoreValueGenerationSqliteTest.cs +++ b/test/EFCore.Sqlite.FunctionalTests/Update/StoreValueGenerationSqliteTest.cs @@ -100,7 +100,7 @@ DELETE FROM ""WithSomeDatabaseGenerated"" #endregion Single operation - #region Two operations with same entity type + #region Same two operations with same entity type public override async Task Add_Add_with_same_entity_type_and_generated_values(bool async) { @@ -214,9 +214,9 @@ DELETE FROM ""WithSomeDatabaseGenerated"" RETURNING 1;"); } - #endregion Two operations with same entity type + #endregion Same two operations with same entity type - #region Two operations with different entity types + #region Same two operations with different entity types public override async Task Add_Add_with_different_entity_types_and_generated_values(bool async) { @@ -330,7 +330,7 @@ DELETE FROM ""WithSomeDatabaseGenerated2"" RETURNING 1;"); } - #endregion Two operations with different entity types + #endregion Same two operations with different entity types public class StoreValueGenerationSqliteFixture : StoreValueGenerationFixtureBase { diff --git a/test/EFCore.Tests/Utilities/MultigraphTest.cs b/test/EFCore.Tests/Utilities/MultigraphTest.cs index f2d827de3f6..193d6f0cd7c 100644 --- a/test/EFCore.Tests/Utilities/MultigraphTest.cs +++ b/test/EFCore.Tests/Utilities/MultigraphTest.cs @@ -392,6 +392,54 @@ string formatter(IEnumerable>> data) Assert.Equal(new[] { edgeThree }, cycleData[vertexThree].Item3); } + [ConditionalFact] + public void TopologicalSort_with_secondary_sort() + { + var vertexOne = new Vertex { Id = 1 }; + var vertexTwo = new Vertex { Id = 2 }; + var vertexThree = new Vertex { Id = 3 }; + var vertexFour = new Vertex { Id = 4 }; + + var edgeOne = new Edge { Id = 1 }; + var edgeTwo = new Edge { Id = 2 }; + + var graph = new Multigraph((v1, v2) => Comparer.Default.Compare(v1.Id, v2.Id)); + graph.AddVertices(new[] { vertexFour, vertexThree, 
vertexTwo, vertexOne }); + + // 1 -> {3} + graph.AddEdge(vertexOne, vertexThree, edgeOne); + // 2 -> {4} + graph.AddEdge(vertexTwo, vertexFour, edgeTwo); + + Assert.Equal( + new[] { vertexOne, vertexTwo, vertexThree, vertexFour }, + graph.TopologicalSort().ToArray()); + } + + [ConditionalFact] + public void TopologicalSort_without_secondary_sort() + { + var vertexOne = new Vertex { Id = 1 }; + var vertexTwo = new Vertex { Id = 2 }; + var vertexThree = new Vertex { Id = 3 }; + var vertexFour = new Vertex { Id = 4 }; + + var edgeOne = new Edge { Id = 1 }; + var edgeTwo = new Edge { Id = 2 }; + + var graph = new Multigraph(); + graph.AddVertices(new[] { vertexFour, vertexThree, vertexTwo, vertexOne }); + + // 1 -> {3} + graph.AddEdge(vertexOne, vertexThree, edgeOne); + // 2 -> {4} + graph.AddEdge(vertexTwo, vertexFour, edgeTwo); + + Assert.Equal( + new[] { vertexTwo, vertexOne, vertexFour, vertexThree }, + graph.TopologicalSort().ToArray()); + } + [ConditionalFact] public void BatchingTopologicalSort_throws_with_formatted_message_when_cycle_cannot_be_broken() { @@ -791,6 +839,81 @@ public void BatchingTopologicalSort_sorts_leafy_cycle() Assert.Throws(() => graph.BatchingTopologicalSort()).Message); } + [ConditionalFact] + public void BatchingTopologicalSort_with_secondary_sort() + { + var vertexOne = new Vertex { Id = 1 }; + var vertexTwo = new Vertex { Id = 2 }; + var vertexThree = new Vertex { Id = 3 }; + var vertexFour = new Vertex { Id = 4 }; + + var edgeOne = new Edge { Id = 1 }; + var edgeTwo = new Edge { Id = 2 }; + + var graph = new Multigraph((v1, v2) => Comparer.Default.Compare(v1.Id, v2.Id)); + graph.AddVertices(new[] { vertexFour, vertexThree, vertexTwo, vertexOne }); + + // 1 -> {3} + graph.AddEdge(vertexOne, vertexThree, edgeOne); + // 2 -> {4} + graph.AddEdge(vertexTwo, vertexFour, edgeTwo); + + Assert.Equal( + new[] { vertexOne, vertexTwo, vertexThree, vertexFour }, + graph.BatchingTopologicalSort().Single().ToArray()); + } + + [ConditionalFact] + public void BatchingTopologicalSort_without_secondary_sort() + { + var vertexOne = new Vertex { Id = 1 }; + var vertexTwo = new Vertex { Id = 2 }; + var vertexThree = new Vertex { Id = 3 }; + var vertexFour = new Vertex { Id = 4 }; + + var edgeOne = new Edge { Id = 1 }; + var edgeTwo = new Edge { Id = 2 }; + + var graph = new Multigraph(); + graph.AddVertices(new[] { vertexFour, vertexThree, vertexTwo, vertexOne }); + + // 1 -> {3} + graph.AddEdge(vertexOne, vertexThree, edgeOne); + // 2 -> {4} + graph.AddEdge(vertexTwo, vertexFour, edgeTwo); + + Assert.Equal( + new[] { vertexTwo, vertexOne, vertexFour, vertexThree }, + graph.BatchingTopologicalSort().Single().ToArray()); + } + + [ConditionalFact] + public void BatchingTopologicalSort_with_batching_boundary_edge() + { + var vertexOne = new Vertex { Id = 1 }; + var vertexTwo = new Vertex { Id = 2 }; + var vertexThree = new Vertex { Id = 3 }; + var vertexFour = new Vertex { Id = 4 }; + + var edgeOne = new Edge { Id = 1 }; + var edgeTwo = new Edge { Id = 2 }; + + var graph = new Multigraph((v1, v2) => Comparer.Default.Compare(v1.Id, v2.Id)); + graph.AddVertices(new[] { vertexFour, vertexThree, vertexTwo, vertexOne }); + + // 1 -> {3} + graph.AddEdge(vertexOne, vertexThree, edgeOne, requiresBatchingBoundary: true); + // 2 -> {4} + graph.AddEdge(vertexTwo, vertexFour, edgeTwo); + + var batches = graph.BatchingTopologicalSort(); + + Assert.Collection( + batches, + b => Assert.Equal(new[] { vertexOne, vertexTwo }, b.ToArray()), + b => Assert.Equal(new[] { vertexThree, vertexFour }, 
b.ToArray())); + } + private static IMutableModel CreateModel() => new Model(); }
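
A minimal usage sketch of the reworked sorting API, mirroring the new MultigraphTest cases above. It is illustrative only: Multigraph is an internal utility compiled from src/Shared into EF Core's own projects and test suites, the generic arguments (Multigraph<TVertex, TEdge>) are assumed from the class definition since the diff rendering above drops angle-bracket contents, and the string vertices and int edge payloads are made up for the example.

using Microsoft.EntityFrameworkCore.Utilities;

// Secondary sort comparer: within each roots pass, vertices are ordered deterministically.
var graph = new Multigraph<string, int>(StringComparer.Ordinal);

graph.AddVertices(new[] { "dependent2", "dependent1", "principal" });

// Both dependents can only be saved after the principal; because the edges require a batching
// boundary (as when the principal key is database-generated), they must land in a later batch.
graph.AddEdge("principal", "dependent1", payload: 1, requiresBatchingBoundary: true);
graph.AddEdge("principal", "dependent2", payload: 2, requiresBatchingBoundary: true);

var batches = graph.BatchingTopologicalSort();
// batches[0]: { "principal" }
// batches[1]: { "dependent1", "dependent2" } -- ordered by the secondary comparer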