diff --git a/.openpublishing.redirection.json b/.openpublishing.redirection.json
index 083fc4afb8..3b253e5a48 100644
--- a/.openpublishing.redirection.json
+++ b/.openpublishing.redirection.json
@@ -429,6 +429,11 @@
     "source_path": "entity-framework/core/miscellaneous/configuring-dbcontext.md",
     "redirect_url": "/ef/core/dbcontext-configuration/index",
     "redirect_document_id": false
+  },
+  {
+    "source_path": "entity-framework/core/miscellaneous/context-pooling.md",
+    "redirect_url": "/ef/core/performance/advanced-performance-topics#dbcontext-pooling",
+    "redirect_document_id": false
   }
 ]
}
diff --git a/.vscode/settings.json b/.vscode/settings.json
index 349758fc3c..cfd3301d90 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -12,11 +12,16 @@
   "cSpell.words": [
     "LINQ",
     "dbcontext",
+    "denormalization",
+    "idempotency",
     "mitigations",
     "navigations",
     "parameterizable",
     "pluralizer",
     "queryable",
+    "resultset",
+    "resultsets",
+    "roundtrips",
     "savepoint",
     "savepoints",
     "subquery"
diff --git a/entity-framework/core/miscellaneous/async.md b/entity-framework/core/miscellaneous/async.md
index 56150cb51f..29e1a99882 100644
--- a/entity-framework/core/miscellaneous/async.md
+++ b/entity-framework/core/miscellaneous/async.md
@@ -18,6 +18,9 @@ For more information, see [the general C# asynchronous programming docs](/dotnet
 > [!WARNING]
 > EF Core doesn't support multiple parallel operations being run on the same context instance. You should always wait for an operation to complete before beginning the next operation. This is typically done by using the `await` keyword on each async operation.
 
+> [!WARNING]
+> The async implementation of [Microsoft.Data.SqlClient](https://github.com/dotnet/SqlClient) unfortunately has some known issues on .NET Core (e.g. [#593](https://github.com/dotnet/SqlClient/issues/593), [#601](https://github.com/dotnet/SqlClient/issues/601), and others).
+
 > [!NOTE]
 > EF Core passes cancellation tokens down to the underlying database provider in use (e.g. Microsoft.Data.SqlClient). These tokens may or may not be honored - consult your database provider's documentation.
diff --git a/entity-framework/core/miscellaneous/connection-resiliency.md b/entity-framework/core/miscellaneous/connection-resiliency.md
index 7ab6f986d6..c52d8b9fea 100644
--- a/entity-framework/core/miscellaneous/connection-resiliency.md
+++ b/entity-framework/core/miscellaneous/connection-resiliency.md
@@ -28,6 +28,9 @@ public void ConfigureServices(IServiceCollection services)
 }
 ```
 
+> [!NOTE]
+> Enabling retry on failure causes EF to internally buffer the resultset, which may significantly increase memory requirements for queries returning many results. See [buffering and streaming](xref:core/miscellaneous/efficient-querying#buffering-and-streaming) for more details.
+
 ## Custom execution strategy
 
 There is a mechanism to register a custom execution strategy of your own if you wish to change any of the defaults.
diff --git a/entity-framework/core/miscellaneous/context-pooling.md b/entity-framework/core/miscellaneous/context-pooling.md
deleted file mode 100644
index 2453756000..0000000000
--- a/entity-framework/core/miscellaneous/context-pooling.md
+++ /dev/null
@@ -1,39 +0,0 @@
----
-title: DbContext Pooling
-description: DbContext pooling in Entity Framework Core
-author: rick-anderson
-ms.author: riande
-ms.date: 9/19/2020
-uid: core/miscellaneous/context-pooling
----
-# DbContext pooling
-
-`AddDbContextPool` enables pooling of `DbContext` instances.
-Context pooling can increase throughput in high-scale scenarios such as web servers by reusing context instances, rather than creating new instances for each request.
-
-The typical pattern in an ASP.NET Core app using EF Core involves registering a custom type into the [dependency injection](/aspnet/core/fundamentals/dependency-injection) container and obtaining instances of that type through constructor parameters in controllers or Razor Pages. Using constructor injection, a new context instance is created for each request.
-
-`AddDbContextPool` enables a pool of reusable context instances. To use context pooling, use the `AddDbContextPool` method instead of `AddDbContext` during service registration:
-
-```csharp
-services.AddDbContextPool<BloggingContext>(
-    options => options.UseSqlServer(connectionString));
-```
-
-When `AddDbContextPool` is used, at the time a context instance is requested, EF first checks if there is an instance available in the pool. Once request processing completes, any state on the instance is reset and the instance is returned to the pool.
-
-This is conceptually similar to how connection pooling operates in ADO.NET providers, and has the advantage of saving some of the cost of initializing the context instance.
-
-The `poolSize` parameter of `AddDbContextPool` sets the maximum number of instances retained by the pool. Once `poolSize` is exceeded, new context instances are not cached and EF falls back to the non-pooling behavior of creating instances on demand.
-
-## Limitations
-
-Apps should be profiled and tested to show that context initialization is a significant cost.
-
-`AddDbContextPool` has a few limitations on what can be done in the `OnConfiguring` method of the context.
-
-> [!WARNING]
-> Avoid using context pooling in apps that maintain state - for example, private fields in the context that shouldn't be shared across requests. EF Core only resets the state that it is aware of before adding a context instance to the pool.
-
-Context pooling works by reusing the same context instance across requests. This means that it's effectively registered as a [Singleton](/aspnet/core/fundamentals/dependency-injection#service-lifetimes) in terms of the instance itself, so that it's able to persist.
-
-Context pooling is intended for scenarios where the context configuration, which includes services resolved, is fixed between requests. For cases where [Scoped](/aspnet/core/fundamentals/dependency-injection#service-lifetimes) services are required, or configuration needs to be changed, don't use pooling. The performance gain from pooling is usually negligible except in highly optimized scenarios.
diff --git a/entity-framework/core/modeling/inheritance.md b/entity-framework/core/modeling/inheritance.md
index 77b253396e..53203ac961 100644
--- a/entity-framework/core/modeling/inheritance.md
+++ b/entity-framework/core/modeling/inheritance.md
@@ -87,3 +87,6 @@ CREATE TABLE [RssBlogs] (
 If you are employing bulk configuration, you can retrieve the column name for a specific table by calling `GetColumnName`.
 
 [!code-csharp[Main](../../../samples/core/Modeling/FluentAPI/TPTConfiguration.cs?name=Metadata&highlight=10)]
+
+> [!WARNING]
+> In many cases, TPT shows inferior performance when compared to TPH. [See the performance docs for more information](xref:core/miscellaneous/performance/modeling#inheritance-mapping).
diff --git a/entity-framework/core/performance/_static/actualexecplan.png b/entity-framework/core/performance/_static/actualexecplan.png
new file mode 100644
index 0000000000..ec3936f0eb
Binary files /dev/null and b/entity-framework/core/performance/_static/actualexecplan.png differ
diff --git a/entity-framework/core/performance/advanced-performance-topics.md b/entity-framework/core/performance/advanced-performance-topics.md
new file mode 100644
index 0000000000..a16c9fbde3
--- /dev/null
+++ b/entity-framework/core/performance/advanced-performance-topics.md
@@ -0,0 +1,119 @@
+---
+title: Advanced Performance Topics
+description: Advanced performance topics for Entity Framework Core
+author: rick-anderson
+ms.author: riande
+ms.date: 12/9/2020
+uid: core/performance/advanced-performance-topics
+---
+# Advanced Performance Topics
+
+## DbContext pooling
+
+`AddDbContextPool` enables pooling of `DbContext` instances. Context pooling can increase throughput in high-scale scenarios such as web servers by reusing context instances, rather than creating new instances for each request.
+
+The typical pattern in an ASP.NET Core app using EF Core involves registering a custom type into the [dependency injection](/aspnet/core/fundamentals/dependency-injection) container and obtaining instances of that type through constructor parameters in controllers or Razor Pages. Using constructor injection, a new context instance is created for each request.
+
+`AddDbContextPool` enables a pool of reusable context instances. To use context pooling, use the `AddDbContextPool` method instead of `AddDbContext` during service registration:
+
+```csharp
+services.AddDbContextPool<BloggingContext>(
+    options => options.UseSqlServer(connectionString));
+```
+
+When `AddDbContextPool` is used, at the time a context instance is requested, EF first checks if there is an instance available in the pool. Once request processing completes, any state on the instance is reset and the instance is returned to the pool.
+
+This is conceptually similar to how connection pooling operates in ADO.NET providers, and has the advantage of saving some of the cost of initializing the context instance.
+
+The `poolSize` parameter of `AddDbContextPool` sets the maximum number of instances retained by the pool. Once `poolSize` is exceeded, new context instances are not cached and EF falls back to the non-pooling behavior of creating instances on demand.
+
+### Limitations
+
+Apps should be profiled and tested to show that context initialization is a significant cost.
+
+`AddDbContextPool` has a few limitations on what can be done in the `OnConfiguring` method of the context.
+
+> [!WARNING]
+> Avoid using context pooling in apps that maintain state - for example, private fields in the context that shouldn't be shared across requests. EF Core only resets the state that it is aware of before adding a context instance to the pool.
+
+Context pooling works by reusing the same context instance across requests. This means that it's effectively registered as a [Singleton](/aspnet/core/fundamentals/dependency-injection#service-lifetimes) in terms of the instance itself, so that it's able to persist.
+
+Context pooling is intended for scenarios where the context configuration, which includes services resolved, is fixed between requests. For cases where [Scoped](/aspnet/core/fundamentals/dependency-injection#service-lifetimes) services are required, or configuration needs to be changed, don't use pooling.
+The performance gain from pooling is usually negligible except in highly optimized scenarios.
+
+## Query caching and parameterization
+
+When EF receives a LINQ query tree for execution, it must first "compile" that tree into a SQL query. Because this is a heavy process, EF caches queries by the query tree *shape*: queries with the same structure reuse internally-cached compilation outputs, and can skip repeated compilation. Different queries may still reference different *values*, but as long as these values are properly parameterized, the structure is the same and caching will function properly.
+
+Consider the following two queries:
+
+```csharp
+var blog1 = ctx.Blogs.FirstOrDefault(b => b.Name == "blog1");
+var blog2 = ctx.Blogs.FirstOrDefault(b => b.Name == "blog2");
+```
+
+Since the expression trees contain different constants, the expression trees differ, and each of these queries will be compiled separately by EF Core. In addition, each query produces a slightly different SQL command:
+
+```sql
+SELECT TOP(1) [b].[Id], [b].[Name]
+FROM [Blogs] AS [b]
+WHERE [b].[Name] = N'blog1'
+
+SELECT TOP(1) [b].[Id], [b].[Name]
+FROM [Blogs] AS [b]
+WHERE [b].[Name] = N'blog2'
+```
+
+Because the SQL differs, your database server will likely also need to produce a query plan for both queries, rather than reusing the same plan.
+
+A small modification to your queries can change things considerably:
+
+```csharp
+var blogName = "blog1";
+var blog1 = ctx.Blogs.FirstOrDefault(b => b.Name == blogName);
+blogName = "blog2";
+var blog2 = ctx.Blogs.FirstOrDefault(b => b.Name == blogName);
+```
+
+Since the blog name is now *parameterized*, both queries have the same tree shape, and EF only needs to compile it once. The SQL produced is also parameterized, allowing the database to reuse the same query plan:
+
+```sql
+SELECT TOP(1) [b].[Id], [b].[Name]
+FROM [Blogs] AS [b]
+WHERE [b].[Name] = @__blogName_0
+```
+
+Note that there is no need to parameterize each and every query: it's perfectly fine to have some queries with constants, and indeed, databases (and EF) can sometimes perform certain optimizations around constants which aren't possible when the query is parameterized. See the section on [dynamically-constructed queries](#dynamically-constructed-queries) for an example where proper parameterization is crucial.
+
+> [!NOTE]
+> EF Core's [event counters](xref:core/logging-events-diagnostics/event-counters) report the Query Cache Hit Rate. In a normal application, this counter reaches 100% soon after program startup, once most queries have executed at least once. If this counter remains stable below 100%, that is an indication that your application may be doing something which defeats the query cache - it's a good idea to investigate that.
+
+> [!NOTE]
+> How the database manages its cache of query plans is database-dependent. For example, SQL Server implicitly maintains an LRU query plan cache, whereas PostgreSQL does not (but prepared statements can produce a very similar end effect). Consult your database documentation for more details.
+
+## Dynamically-constructed queries
+
+In some situations, it is necessary to dynamically construct LINQ queries rather than specifying them outright in source code. This can happen, for example, in a website which receives arbitrary query details from a client, with open-ended query operators (sorting, filtering, paging...).
+In principle, if done correctly, dynamically-constructed queries can be just as efficient as regular ones (although it's not possible to use the [compiled query]() optimization with dynamic queries). In practice, however, they are frequently the source of performance issues, since it's easy to accidentally produce expression trees with shapes that differ every time.
+
+The following example compares two techniques for dynamically constructing a query; we add a Where operator to the query only if the given parameter is not null. Note that this isn't a good use case for dynamically constructing a query - but we're using it for simplicity:
+
+### [With constant](#tab/with-constant)
+
+[!code-csharp[Main](../../../samples/core/Benchmarks/DynamicallyConstructedQueries.cs?name=WithConstant&highlight=14-24)]
+
+### [With parameter](#tab/with-parameter)
+
+[!code-csharp[Main](../../../samples/core/Benchmarks/DynamicallyConstructedQueries.cs?name=WithParameter&highlight=14)]
+
+***
+
+Benchmarking these two techniques gives the following results:
+
+| Method        |       Mean |    Error |    StdDev |   Gen 0 |  Gen 1 | Gen 2 | Allocated |
+|-------------- |-----------:|---------:|----------:|--------:|-------:|------:|----------:|
+| WithConstant  | 1,096.7 us | 12.54 us |  11.12 us | 13.6719 | 1.9531 |     - |  83.91 KB |
+| WithParameter |   570.8 us | 42.43 us | 124.43 us |  5.8594 |      - |     - |  37.16 KB |
+
+Even if the sub-millisecond difference seems small, keep in mind that the constant version continuously pollutes the cache and causes other queries to be re-compiled, slowing them down as well.
+
+> [!NOTE]
+> Avoid constructing queries with the expression tree API unless you really need to. Aside from the API's complexity, it's very easy to inadvertently cause significant performance issues when using it.
diff --git a/entity-framework/core/performance/efficient-querying.md b/entity-framework/core/performance/efficient-querying.md
new file mode 100644
index 0000000000..80aacfb3a0
--- /dev/null
+++ b/entity-framework/core/performance/efficient-querying.md
@@ -0,0 +1,249 @@
+---
+title: Efficient Querying - EF Core
+description: Performance guide for efficient querying using Entity Framework Core
+author: roji
+ms.date: 12/1/2020
+uid: core/miscellaneous/efficient-querying
+---
+# Efficient Querying
+
+Querying efficiently is a vast subject, covering topics as wide-ranging as indexes, related entity loading strategies, and many others. This section details some common themes for making your queries faster, as well as pitfalls users typically encounter.
+
+## Use indexes properly
+
+The main deciding factor in whether a query runs fast or not is whether it properly utilizes indexes where appropriate: databases are typically used to hold large amounts of data, and queries which traverse entire tables are typically sources of serious performance issues. Indexing issues aren't easy to spot, because it isn't immediately obvious whether a given query will use an index or not. For example:
+
+```csharp
+_ = ctx.Blogs.Where(b => b.Name.StartsWith("A")).ToList(); // Uses an index defined on Name on SQL Server
+_ = ctx.Blogs.Where(b => b.Name.EndsWith("B")).ToList();   // Does not use the index
+```
+
+The main way to spot indexing issues is to first pinpoint a slow query, and then examine its query plan via your preferred database tool; see the [performance diagnosis](xref:core/miscellaneous/performance-diagnosis) page for more information on how to do that.
+The query plan displays whether the query traverses the entire table, or uses an index.
+
+As a general rule, there isn't any special EF knowledge to using indexes or diagnosing performance issues related to them; general database knowledge related to indexes is just as relevant to EF applications as to applications not using EF. The following lists some general guidelines to keep in mind when using indexes:
+
+* While indexes speed up queries, they also slow down updates since they need to be kept up-to-date. Avoid defining indexes which aren't needed, and consider using [index filters](xref:core/modeling/indexes#index-filter) to limit the index to a subset of the rows, thereby reducing this overhead.
+* Composite indexes can speed up queries which filter on multiple columns, but they can also speed up queries which don't filter on all the index's columns - depending on ordering. For example, an index on columns A and B speeds up queries filtering by A and B, as well as queries filtering only by A, but it does not speed up queries filtering only by B.
+* If a query filters by an expression over a column (e.g. `price / 2`), a simple index cannot be used. However, you can define a [stored computed column](xref:core/modeling/generated-properties#computed-columns) for your expression, and create an index over that. Some databases also support expression indexes, which can be used directly to speed up queries filtering by any expression.
+* Different databases allow indexes to be configured in various ways, and in many cases EF Core providers expose these via the Fluent API. For example, the SQL Server provider allows you to configure whether an index is [clustered](xref:core/providers/sql-server/indexes#clustering), or to set its [fill factor](xref:core/providers/sql-server/indexes#fill-factor). Consult your provider's documentation for more information.
+
+## Project only properties you need
+
+EF Core makes it very easy to query out entity instances, and then use those instances in code. However, querying entity instances can frequently pull back more data than necessary from your database. Consider the following:
+
+```csharp
+foreach (var blog in ctx.Blogs)
+{
+    Console.WriteLine("Blog: " + blog.Url);
+}
+```
+
+Although this code only actually needs each Blog's `Url` property, the entire Blog entity is fetched, and unneeded columns are transferred from the database:
+
+```sql
+SELECT [b].[BlogId], [b].[CreationDate], [b].[Name], [b].[Rating], [b].[Url]
+FROM [Blogs] AS [b]
+```
+
+This can be optimized by using `Select` to tell EF which columns to project out:
+
+```csharp
+foreach (var blogName in ctx.Blogs.Select(b => b.Url))
+{
+    Console.WriteLine("Blog: " + blogName);
+}
+```
+
+The resulting SQL pulls back only the needed columns:
+
+```sql
+SELECT [b].[Url]
+FROM [Blogs] AS [b]
+```
+
+If you need to project out more than one column, project out to a C# anonymous type with the properties you want.
+
+Note that this technique is very useful for read-only queries, but things get more complicated if you need to *update* the fetched blogs, since EF's change tracking only works with entity instances. It's possible to perform updates without loading entire entities, by attaching a modified Blog instance and telling EF which properties have changed - but that is a more advanced technique that may not be worth it; a rough sketch follows.
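+As an illustration only, here is a minimal sketch of that technique. It assumes a `Blog` entity with an `Id` key and a `Url` property, and that the target row's key value (`blogId`) is already known:
+
+```csharp
+// A minimal sketch: update a single column without loading the entity first.
+// The Blog type, its Id and Url properties, and the blogId value are all
+// assumptions made for this example.
+var blog = new Blog { Id = blogId };
+context.Attach(blog); // The attached entity starts out in the Unchanged state
+
+blog.Url = "http://www.newurl.com";
+// Only Url is now detected as modified, so SaveChanges sends an UPDATE
+// statement setting just that column.
+context.SaveChanges();
+```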
+## Limit the resultset size
+
+By default, a query returns all rows that match its filters:
+
+```csharp
+var blogs = ctx.Blogs
+    .Where(b => b.Name.StartsWith("A"))
+    .ToList();
+```
+
+Since the number of rows returned depends on actual data in your database, it's impossible to know how much data will be loaded from the database, how much memory will be taken up by the results, and how much additional load will be generated when processing these results (e.g. by sending them to a user browser over the network). Crucially, test databases frequently contain little data, so that everything works well while testing, but performance problems suddenly appear when the query starts running on real-world data and many rows are returned.
+
+As a result, it's usually worth giving thought to limiting the number of results:
+
+```csharp
+var blogs = ctx.Blogs
+    .Where(b => b.Name.StartsWith("A"))
+    .Take(25)
+    .ToList();
+```
+
+At a minimum, your UI could show a message indicating that more rows may exist in the database (and allow retrieving them in some other manner). A full-blown solution would implement *paging*, where your UI only shows a certain number of rows at a time, and allows users to advance to the next page as needed; this typically combines the `Skip` and `Take` operators to select a specific range in the resultset each time.
+
+## Avoid cartesian explosion when loading related entities
+
+In relational databases, all related entities are loaded by introducing JOINs in a single query:
+
+```sql
+SELECT [b].[BlogId], [b].[OwnerId], [b].[Rating], [b].[Url], [p].[PostId], [p].[AuthorId], [p].[BlogId], [p].[Content], [p].[Rating], [p].[Title]
+FROM [Blogs] AS [b]
+LEFT JOIN [Post] AS [p] ON [b].[BlogId] = [p].[BlogId]
+ORDER BY [b].[BlogId], [p].[PostId]
+```
+
+If a typical blog has multiple related posts, rows for these posts will duplicate the blog's information. This duplication leads to the so-called "cartesian explosion" problem. As more one-to-many relationships are loaded, the amount of duplicated data may grow and adversely affect the performance of your application.
+
+EF allows avoiding this effect via the use of "split queries", which load the related entities via separate queries. For more information, read [the documentation on split and single queries](xref:core/querying/single-split-queries).
+
+> [!NOTE]
+> The current implementation of [split queries](xref:core/querying/single-split-queries) executes a roundtrip for each query. We plan to improve this in the future, and execute all queries in a single roundtrip.
+
+## Load related entities eagerly when possible
+
+It's recommended to read [the dedicated page on related entities](xref:core/querying/related-data) before continuing with this section.
+
+When dealing with related entities, we usually know in advance what we need to load: a typical example would be loading a certain set of Blogs, along with all their Posts. In these scenarios, it is always better to use [eager loading](xref:core/querying/related-data/eager), so that EF can fetch all the required data in one roundtrip. The [filtered include](xref:core/querying/related-data/eager#filtered-include) feature, introduced in EF Core 5.0, also allows you to limit which related entities you'd like to load, while keeping the loading process eager and therefore doable in a single roundtrip.
+
+In other scenarios, we may not know which related entity we're going to need before we get its principal entity.
+For example, when loading some Blog, we may need to consult some other data source - possibly a webservice - in order to know whether we're interested in that Blog's Posts. In these cases, [explicit](xref:core/querying/related-data/explicit) or [lazy](xref:core/querying/related-data/lazy) loading can be used to fetch related entities separately, and populate the Blog's Posts navigation. Note that since these methods aren't eager, they require additional roundtrips to the database, which is a source of slowdown; depending on your specific scenario, it may be more efficient to just always load all Posts, rather than to execute the additional roundtrips and selectively get only the Posts you need.
+
+### Beware of lazy loading
+
+[Lazy loading](xref:core/querying/related-data/lazy) often seems like a very useful way to write database logic, since EF Core automatically loads related entities from the database as they are accessed by your code. This avoids loading related entities that aren't needed (like [explicit loading](xref:core/querying/related-data/explicit)), and seemingly frees the programmer from having to deal with related entities altogether. However, lazy loading is particularly prone to producing unneeded extra roundtrips which can slow the application.
+
+Consider the following:
+
+```csharp
+foreach (var blog in ctx.Blogs.ToList())
+{
+    foreach (var post in blog.Posts)
+    {
+        Console.WriteLine($"Blog {blog.Url}, Post: {post.Title}");
+    }
+}
+```
+
+This seemingly innocent piece of code iterates through all the blogs and their posts, printing them out. Turning on EF Core's [statement logging](xref:core/logging-events-diagnostics/index) reveals the following:
+
+```console
+info: Microsoft.EntityFrameworkCore.Database.Command[20101]
+      Executed DbCommand (1ms) [Parameters=[], CommandType='Text', CommandTimeout='30']
+      SELECT [b].[BlogId], [b].[Rating], [b].[Url]
+      FROM [Blogs] AS [b]
+info: Microsoft.EntityFrameworkCore.Database.Command[20101]
+      Executed DbCommand (5ms) [Parameters=[@__p_0='1'], CommandType='Text', CommandTimeout='30']
+      SELECT [p].[PostId], [p].[BlogId], [p].[Content], [p].[Title]
+      FROM [Post] AS [p]
+      WHERE [p].[BlogId] = @__p_0
+info: Microsoft.EntityFrameworkCore.Database.Command[20101]
+      Executed DbCommand (1ms) [Parameters=[@__p_0='2'], CommandType='Text', CommandTimeout='30']
+      SELECT [p].[PostId], [p].[BlogId], [p].[Content], [p].[Title]
+      FROM [Post] AS [p]
+      WHERE [p].[BlogId] = @__p_0
+info: Microsoft.EntityFrameworkCore.Database.Command[20101]
+      Executed DbCommand (1ms) [Parameters=[@__p_0='3'], CommandType='Text', CommandTimeout='30']
+      SELECT [p].[PostId], [p].[BlogId], [p].[Content], [p].[Title]
+      FROM [Post] AS [p]
+      WHERE [p].[BlogId] = @__p_0
+
+... and so on
+```
+
+What's going on here? Why are all these queries being sent for the simple loops above? With lazy loading, a Blog's Posts are only (lazily) loaded when its Posts property is accessed; as a result, each iteration in the inner foreach triggers an additional database query, in its own roundtrip. So after the initial query loading all the blogs, we have another query *per blog*, loading all its posts; this is sometimes called the *N+1* problem, and it can cause very significant performance issues.
+
+Assuming we're going to need all of the blogs' posts, it makes sense to use eager loading here instead.
+We can use the [Include](xref:core/querying/related-data/eager#eager-loading) operator to perform the loading, but since we only need the Blogs' URLs (and we should only [load what's needed](xref:core/miscellaneous/efficient-querying#project-only-properties-you-need)), we'll use a projection instead:
+
+```csharp
+foreach (var blog in ctx.Blogs.Select(b => new { b.Url, b.Posts }).ToList())
+{
+    foreach (var post in blog.Posts)
+    {
+        Console.WriteLine($"Blog {blog.Url}, Post: {post.Title}");
+    }
+}
+```
+
+This will make EF Core fetch all the Blogs - along with their Posts - in a single query. In some cases, it may also be useful to avoid cartesian explosion effects by using [split queries](xref:core/querying/single-split-queries).
+
+> [!WARNING]
+> Because lazy loading makes it extremely easy to inadvertently trigger the N+1 problem, it is recommended to avoid it. Eager or explicit loading make it very clear in the source code when a database roundtrip occurs.
+
+## Buffering and streaming
+
+Buffering refers to loading all your query results into memory, whereas streaming means that EF hands the application a single result each time, never holding the entire resultset in memory. In principle, the memory requirements of a streaming query are fixed - they are the same whether the query returns 1 row or 1000; a buffering query, on the other hand, requires more memory the more rows are returned. For queries that return large resultsets, this can be an important performance factor.
+
+Whether a query buffers or streams depends on how it is evaluated:
+
+```csharp
+// ToList and ToArray cause the entire resultset to be buffered:
+var blogsList = context.Blogs.Where(b => b.Name.StartsWith("A")).ToList();
+var blogsArray = context.Blogs.Where(b => b.Name.StartsWith("A")).ToArray();
+
+// Foreach streams, processing one row at a time:
+foreach (var blog in context.Blogs.Where(b => b.Name.StartsWith("A")))
+{
+    // ...
+}
+
+// AsEnumerable also streams, allowing you to execute LINQ operators on the client-side:
+var groupedBlogs = context.Blogs
+    .Where(b => b.Name.StartsWith("A"))
+    .AsEnumerable()
+    .Where(b => SomeDotNetMethod(b));
+```
+
+If your queries return just a few results, then you probably don't have to worry about this. However, if your query might return large numbers of rows, it's worth giving thought to streaming instead of buffering.
+
+> [!NOTE]
+> Avoid using `ToList` or `ToArray` if you intend to use another LINQ operator on the result - this will needlessly buffer all results into memory. Use `AsEnumerable` instead.
+
+### Internal buffering by EF
+
+In certain situations, EF will itself buffer the resultset internally, regardless of how you evaluate your query. The two cases where this happens are:
+
+* When a retrying execution strategy is in place. This is done to make sure the same results are returned if the query is retried later.
+* When [split query](xref:core/querying/single-split-queries) is used, the resultsets of all but the last query are buffered - unless MARS is enabled on SQL Server. This is because it is usually impossible to have multiple query resultsets active at the same time.
+
+Note that this internal buffering occurs in addition to any buffering you cause via LINQ operators. For example, if you use `ToList` on a query and a retrying execution strategy is in place, the resultset is loaded into memory *twice*: once internally by EF, and once by `ToList`; the sketch below illustrates this.
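+As a rough sketch of that interaction - the connection string and `Blog` model are assumed here - streaming with `foreach` avoids holding the second, application-side copy:
+
+```csharp
+// A sketch, not a definitive pattern; connection string and model are assumed.
+protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
+    => optionsBuilder.UseSqlServer(
+        connectionString,
+        o => o.EnableRetryOnFailure()); // EF now buffers resultsets internally
+
+// Elsewhere: ToList would create a second, application-side copy of the
+// already-buffered results, whereas streaming with foreach keeps only EF's
+// internal buffer:
+foreach (var blog in context.Blogs.Where(b => b.Name.StartsWith("A")))
+{
+    // process each blog as it is handed over
+}
+```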
+## Tracking, no-tracking and identity resolution
+
+It's recommended to read [the dedicated page on tracking and no-tracking](xref:core/querying/tracking) before continuing with this section.
+
+EF tracks entity instances by default, so that changes on them are detected and persisted when `SaveChanges` is called. Another effect of tracking queries is that EF detects whether an instance has already been loaded for the same data, and will automatically return that tracked instance rather than returning a new one; this is called *identity resolution*. From a performance perspective, change tracking means the following:
+
+* EF internally maintains a dictionary of tracked instances. When new data is loaded, EF checks the dictionary to see if an instance is already tracked for that entity's key (identity resolution). The dictionary maintenance and lookups take up some time when loading the query's results.
+* Before handing a loaded instance to the application, EF *snapshots* that instance and keeps the snapshot internally. When `SaveChanges` is called, the application's instance is compared with the snapshot to discover the changes to be persisted. The snapshot takes up more memory, and the snapshotting process itself takes time; it's sometimes possible to specify different, possibly more efficient snapshotting behavior via [value comparers](), or to use [change-tracking proxies]() to bypass the snapshotting process altogether (though that comes with its own set of disadvantages).
+
+In read-only scenarios where changes aren't saved back to the database, the above overheads can be avoided by using [no-tracking queries](xref:core/querying/tracking#no-tracking-queries). However, since no-tracking queries do not perform identity resolution, a database row which is referenced by multiple other loaded rows will be materialized as different instances, taking up more memory.
+
+To illustrate, assume we are loading a large number of Posts from the database, as well as the Blog referenced by each Post. If 100 Posts happen to reference the same Blog, a tracking query detects this via identity resolution, and all Post instances will refer to the same de-duplicated Blog instance. A no-tracking query, in contrast, duplicates the same Blog 100 times, resulting in increased memory requirements and GC pressure.
+
+Finally, it is possible to perform updates without the overhead of change tracking, by utilizing a no-tracking query and then attaching the returned instance to the context, specifying which changes are to be made. This transfers the burden of change tracking from EF to the user, and should only be attempted if the change tracking overhead has been shown to be unacceptable via profiling or benchmarking.
+
+## Using raw SQL
+
+In some cases, more optimized SQL exists for your query, which EF does not generate. This can happen when the SQL construct is an extension specific to your database that's unsupported, or simply because EF does not translate to it yet. In these cases, writing SQL by hand can provide a substantial performance boost, and EF supports several ways to do this.
+
+* Use raw SQL [directly in your query](xref:core/querying/raw-sql), e.g. via `FromSqlRaw`. EF even lets you compose over the raw SQL with regular LINQ queries, allowing you to express only a part of the query in raw SQL. This is a good technique when the raw SQL only needs to be used in a single query in your codebase.
+* Define a [user-defined function]() (UDF), and then call that from your queries.
+Note that since 5.0, EF allows UDFs to return full resultsets - these are known as table-valued functions (TVFs) - and also allows mapping a `DbSet` to a function, making it look just like another table.
+* Define a database view and query from it. Note that unlike functions, views cannot accept parameters.
+
+> [!NOTE]
+> Raw SQL should generally be used as a last resort, after making sure that EF can't generate the SQL you want, and when performance is important enough for the given query to justify it. Using raw SQL brings considerable maintenance disadvantages.
+
+## Asynchronous programming
+
+As a general rule, in order for your application to be scalable, it's important to always use asynchronous APIs rather than synchronous ones (e.g. `SaveChangesAsync` rather than `SaveChanges`). Synchronous APIs block the thread for the duration of database I/O, increasing the need for threads and the number of thread context switches that must occur.
+
+For more information, see the page on [async programming](xref:core/miscellaneous/async).
+
+> [!WARNING]
+> Avoid mixing synchronous and asynchronous code in the same application - it's very easy to inadvertently trigger subtle thread-pool starvation issues.
diff --git a/entity-framework/core/performance/efficient-updating.md b/entity-framework/core/performance/efficient-updating.md
new file mode 100644
index 0000000000..c81a23e744
--- /dev/null
+++ b/entity-framework/core/performance/efficient-updating.md
@@ -0,0 +1,65 @@
+---
+title: Efficient Updating - EF Core
+description: Performance guide for efficient updating using Entity Framework Core
+author: roji
+ms.date: 12/1/2020
+uid: core/miscellaneous/efficient-updating
+---
+# Efficient Updating
+
+## Batching
+
+EF Core helps minimize roundtrips by automatically batching together all updates in a single roundtrip. Consider the following:
+
+```csharp
+var blog = context.Blogs.Single(b => b.Name == "EF Core Blog");
+blog.Url = "http://some.new.website";
+context.Add(new Blog { Name = "Another blog" });
+context.Add(new Blog { Name = "Yet another blog" });
+context.SaveChanges();
+```
+
+The above loads a blog from the database, changes its URL, and then adds two new blogs; to apply this, two SQL INSERT statements and one UPDATE statement are sent to the database. Rather than sending them one by one as Blog instances are added, EF Core tracks these changes internally, and executes them in a single roundtrip when `SaveChanges` is called.
+
+The number of statements that EF batches in a single roundtrip depends on the database provider being used. For example, performance analysis has shown batching to be generally less efficient for SQL Server when fewer than 4 statements are involved. Similarly, the benefits of batching degrade after around 40 statements for SQL Server, so EF Core will by default only execute up to 42 statements in a single batch, and execute additional statements in separate roundtrips.
+
+Users can also tweak these thresholds to achieve potentially higher performance - but benchmark carefully before modifying them:
+
+```csharp
+protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
+    => optionsBuilder.UseSqlServer(
+        @"...",
+        o => o
+            .MinBatchSize(1)
+            .MaxBatchSize(100));
+```
+
+## Bulk updates
+
+Let's assume you want to give all Employees of a certain department a raise.
+A typical implementation for this in EF Core would look like the following:
+
+```csharp
+foreach (var employee in context.Employees.Where(e => e.Department.Id == 10))
+{
+    employee.Salary += 1000;
+}
+context.SaveChanges();
+```
+
+While this is perfectly valid code, let's analyze what it does from a performance perspective:
+
+* A database roundtrip is performed, to load all the relevant employees; note that this brings all the Employees' row data to the client, even if only the salary will be needed.
+* EF Core's change tracking creates snapshots when loading the entities, and then compares those snapshots to the instances to find out which properties changed.
+* A second database roundtrip is performed to save all the changes. While all changes are done in a single roundtrip thanks to batching, EF Core still sends an UPDATE statement per employee, which must be executed by the database.
+
+Relational databases also support *bulk updates*, so the above could be rewritten as the following single SQL statement:
+
+```sql
+UPDATE [Employees] SET [Salary] = [Salary] + 1000 WHERE [DepartmentId] = 10;
+```
+
+This performs the entire operation in a single roundtrip, without loading or sending any actual data to the database, and without making use of EF's change tracking machinery, which does have an overhead cost.
+
+Unfortunately, EF doesn't currently provide APIs for performing bulk updates. Until these are introduced, you can use raw SQL to perform the operation where performance is sensitive:
+
+```csharp
+context.Database.ExecuteSqlRaw(
+    "UPDATE [Employees] SET [Salary] = [Salary] + 1000 WHERE [DepartmentId] = {0}",
+    departmentId);
+```
diff --git a/entity-framework/core/performance/index.md b/entity-framework/core/performance/index.md
new file mode 100644
index 0000000000..40dc548006
--- /dev/null
+++ b/entity-framework/core/performance/index.md
@@ -0,0 +1,44 @@
+---
+title: Introduction to Performance - EF Core
+description: Performance guide for efficiently using Entity Framework Core
+author: roji
+ms.date: 12/1/2020
+uid: core/miscellaneous/performance/index
+---
+# Introduction to Performance
+
+Database performance is a vast and complex topic, spanning an entire stack of components: the database, networking, the database driver, and data access layers such as EF Core. While high-level layers and O/RMs such as EF Core considerably simplify application development and improve maintainability, they can sometimes be opaque, hiding performance-critical internal details such as the SQL being executed. This section attempts to provide an overview of how to achieve good performance with EF Core, and how to avoid common pitfalls which can degrade application performance.
+
+## Identify bottlenecks and measure, measure, measure
+
+As always with performance, it's important not to rush into optimization without data showing a problem; as the great Donald Knuth once said, "Premature optimization is the root of all evil". The [performance diagnosis](xref:core/miscellaneous/performance-diagnosis) section discusses various ways to understand where your application is spending time in database logic, and how to pinpoint specific problematic areas. Once a slow query has been identified, solutions can be considered: is your database missing an index? Should you try out other querying patterns?
+
+Always benchmark your code and possible alternatives yourself - the performance diagnosis section contains a sample benchmark with BenchmarkDotNet, which you can use as a template for your own benchmarks.
+Don't assume that general, public benchmarks apply as-is to your specific use case; a variety of factors such as database latency, query complexity and actual data amounts in your tables can have a profound effect on which solution is best. For example, many public benchmarks are carried out in ideal networking conditions, where latency to the database is almost zero, and with extremely light queries which hardly require any processing (or disk I/O) on the database side. While these are valuable for comparing the runtime overheads of different data access layers, the differences they reveal usually prove to be negligible in a real-world application, where the database performs actual work and latency to the database is a significant perf factor.
+
+## Aspects of data access performance
+
+Overall data access performance can be broken down into the following broad categories:
+
+### Pure database performance
+
+With relational databases, EF translates the application's LINQ queries into the SQL statements executed by the database; these SQL statements themselves can run more or less efficiently. The right index in the right place can make a world of difference in SQL performance, or rewriting your LINQ query may make EF generate a better SQL query.
+
+### Network data transfer
+
+As with any networking system, it's important to limit the amount of data going back and forth on the wire. This covers making sure that you only send and load data which you're actually going to need, but also avoiding the so-called "cartesian explosion" effect when loading related entities.
+
+### Network roundtrips
+
+Beyond the amount of data going back and forth, the number of network roundtrips also matters, since the time taken for a query to execute in the database can be dwarfed by the time packets travel back and forth between your application and your database. Roundtrip overhead heavily depends on your environment; the further away your database server is, the higher the latency and the costlier each roundtrip. With the advent of the cloud, applications increasingly find themselves further away from the database, and "chatty" applications which perform too many roundtrips experience degraded performance. Therefore, it's important to understand exactly when your application contacts the database, how many roundtrips it performs, and whether that number can be minimized.
+
+### EF runtime overhead
+
+Finally, EF itself adds some runtime overhead to database operations: EF needs to compile your queries from LINQ to SQL (although each query should normally be compiled only once), change tracking adds some overhead (but can be disabled), etc. In practice, the EF overhead for real-world applications is likely to be negligible in most cases, as query execution time in the database and network latency dominate the total time; but it's important to understand what your options are and how to avoid some pitfalls.
+
+## Know what's happening under the hood
+
+EF allows developers to concentrate on business logic by generating SQL, materializing results, and performing other tasks. Like any layer or abstraction, it also tends to hide what's happening under the hood, such as the actual SQL queries being executed. Performance isn't necessarily a critical aspect of every application out there, but in applications where it is, it is vital that the developer understand what EF is doing for them: inspect outgoing SQL queries, follow roundtrips to make sure the N+1 problem isn't occurring, etc.
+## Cache outside the database
+
+Finally, the most efficient way to interact with a database is to not interact with it at all. In other words, if database access shows up as a performance bottleneck in your application, it may be worthwhile to cache certain results outside of the database, so as to minimize requests. Although caching adds complexity, it is an especially crucial part of any scalable application: while the application tier can be easily scaled by adding additional servers to handle increased load, scaling the database tier is usually far more complicated.
diff --git a/entity-framework/core/performance/modeling-for-performance.md b/entity-framework/core/performance/modeling-for-performance.md
new file mode 100644
index 0000000000..9e54f94ae7
--- /dev/null
+++ b/entity-framework/core/performance/modeling-for-performance.md
@@ -0,0 +1,60 @@
+---
+title: Modeling for Performance - EF Core
+description: Performance guide for efficiently using Entity Framework Core
+author: roji
+ms.date: 12/1/2020
+uid: core/miscellaneous/performance/modeling
+---
+# Modeling for Performance
+
+In many cases, the way you model can have a profound impact on the performance of your application; while a properly normalized and "correct" model is usually a good starting point, in real-world applications some pragmatic compromises can go a long way toward achieving good performance. Since it's quite difficult to change your model once an application is running in production, it's worth keeping performance in mind when creating the initial model.
+
+## Denormalization and caching
+
+*Denormalization* is the practice of adding redundant data to your schema, usually in order to eliminate joins when querying. For example, for a model with Blogs and Posts, where each Post has a Rating, you may be required to frequently show the average rating of each Blog. The simple approach would be to group the Posts by their Blog, and calculate the average as part of the query; but this requires a costly join between the two tables. Denormalization would add the calculated average of all posts to a new column on Blog, so that it is immediately accessible, without joining or calculating.
+
+The above can be viewed as a form of *caching* - aggregate information from the Posts is cached on their Blog; and as with any caching, the problem is how to keep the cached value up to date with the data it's caching. In many cases, it's OK for the cached data to lag for a bit; for example, in the example above, it's usually reasonable for the blog's average rating to not be completely up to date at any given point. If that's the case, you can have it recalculated every now and then; otherwise, a more elaborate system must be set up to keep the cached values up to date.
+
+The following details some techniques for denormalization and caching in EF Core, and points to the relevant sections in the documentation.
+
+### Stored computed columns
+
+If the data to be cached is a product of other columns in the same table, then a [stored computed column](xref:core/modeling/generated-properties#computed-columns) can be a perfect solution. For example, a `Customer` may have `FirstName` and `LastName` columns, but we may need to search by the customer's *full name*. A stored computed column is automatically maintained by the database - which recalculates it whenever the row is changed - and you can even define an index over it to speed up queries; a configuration sketch follows.
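+As a rough sketch - the `Customer` entity and its `FullName` property are assumptions for this example, and the SQL expression is SQL Server syntax - such a column and its index could be configured like this:
+
+```csharp
+protected override void OnModelCreating(ModelBuilder modelBuilder)
+{
+    modelBuilder.Entity<Customer>()
+        .Property(c => c.FullName)
+        // stored: true makes the database persist and maintain the value,
+        // recalculating it whenever FirstName or LastName change.
+        .HasComputedColumnSql("[LastName] + ', ' + [FirstName]", stored: true);
+
+    // An index over the stored computed column speeds up searches by full name.
+    modelBuilder.Entity<Customer>()
+        .HasIndex(c => c.FullName);
+}
+```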
+### Update cache columns when inputs change
+
+If your cached column needs to reference inputs from outside the table's row, you cannot use computed columns. However, it is still possible to recalculate the column whenever its inputs change; for example, you could recalculate the average Blog rating every time a Post is changed, added or removed. Be sure to identify the exact conditions when recalculation is needed; otherwise, your cached value will go out of sync.
+
+One way to do this is to perform the update yourself, via the regular EF Core API. `SaveChanges` [events](xref:core/logging-events-diagnostics/events) or [interceptors](xref:core/logging-events-diagnostics/interceptors#savechanges-interception) can be used to automatically check if any Posts are being updated, and to perform the recalculation that way. Note that this typically entails additional database roundtrips, as additional commands must be sent.
+
+For more perf-sensitive applications, database triggers can be defined to automatically perform the recalculation in the database. This saves the extra database roundtrips, automatically occurs within the same transaction as the main update, and can be simpler to set up. EF doesn't provide any specific API for creating or maintaining triggers, but it's perfectly fine to [create an empty migration and add the trigger definition via raw SQL](xref:core/managing-schemas/migrations/managing#arbitrary-changes-via-raw-sql).
+
+### Materialized views
+
+Materialized views are similar to regular views, except that their data is stored on disk ("materialized"), rather than calculated every time the view is queried. This tool is useful when you don't want to simply add a single cache column to an existing table, but rather want to cache the entire resultset of a complicated and expensive query, just as if it were a regular table; these results can then be queried very cheaply without any computation or joins happening. Unlike computed columns, materialized views aren't automatically updated when their underlying tables change - they must be manually refreshed. If the cached data is allowed to lag, refreshing the view can be done via a timer; another option is to set up database triggers to refresh the materialized view once certain database events occur.
+
+EF doesn't currently provide any specific API for creating or maintaining views, materialized or otherwise; but it's perfectly fine to [create an empty migration and add the view definition via raw SQL](xref:core/managing-schemas/migrations/managing#arbitrary-changes-via-raw-sql).
+
+## Inheritance mapping
+
+It's recommended to read [the dedicated page on inheritance](xref:core/modeling/inheritance) before continuing with this section.
+
+EF Core currently supports two techniques for mapping an inheritance model to a relational database:
+
+* **Table-per-hierarchy** (TPH), in which an entire .NET hierarchy of classes is mapped to a single database table.
+* **Table-per-type** (TPT), in which each type in the .NET hierarchy is mapped to a different table in the database.
+
+The choice of inheritance mapping technique can have a considerable impact on application performance - it's recommended to carefully measure before committing to a choice.
+
+People sometimes choose TPT because it appears to be the "cleaner" technique; a separate table for each .NET type makes the database schema look similar to the .NET type hierarchy.
+In addition, since TPH must represent the entire hierarchy in a single table, rows have *all* columns regardless of the type actually being held in the row, and unrelated columns are always empty and unused. Aside from seeming to be an "unclean" mapping technique, many believe that these empty columns take up considerable space in the database and may hurt performance as well.
+
+However, measuring shows that TPT is in most cases the inferior mapping technique from a performance standpoint; where all data in TPH comes from a single table, TPT queries must join together multiple tables, and joins are one of the primary sources of performance issues in relational databases. Databases also generally tend to deal well with empty columns, and features such as [SQL Server sparse columns](/sql/relational-databases/tables/use-sparse-columns) can reduce this overhead even further.
+
+For a concrete example, [see this benchmark](), which sets up a simple model with a 7-type hierarchy; 5000 rows are seeded for each type - totaling 35000 rows - and the benchmark simply loads all rows from the database:
+
+| Method |     Mean |   Error |  StdDev |     Gen 0 |     Gen 1 |     Gen 2 | Allocated |
+|------- |---------:|--------:|--------:|----------:|----------:|----------:|----------:|
+| TPH    | 132.3 ms | 2.29 ms | 2.03 ms | 8000.0000 | 3000.0000 | 1250.0000 |  44.49 MB |
+| TPT    | 201.3 ms | 3.32 ms | 3.10 ms | 9000.0000 | 4000.0000 |         - |  61.84 MB |
+
+As can be seen, TPH is considerably more efficient than TPT. Note that actual results always depend on the specific query being executed, so other queries may show a different performance gap; you're encouraged to use this benchmark code as a template for testing other queries.
diff --git a/entity-framework/core/performance/performance-diagnosis.md b/entity-framework/core/performance/performance-diagnosis.md
new file mode 100644
index 0000000000..f7833466f1
--- /dev/null
+++ b/entity-framework/core/performance/performance-diagnosis.md
@@ -0,0 +1,151 @@
+---
+title: Performance Diagnosis - EF Core
+description: Diagnosing Entity Framework Core performance and identifying bottlenecks
+author: roji
+ms.date: 12/1/2020
+uid: core/miscellaneous/performance-diagnosis
+---
+# Performance Diagnosis
+
+This section discusses ways to detect performance issues in your EF application and, once a problematic area has been identified, how to further analyze it to identify the root problem. It's important to carefully diagnose and investigate any problems before jumping to any conclusions, and to avoid assuming where the root of the issue is.
+
+## Identifying slow database commands via logging
+
+At the end of the day, EF prepares and executes commands against your database; with relational databases, that means executing SQL statements via the ADO.NET database API. If a certain query is taking too much time (e.g. because an index is missing), this can be discovered by inspecting command execution logs and observing how long they actually take.
+EF makes it very easy to capture command execution times, via either [simple logging](xref:core/logging-events-diagnostics/simple-logging) or [Microsoft.Extensions.Logging](xref:core/logging-events-diagnostics/extensions-logging):
+
+### [Simple logging](#tab/simple-logging)
+
+```csharp
+class MyDbContext : DbContext
+{
+    protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
+    {
+        optionsBuilder
+            .UseSqlServer(@"Server=(localdb)\mssqllocaldb;Database=Blogging;Integrated Security=True")
+            .LogTo(Console.WriteLine, LogLevel.Information);
+    }
+}
+```
+
+### [Microsoft.Extensions.Logging](#tab/microsoft-extensions-logging)
+
+```csharp
+class MyDbContext : DbContext
+{
+    static ILoggerFactory ContextLoggerFactory
+        => LoggerFactory.Create(b => b.AddConsole().AddFilter("", LogLevel.Information));
+
+    protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
+        => optionsBuilder
+            .UseSqlServer(@"Server=(localdb)\mssqllocaldb;Database=Blogging;Integrated Security=True")
+            .UseLoggerFactory(ContextLoggerFactory);
+}
+```
+
+***
+
+When the logging level is set to `LogLevel.Information`, EF emits a log message for each command execution, including the time taken:
+
+```log
+info: 06/12/2020 09:12:36.117 RelationalEventId.CommandExecuted[20101] (Microsoft.EntityFrameworkCore.Database.Command)
+      Executed DbCommand (4ms) [Parameters=[], CommandType='Text', CommandTimeout='30']
+      SELECT [b].[Id], [b].[Name]
+      FROM [Blogs] AS [b]
+      WHERE [b].[Name] = N'foo'
+```
+
+The above command took 4 milliseconds. If a certain command takes more than expected, you've found a possible culprit for a performance issue, and can now focus on it to understand why it's running slowly. Command logging can also reveal cases where unexpected [database roundtrips](xref:core/miscellaneous/performance/index#network-roundtrips) are being made; this would show up as multiple commands where only one is expected.
+
+> [!WARNING]
+> Leaving command execution logging enabled in your production environment is usually a bad idea. The logging itself slows down your application, and may quickly create huge log files which can fill up your server's disk. It's recommended to only keep logging on for a short interval of time to gather data - while carefully monitoring your application - or to capture logging data on a pre-production system.
+
+## Correlating database commands to LINQ queries
+
+One problem with command execution logging is that it's sometimes difficult to correlate SQL queries and LINQ queries: the SQL commands executed by EF can look very different from the LINQ queries from which they were generated. To help with this difficulty, you may want to use EF's [query tags](xref:core/querying/tags) feature, which allows you to inject a small, identifying comment into the SQL query:
+
+```csharp
+var blogs = ctx.Blogs
+    .TagWith("GetBlogByName")
+    .Where(b => b.Name == "foo")
+    .ToList();
+```
+
+The tag shows up in the logs:
+
+```log
+info: 06/12/2020 09:25:42.951 RelationalEventId.CommandExecuted[20101] (Microsoft.EntityFrameworkCore.Database.Command)
+      Executed DbCommand (4ms) [Parameters=[], CommandType='Text', CommandTimeout='30']
+      -- GetBlogByName
+
+      SELECT [b].[Id], [b].[Name]
+      FROM [Blogs] AS [b]
+      WHERE [b].[Name] = N'foo'
+```
+
+It's often worth tagging the major queries of an application in this way, to make the command execution logs more immediately readable.
## Other interfaces for capturing performance data

There are various alternatives to EF's logging feature for capturing command execution times, which may be more powerful. Databases typically come with their own tracing and performance analysis tools, which usually provide much richer, database-specific information beyond simple execution times; the actual setup, capabilities and usage vary considerably across databases.

For example, [SQL Server Management Studio](/sql/ssms/download-sql-server-management-studio-ssms) is a powerful client that can connect to your SQL Server instance and provide valuable management and performance information. It's beyond the scope of this section to go into the details, but two capabilities worth mentioning are the [Activity Monitor](/sql/relational-databases/performance-monitor/open-activity-monitor-sql-server-management-studio), which provides a live dashboard of server activity (including the most expensive queries), and the [Extended Events (XEvent)](/sql/relational-databases/extended-events/quick-start-extended-events-in-sql-server) feature, which allows defining arbitrary data capture sessions that can be tailored to your exact needs. [The SQL Server documentation on monitoring](/sql/relational-databases/performance/monitor-and-tune-for-performance) provides more information on these features, as well as others.

Another approach for capturing performance data is to collect information automatically emitted by either EF or the database driver via the `DiagnosticSource` interface, and then analyze that data or display it on a dashboard. If you are using Azure, then [Azure Application Insights](/azure/azure-monitor/learn/tutorial-performance) provides such powerful monitoring out of the box, integrating database performance and query execution times in the analysis of how quickly your web requests are being served. More information on this is available in the [Application Insights performance tutorial](/azure/azure-monitor/learn/tutorial-performance), and in the [Azure SQL analytics page](/azure/azure-monitor/insights/azure-sql).

## Inspecting query execution plans

Once you've pinpointed a problematic query that requires optimization, the next step is usually analyzing the query's *execution plan*. When databases receive a SQL statement, they typically produce a plan for how that statement is to be executed; this sometimes requires complicated decision-making based on which indexes have been defined, how much data exists in tables, etc. (incidentally, the plan itself should usually be cached at the server for optimal performance). Relational databases typically provide a way for users to see the query plan, along with calculated costing for different parts of the query; this is invaluable for improving your queries.

To get started on SQL Server, see the documentation on [query execution plans](/sql/relational-databases/performance/execution-plans). The typical analysis workflow would be to use [SQL Server Management Studio](/sql/ssms/download-sql-server-management-studio-ssms), pasting the SQL of a slow query identified via one of the means above, and [producing a graphical execution plan](/sql/relational-databases/performance/display-an-actual-execution-plan):

![Display a SQL Server execution plan](_static/actualexecplan.png)

While execution plans may seem complicated at first, it's worth spending a bit of time getting familiar with them. It's particularly important to note the costs associated with each node of the plan, and to identify how indexes are used (or not) in the various nodes.
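A plan can also be captured without a graphical client, over the same ADO.NET connection EF uses. The following is a minimal sketch, assuming SQL Server and the hypothetical `MyDbContext` from the logging examples above; `SET STATISTICS XML ON` makes the server return each statement's actual plan as an additional XML resultset:

```csharp
using var context = new MyDbContext();
context.Database.OpenConnection();
using var command = context.Database.GetDbConnection().CreateCommand();
command.CommandText = @"
SET STATISTICS XML ON;
SELECT [b].[Id], [b].[Name]
FROM [Blogs] AS [b]
WHERE [b].[Name] = N'foo';";
using var reader = command.ExecuteReader();
// The first resultset contains the query's rows; the actual plan follows as XML.
reader.NextResult();
while (reader.Read())
{
    Console.WriteLine(reader.GetString(0));
}
```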
While the above information is specific to SQL Server, other databases typically provide the same kind of tools with similar visualization.

> [!IMPORTANT]
> Databases sometimes generate different query plans depending on actual data in the database. For example, if a table contains only a few rows, a database may choose not to use an index on that table, but to perform a full table scan instead. If analyzing query plans on a test database, always make sure it contains data that is similar to your production system.

## Event counters

The above sections focused on how to get information about your commands, and how those commands are executed in the database. In addition to that, EF exposes a set of *event counters* which provide lower-level information on what's happening inside EF itself, and how your application is using it. These counters can be very useful for diagnosing specific performance issues and anomalies, such as [query caching issues](xref:core/miscellaneous/writing-efficient-queries#dynamically-constructed-queries) which cause constant recompilation, undisposed DbContext leaks, and others.

See the dedicated page on [EF's event counters](xref:core/logging-events-diagnostics/event-counters) for more information.

## Benchmarking with EF Core

At the end of the day, you sometimes need to know whether a particular way of writing or executing a query is faster than another. It's important never to assume or speculate about the answer, and it's extremely easy to put together a quick benchmark to get one. When writing benchmarks, it's strongly recommended to use the well-known [BenchmarkDotNet](https://benchmarkdotnet.org/index.html) library, which handles many pitfalls users encounter when trying to write their own benchmarks: have you performed some warmup iterations? How many iterations does your benchmark actually run, and why? Let's take a look at what a benchmark with EF Core looks like.

> [!TIP]
> The full benchmark project for the source below is available [here](). You are encouraged to copy it and use it as a template for your own benchmarks.

As a simple benchmark scenario, let's compare the following different methods of calculating the average ranking of all Blogs in our database:

* Load all entities, sum up their individual rankings, and calculate the average.
* The same as above, only use a non-tracking query. This should be faster, since identity resolution isn't performed, and the entities aren't snapshotted for the purposes of change tracking.
* Avoid loading entire Blog entity instances at all, by projecting out only the ranking. This saves us from transferring the other, unneeded columns of the Blog entity type.
* Calculate the average in the database by making it part of the query. This should be the fastest way, since only the result is transferred back to the client.

With BenchmarkDotNet, you write the code to be benchmarked as a simple method - just like a unit test - and BenchmarkDotNet automatically runs each method for a sufficient number of iterations, reliably measuring how long it takes and how much memory is allocated.
Here's the benchmark code:

[!code-csharp[Main](../../../samples/core/Benchmarks/AverageBlogRanking.cs?name=Benchmarks)]

The results are below, as printed by BenchmarkDotNet:

|                  Method |       Mean |    Error |   StdDev |     Median | Ratio | RatioSD |    Gen 0 |   Gen 1 | Gen 2 |  Allocated |
|------------------------ |-----------:|---------:|---------:|-----------:|------:|--------:|---------:|--------:|------:|-----------:|
|            LoadEntities | 2,860.4 us | 54.31 us | 93.68 us | 2,844.5 us |  4.55 |    0.33 | 210.9375 | 70.3125 |     - | 1309.56 KB |
| LoadEntitiesNonTracking | 1,353.0 us | 21.26 us | 18.85 us | 1,355.6 us |  2.10 |    0.14 |  87.8906 |  3.9063 |     - |  540.09 KB |
|      ProjectOnlyRanking |   910.9 us | 20.91 us | 61.65 us |   892.9 us |  1.46 |    0.14 |  41.0156 |  0.9766 |     - |  252.08 KB |
|     CalculateInDatabase |   627.1 us | 14.58 us | 42.54 us |   626.4 us |  1.00 |    0.00 |   4.8828 |       - |     - |   33.27 KB |

> [!NOTE]
> As the methods instantiate and dispose of the context within the method, these operations are counted in the benchmark, although strictly speaking they are not part of the querying process. This should not matter if the goal is to compare two alternatives to one another (since the context instantiation and disposal are the same), and it gives a more holistic measurement for the entire operation.

One limitation of BenchmarkDotNet is that it measures simple, single-threaded performance of the methods you provide, and is therefore not well-suited for benchmarking concurrent scenarios.

> [!IMPORTANT]
> Always make sure to have data in your database that is similar to production data when benchmarking, otherwise the benchmark results may not represent actual performance in production.
diff --git a/entity-framework/core/querying/related-data/lazy.md b/entity-framework/core/querying/related-data/lazy.md
index ca821bd28c..a34bf15082 100644
--- a/entity-framework/core/querying/related-data/lazy.md
+++ b/entity-framework/core/querying/related-data/lazy.md
@@ -47,6 +47,9 @@ public class Post
 }
 ```

+> [!WARNING]
+> Lazy loading can cause unneeded extra database roundtrips to occur (the so-called N+1 problem), and care should be taken to avoid this. See the [performance section](xref:core/miscellaneous/writing-efficient-queries) for more details.
+
 ## Lazy loading without proxies

Lazy-loading proxies work by injecting the `ILazyLoader` service into an entity, as described in [Entity Type Constructors](xref:core/modeling/constructors).
For example:
diff --git a/entity-framework/toc.yml b/entity-framework/toc.yml
index 1da653ab69..34cd0c4590 100644
--- a/entity-framework/toc.yml
+++ b/entity-framework/toc.yml
@@ -255,6 +255,21 @@
     - name: Test with InMemory
       href: core/testing/in-memory.md
+  - name: Performance
+    items:
+      - name: Introduction
+        href: core/performance/index.md
+      - name: Performance diagnosis
+        href: core/performance/performance-diagnosis.md
+      - name: Efficient querying
+        href: core/performance/efficient-querying.md
+      - name: Updating data efficiently
+        href: core/performance/efficient-updating.md
+      - name: Modeling for performance
+        href: core/performance/modeling-for-performance.md
+      - name: Advanced performance topics
+        href: core/performance/advanced-performance-topics.md
   - name: Miscellaneous
     items:
       - name: Supported .NET implementations
diff --git a/samples/core/Benchmarks/AverageBlogRanking.cs b/samples/core/Benchmarks/AverageBlogRanking.cs
new file mode 100644
index 0000000000..6f5878107c
--- /dev/null
+++ b/samples/core/Benchmarks/AverageBlogRanking.cs
@@ -0,0 +1,113 @@
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Linq;
using BenchmarkDotNet.Attributes;
using Microsoft.EntityFrameworkCore;

namespace Benchmarks
{
    [MemoryDiagnoser]
    public class AverageBlogRanking
    {
        public const int NumBlogs = 1000;

        [GlobalSetup]
        public void Setup()
        {
            using var context = new BloggingContext();
            context.Database.EnsureDeleted();
            context.Database.EnsureCreated();
            context.SeedData();
        }

        #region Benchmarks
        [Benchmark]
        public double LoadEntities()
        {
            var sum = 0;
            var count = 0;
            using var ctx = new BloggingContext();
            foreach (var blog in ctx.Blogs)
            {
                sum += blog.Rating;
                count++;
            }

            return (double)sum / count; // cast avoids integer division
        }

        [Benchmark]
        public double LoadEntitiesNonTracking()
        {
            var sum = 0;
            var count = 0;
            using var ctx = new BloggingContext();
            foreach (var blog in ctx.Blogs.AsNoTracking())
            {
                sum += blog.Rating;
                count++;
            }

            return (double)sum / count; // cast avoids integer division
        }

        [Benchmark]
        public double ProjectOnlyRanking()
        {
            var sum = 0;
            var count = 0;
            using var ctx = new BloggingContext();
            foreach (var rating in ctx.Blogs.Select(b => b.Rating))
            {
                sum += rating;
                count++;
            }

            return (double)sum / count; // cast avoids integer division
        }

        [Benchmark(Baseline = true)]
        public double CalculateInDatabase()
        {
            using var ctx = new BloggingContext();
            return ctx.Blogs.Average(b => b.Rating);
        }
        #endregion Benchmarks

        public class BloggingContext : DbContext
        {
            public DbSet<Blog> Blogs { get; set; }

            protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
            {
                optionsBuilder.UseSqlServer(
                    @"Server=localhost;Database=test;User=SA;Password=Abcd5678;Connect Timeout=60;ConnectRetryCount=0");
                    // @"Server=(localdb)\mssqllocaldb;Database=Blogging;Integrated Security=True");
            }

            public void SeedData()
            {
                Blogs.AddRange(
                    Enumerable.Range(0, NumBlogs).Select(i => new Blog
                    {
                        Name = $"Blog{i}",
                        Url = $"blog{i}.blogs.net",
                        CreationTime = new DateTime(2020, 1, 1),
                        Rating = i % 5
                    }));
                SaveChanges();
            }
        }

        public class Blog
        {
            public int BlogId { get; set; }
            public string Name { get; set; }
            public string Url { get; set; }
            public DateTime CreationTime { get; set; }
            public int Rating { get; set; }
        }
    }
}
diff --git a/samples/core/Benchmarks/Benchmarks.csproj
b/samples/core/Benchmarks/Benchmarks.csproj
new file mode 100644
index 0000000000..94fb4a34a9
--- /dev/null
+++ b/samples/core/Benchmarks/Benchmarks.csproj
@@ -0,0 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net5.0</TargetFramework>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="BenchmarkDotNet" Version="0.12.1" />
    <PackageReference Include="Microsoft.EntityFrameworkCore.SqlServer" Version="5.0.0" />
  </ItemGroup>

</Project>
diff --git a/samples/core/Benchmarks/DynamicallyConstructedQueries.cs b/samples/core/Benchmarks/DynamicallyConstructedQueries.cs
new file mode 100644
index 0000000000..3f817b44b7
--- /dev/null
+++ b/samples/core/Benchmarks/DynamicallyConstructedQueries.cs
@@ -0,0 +1,112 @@
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Threading;
using BenchmarkDotNet.Attributes;
using Microsoft.EntityFrameworkCore;

namespace Benchmarks
{
    [MemoryDiagnoser]
    public class DynamicallyConstructedQueries
    {
        private int _blogNumber;

        [GlobalSetup]
        public void GlobalSetup()
        {
            using var context = new BloggingContext();
            context.Database.EnsureDeleted();
            context.Database.EnsureCreated();
        }

        #region WithConstant
        [Benchmark]
        public int WithConstant()
        {
            return GetBlogCount("blog" + Interlocked.Increment(ref _blogNumber));

            int GetBlogCount(string url)
            {
                using var context = new BloggingContext();

                IQueryable<Blog> blogs = context.Blogs;

                if (url is not null)
                {
                    // Embed the URL as a constant in the expression tree; each distinct
                    // value produces a different query, defeating the query cache.
                    var blogParam = Expression.Parameter(typeof(Blog), "b");
                    var whereLambda = Expression.Lambda<Func<Blog, bool>>(
                        Expression.Equal(
                            Expression.MakeMemberAccess(
                                blogParam,
                                typeof(Blog).GetMember(nameof(Blog.Url)).Single()
                            ),
                            Expression.Constant(url)),
                        blogParam);

                    blogs = blogs.Where(whereLambda);
                }

                return blogs.Count();
            }
        }
        #endregion

        #region WithParameter
        [Benchmark]
        public int WithParameter()
        {
            return GetBlogCount("blog" + Interlocked.Increment(ref _blogNumber));

            int GetBlogCount(string url)
            {
                using var context = new BloggingContext();

                IQueryable<Blog> blogs = context.Blogs;

                if (url is not null)
                {
                    // The closed-over variable becomes a parameter, so all invocations
                    // share a single cached query.
                    blogs = blogs.Where(b => b.Url == url);
                }

                return blogs.Count();
            }
        }
        #endregion

        public class BloggingContext : DbContext
        {
            public DbSet<Blog> Blogs { get; set; }
            public DbSet<Post> Posts { get; set; }

            protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
            {
                optionsBuilder.UseSqlServer(
                    @"Server=localhost;Database=test;User=SA;Password=Abcd5678;Connect Timeout=60;ConnectRetryCount=0");
                    // @"Server=(localdb)\mssqllocaldb;Database=Blogging;Integrated Security=True");
            }
        }

        public class Blog
        {
            public int BlogId { get; set; }
            public string Url { get; set; }
            public int Rating { get; set; }
            public List<Post> Posts { get; set; }
        }

        public class Post
        {
            public int PostId { get; set; }
            public string Title { get; set; }
            public string Content { get; set; }

            public int BlogId { get; set; }
            public Blog Blog { get; set; }
        }
    }
}
diff --git a/samples/core/Benchmarks/Inheritance.cs b/samples/core/Benchmarks/Inheritance.cs
new file mode 100644
index 0000000000..94786c6b53
--- /dev/null
+++ b/samples/core/Benchmarks/Inheritance.cs
@@ -0,0 +1,142 @@
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Linq;
using BenchmarkDotNet.Attributes;
using Microsoft.EntityFrameworkCore;

namespace Benchmarks
{
    [MemoryDiagnoser]
    public class Inheritance
    {
        [Params(5000)]
        public int RowsPerEntityType { get; set; }

        [GlobalSetup(Target = nameof(TPH))]
        public void SetupTPH()
        {
            Console.WriteLine("Setting up database...");
            using var context = new TPHContext();
            context.Database.EnsureDeleted();
            context.Database.EnsureCreated();
            context.SeedData(RowsPerEntityType);
            Console.WriteLine("Setup complete.");
        }

        [GlobalSetup(Target = nameof(TPT))]
        public void SetupTPT()
        {
            Console.WriteLine("Setting up database...");
            using var context = new TPTContext();
            context.Database.EnsureDeleted();
            context.Database.EnsureCreated();
            context.SeedData(RowsPerEntityType);
            Console.WriteLine("Setup complete.");
        }

        [Benchmark]
        public List<Root> TPH()
        {
            using var context = new TPHContext();

            return context.Roots.ToList();
        }

        [Benchmark]
        public List<Root> TPT()
        {
            using var context = new TPTContext();

            return context.Roots.ToList();
        }

        public abstract class InheritanceContext : DbContext
        {
            public DbSet<Root> Roots { get; set; }

            protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
            {
                optionsBuilder.UseSqlServer(
                    @"Server=localhost;Database=test;User=SA;Password=Abcd5678;Connect Timeout=60;ConnectRetryCount=0");
                    // @"Server=(localdb)\mssqllocaldb;Database=Blogging;Integrated Security=True");
            }

            public void SeedData(int rowsPerEntityType)
            {
                Set<Root>().AddRange(Enumerable.Range(0, rowsPerEntityType).Select(i => new Root { RootProperty = i }));
                Set<Child1>().AddRange(Enumerable.Range(0, rowsPerEntityType).Select(i => new Child1 { Child1Property = i }));
                Set<Child1A>().AddRange(Enumerable.Range(0, rowsPerEntityType).Select(i => new Child1A { Child1AProperty = i }));
                Set<Child1B>().AddRange(Enumerable.Range(0, rowsPerEntityType).Select(i => new Child1B { Child1BProperty = i }));
                Set<Child2>().AddRange(Enumerable.Range(0, rowsPerEntityType).Select(i => new Child2 { Child2Property = i }));
                Set<Child2A>().AddRange(Enumerable.Range(0, rowsPerEntityType).Select(i => new Child2A { Child2AProperty = i }));
                Set<Child2B>().AddRange(Enumerable.Range(0, rowsPerEntityType).Select(i => new Child2B { Child2BProperty = i }));
                SaveChanges();
            }
        }

        public class TPHContext : InheritanceContext
        {
            protected override void OnModelCreating(ModelBuilder modelBuilder)
            {
                modelBuilder.Entity<Child1>();
                modelBuilder.Entity<Child1A>();
                modelBuilder.Entity<Child1B>();
                modelBuilder.Entity<Child2>();
                modelBuilder.Entity<Child2A>();
                modelBuilder.Entity<Child2B>();
            }
        }

        public class TPTContext : InheritanceContext
        {
            protected override void OnModelCreating(ModelBuilder modelBuilder)
            {
                modelBuilder.Entity<Child1>().ToTable("Child1");
                modelBuilder.Entity<Child1A>().ToTable("Child1A");
                modelBuilder.Entity<Child1B>().ToTable("Child1B");
                modelBuilder.Entity<Child2>().ToTable("Child2");
                modelBuilder.Entity<Child2A>().ToTable("Child2A");
                modelBuilder.Entity<Child2B>().ToTable("Child2B");
            }
        }

        public class Root
        {
            public int Id { get; set; }
            public int RootProperty { get; set; }
        }

        public class Child1 : Root
        {
            public int Child1Property { get; set; }
        }

        public class Child1A : Root
        {
            public int Child1AProperty { get; set; }
        }

        public class Child1B : Root
        {
            public int Child1BProperty { get; set; }
        }

        public class Child2 : Root
        {
            public int Child2Property { get; set; }
        }

        public class Child2A : Root
        {
            public int Child2AProperty { get; set; }
        }

        public class Child2B : Root
        {
            public int Child2BProperty { get; set; }
        }
    }
}
diff --git a/samples/core/Benchmarks/Program.cs b/samples/core/Benchmarks/Program.cs
new file mode 100644
index 0000000000..61204931fe
--- /dev/null
+++ b/samples/core/Benchmarks/Program.cs
@@ -0,0 +1,10 @@
using BenchmarkDotNet.Running;

namespace Benchmarks
{
    public class Program
    {
        public static void Main(string[] args)
            => BenchmarkSwitcher.FromAssembly(typeof(Program).Assembly).Run(args);
    }
}
diff --git a/samples/core/Benchmarks/QueryTrackingBehavior.cs b/samples/core/Benchmarks/QueryTrackingBehavior.cs
new file mode 100644
index 0000000000..c3b1621b4f
--- /dev/null
+++ b/samples/core/Benchmarks/QueryTrackingBehavior.cs
@@ -0,0 +1,98 @@
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Linq;
using BenchmarkDotNet.Attributes;
using Microsoft.EntityFrameworkCore;

namespace Benchmarks
{
    [MemoryDiagnoser]
    public class QueryTrackingBehavior
    {
        [Params(1)]
        public int NumBlogs { get; set; }

        [Params(5000)]
        public int NumPostsPerBlog { get; set; }

        [GlobalSetup]
        public void Setup()
        {
            Console.WriteLine("Setting up database...");
            using var context = new BloggingContext();
            context.Database.EnsureDeleted();
            context.Database.EnsureCreated();
            context.SeedData(NumBlogs, NumPostsPerBlog);
            Console.WriteLine("Setup complete.");
        }

        [Benchmark]
        public List<Post> Tracking()
        {
            using var context = new BloggingContext();

            return context.Posts.Include(p => p.Blog).ToList();
        }

        [Benchmark]
        public List<Post> NoTracking()
        {
            using var context = new BloggingContext();

            return context.Posts.AsNoTracking().Include(p => p.Blog).ToList();
        }

        [Benchmark]
        public List<Post> NoTrackingWithIdentityResolution()
        {
            using var context = new BloggingContext();

            return context.Posts.AsNoTrackingWithIdentityResolution().Include(p => p.Blog).ToList();
        }

        public class BloggingContext : DbContext
        {
            public DbSet<Blog> Blogs { get; set; }
            public DbSet<Post> Posts { get; set; }

            protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
            {
                optionsBuilder.UseSqlServer(
                    @"Server=localhost;Database=test;User=SA;Password=Abcd5678;Connect Timeout=60;ConnectRetryCount=0");
                    // @"Server=(localdb)\mssqllocaldb;Database=Blogging;Integrated Security=True");
            }

            public void SeedData(int numBlogs, int numPostsPerBlog)
            {
                using var context = new BloggingContext();
                context.AddRange(
                    Enumerable.Range(0, numBlogs).Select(_ => new Blog
                    {
                        Posts = Enumerable.Range(0, numPostsPerBlog).Select(_ => new Post()).ToList()
                    }));
                context.SaveChanges();
            }
        }

        public class Blog
        {
            public int BlogId { get; set; }
            public string Url { get; set; }
            public int Rating { get; set; }
            public List<Post> Posts { get; set; }
        }

        public class Post
        {
            public int PostId { get; set; }
            public string Title { get; set; }
            public string Content { get; set; }

            public int BlogId { get; set; }
            public Blog Blog { get; set; }
        }
    }
}
diff --git a/samples/core/Benchmarks/RelatedEntityLoading.cs b/samples/core/Benchmarks/RelatedEntityLoading.cs
new file mode 100644
index 0000000000..9532e12567
--- /dev/null
+++ b/samples/core/Benchmarks/RelatedEntityLoading.cs
@@ -0,0 +1,134 @@
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System.Collections.Generic;
using System.Linq;
using BenchmarkDotNet.Attributes;
using Microsoft.EntityFrameworkCore;

namespace Benchmarks
{
    [MemoryDiagnoser]
    public class RelatedEntityLoading
    {
        [Params(RelatedEntityLoadingMode.Eager, RelatedEntityLoadingMode.Explicit, RelatedEntityLoadingMode.Lazy)]
        public RelatedEntityLoadingMode RelatedEntityLoadingMode { get; set; }

        [GlobalSetup]
        public void Setup()
        {
            if (RelatedEntityLoadingMode == RelatedEntityLoadingMode.Lazy)
            {
                using var context = new LazyBloggingContext();
                context.Database.EnsureDeleted();
                context.Database.EnsureCreated();
                context.SeedData();
            }
            else
            {
                using var context = new BloggingContext();
                context.Database.EnsureDeleted();
                context.Database.EnsureCreated();
                context.SeedData();
            }
        }

        [Benchmark]
        public void NoRelated()
        {
            // Sketch: load posts without their related blogs, as a baseline.
            using var context = new BloggingContext();
            _ = context.Posts.ToList();
        }

        [Benchmark]
        public void Lazy()
        {
            // Sketch: each access to post.Blog triggers a separate lazy-loading
            // roundtrip (the N+1 problem this benchmark is meant to surface).
            using var context = new LazyBloggingContext();
            foreach (var post in context.Posts.ToList())
            {
                _ = post.Blog;
            }
        }

        #region Non-lazy

        public class BloggingContext : DbContext
        {
            public DbSet<Blog> Blogs { get; set; }
            public DbSet<Post> Posts { get; set; }

            protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
            {
                optionsBuilder.UseSqlServer(
                    @"Server=localhost;Database=test;User=SA;Password=Abcd5678;Connect Timeout=60;ConnectRetryCount=0");
                    // @"Server=(localdb)\mssqllocaldb;Database=Blogging;Integrated Security=True");
            }

            public void SeedData()
            {
                // Sketch: seed a handful of blogs, each with a few posts.
                AddRange(Enumerable.Range(0, 10).Select(_ => new Blog
                {
                    Posts = Enumerable.Range(0, 10).Select(_ => new Post()).ToList()
                }));
                SaveChanges();
            }
        }

        public class Blog
        {
            public int BlogId { get; set; }
            public string Url { get; set; }
            public int Rating { get; set; }
            public List<Post> Posts { get; set; }
        }

        public class Post
        {
            public int PostId { get; set; }
            public string Title { get; set; }
            public string Content { get; set; }

            public int BlogId { get; set; }
            public Blog Blog { get; set; }
        }

        #endregion Non-lazy

        #region Lazy

        public class LazyBloggingContext : DbContext
        {
            public DbSet<LazyBlog> Blogs { get; set; }
            public DbSet<LazyPost> Posts { get; set; }

            protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
            {
                optionsBuilder
                    // Lazy loading via proxies requires the Microsoft.EntityFrameworkCore.Proxies package.
                    .UseLazyLoadingProxies()
                    .UseSqlServer(
                        @"Server=localhost;Database=test;User=SA;Password=Abcd5678;Connect Timeout=60;ConnectRetryCount=0");
                        // @"Server=(localdb)\mssqllocaldb;Database=Blogging;Integrated Security=True");
            }

            public void SeedData()
            {
                // Sketch: seed a handful of blogs, each with a few posts.
                AddRange(Enumerable.Range(0, 10).Select(_ => new LazyBlog
                {
                    Posts = Enumerable.Range(0, 10).Select(_ => new LazyPost()).ToList()
                }));
                SaveChanges();
            }
        }

        public class LazyBlog
        {
            public int BlogId { get; set; }
            public string Url { get; set; }
            public int Rating { get; set; }
            public virtual List<LazyPost> Posts { get; set; }
        }

        public class LazyPost
        {
            public int PostId { get; set; }
            public string Title { get; set; }
            public string Content { get; set; }

            public int BlogId { get; set; }
            public virtual LazyBlog Blog { get; set; }
        }

        #endregion Lazy
    }

    public enum RelatedEntityLoadingMode
    {
        Eager,
        Explicit,
        Lazy
    }
}
diff --git a/samples/core/Samples.sln b/samples/core/Samples.sln
index 85361c9c03..f97955f836 100644
--- a/samples/core/Samples.sln
+++ b/samples/core/Samples.sln
@@ -127,6 +127,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DiagnosticListeners", "Misc
 EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ConfiguringDbContext", "Miscellaneous\ConfiguringDbContext\ConfiguringDbContext.csproj", "{73503DF2-CD85-4710-BE94-B83B87054709}"
 EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Benchmarks", "Benchmarks\Benchmarks.csproj", "{7CC1CD44-6C02-4736-9A6D-B3F2E987CBCB}"
+EndProject
 Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -345,6 +347,10 @@ Global {73503DF2-CD85-4710-BE94-B83B87054709}.Debug|Any CPU.Build.0 = Debug|Any CPU {73503DF2-CD85-4710-BE94-B83B87054709}.Release|Any CPU.ActiveCfg = Release|Any CPU {73503DF2-CD85-4710-BE94-B83B87054709}.Release|Any CPU.Build.0 = Release|Any CPU + {7CC1CD44-6C02-4736-9A6D-B3F2E987CBCB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7CC1CD44-6C02-4736-9A6D-B3F2E987CBCB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7CC1CD44-6C02-4736-9A6D-B3F2E987CBCB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7CC1CD44-6C02-4736-9A6D-B3F2E987CBCB}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE