diff --git a/.github/workflows/on-push-do-ci-build-pg15-jsonnet.yml b/.github/workflows/on-push-do-ci-build-pg15-jsonnet.yml
index 80d6d62be4..d88170238b 100644
--- a/.github/workflows/on-push-do-ci-build-pg15-jsonnet.yml
+++ b/.github/workflows/on-push-do-ci-build-pg15-jsonnet.yml
@@ -153,4 +153,4 @@ jobs:
- name: test-aspnet-core
if: ${{ success() || failure() }}
run: ./build.sh test-aspnet-core
- shell: bash
+ shell: bash
\ No newline at end of file
diff --git a/.github/workflows/on-push-do-ci-build-pgLatest-systemtextjson.yml b/.github/workflows/on-push-do-ci-build-pgLatest-systemtextjson.yml
index 25818c1eb2..50ed2d84f5 100644
--- a/.github/workflows/on-push-do-ci-build-pgLatest-systemtextjson.yml
+++ b/.github/workflows/on-push-do-ci-build-pgLatest-systemtextjson.yml
@@ -153,4 +153,4 @@ jobs:
- name: test-aspnet-core
if: ${{ success() || failure() }}
run: ./build.sh test-aspnet-core
- shell: bash
+ shell: bash
\ No newline at end of file
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
deleted file mode 100644
index 68ee5e6a54..0000000000
--- a/azure-pipelines.yml
+++ /dev/null
@@ -1,207 +0,0 @@
-trigger:
- batch: true
- branches:
- include:
- - master
- paths:
- exclude:
- - docs/*
- - documentation/*
- - .github/*
-
-pr:
- - master
-
-resources:
- containers:
- - container: pg12_plv8
- image: ionx/postgres-plv8:12.8
- ports:
- - 5432:5432
- env:
- POSTGRES_HOST_AUTH_METHOD: trust
- NAMEDATALEN: 150
- user: postgres
- - container: pg15
- image: postgres:15-alpine
- ports:
- - 5432:5432
- env:
- POSTGRES_HOST_AUTH_METHOD: trust
- NAMEDATALEN: 150
- user: postgres
- - container: pgLatest
- image: postgres:latest
- ports:
- - 5432:5432
- env:
- POSTGRES_HOST_AUTH_METHOD: trust
- NAMEDATALEN: 150
- user: postgres
-
-variables:
- DOTNET_CLI_TELEMETRY_OPTOUT: 1
- DOTNET_SKIP_FIRST_TIME_EXPERIENCE: 1
- node_version: 16.x
- pg_db: marten_testing
- CONFIGURATION: Release
- FRAMEWORK: net6.0
- DISABLE_TEST_PARALLELIZATION: true
- CONNECTION_STRING: "Host=localhost;Port=5432;Database=marten_testing;Username=postgres;Password=Password12!;Timeout=60;Cancellation Timeout=5000; Command Timeout=60"
- NUKE_TELEMETRY_OPTOUT: true
-
-jobs:
- - job: build_dotnet
- displayName: build_net6.0
- # timeoutInMinutes: 20
- pool:
- vmImage: 'ubuntu-latest'
- strategy:
- matrix:
- pg12_plv8:
- postgresService: pg12_plv8
- serializer: Newtonsoft
- plv8: true
- pg12_SystemTextJson:
- postgresService: pg12_plv8
- serializer: SystemTextJson
- plv8: true
- pg15:
- postgresService: pg15
- serializer: SystemTextJson
- plv8: false
- pgLatest:
- postgresService: pgLatest
- serializer: Newtonsoft
- plv8: false
- services:
- postgres: $[ variables['postgresService'] ]
- steps:
- - task: UseDotNet@2
- displayName: Install .NET 6.0.x
- inputs:
- packageType: 'sdk'
- version: '6.x'
- - task: UseDotNet@2
- displayName: Install .NET 7.0.x
- inputs:
- packageType: 'sdk'
- version: '7.x'
- - task: UseDotNet@2
- displayName: Install .NET 8.0.x
- inputs:
- packageType: 'sdk'
- version: '8.x'
- - script: |
- dotnet workload update
- dotnet workload install aspire
- displayName: Install .NET Aspire workload
- - task: UseNode@1
- displayName: Install Node.js
- inputs:
- version: $(node_version)
- - script: |
- PG_CONTAINER_NAME=$(docker ps --filter expose=5432/tcp --format {{.Names}})
- docker exec $PG_CONTAINER_NAME psql -U postgres -c "create database $(pg_db);"
- displayName: Create db
- - script: |
- PG_CONTAINER_NAME=$(docker ps --filter expose=5432/tcp --format {{.Names}})
- docker exec $PG_CONTAINER_NAME bash -c "echo -e '\nfsync = off' >> /var/lib/postgresql/data/postgresql.conf"
- docker exec $PG_CONTAINER_NAME bash -c "echo -e '\nfull_page_writes = off' >> /var/lib/postgresql/data/postgresql.conf"
- docker exec $PG_CONTAINER_NAME bash -c "echo -e '\nsynchronous_commit = off' >> /var/lib/postgresql/data/postgresql.conf"
- docker container restart $PG_CONTAINER_NAME
- displayName: Optimize database for running tests faster
- - script: |
- PG_CONTAINER_NAME=$(docker ps --filter expose=5432/tcp --format {{.Names}})
- docker exec $PG_CONTAINER_NAME psql -U postgres -d $(pg_db) -c "create extension if not exists plv8;"
- docker exec $PG_CONTAINER_NAME psql -U postgres -c "DO 'plv8.elog(NOTICE, plv8.version);' LANGUAGE plv8;"
- displayName: Add plv8 extension
- condition: eq(variables['plv8'], 'true')
- - script: ./build.sh compile
- displayName: compile
- env:
- DEFAULT_SERIALIZER: $(serializer)
- - script: ./build.sh mocha
- displayName: mocha
- continueOnError: true
- env:
- DEFAULT_SERIALIZER: $(serializer)
- - script: ./build.sh test-base-lib
- displayName: test-base-lib
- continueOnError: true
- env:
- DEFAULT_SERIALIZER: $(serializer)
- - script: ./build.sh test-core
- displayName: test-core
- continueOnError: true
- env:
- DEFAULT_SERIALIZER: $(serializer)
- - script: ./build.sh test-document-db
- displayName: test-document-db
- continueOnError: true
- env:
- DEFAULT_SERIALIZER: $(serializer)
- - script: ./build.sh test-event-sourcing
- displayName: test-event-sourcing
- continueOnError: true
- env:
- DEFAULT_SERIALIZER: $(serializer)
- - script: ./build.sh test-cli
- displayName: test-cli
- continueOnError: true
- env:
- DEFAULT_SERIALIZER: $(serializer)
- - script: ./build.sh test-core
- displayName: test-core
- continueOnError: true
- env:
- DEFAULT_SERIALIZER: $(serializer)
- - script: ./build.sh test-linq
- displayName: test-linq
- continueOnError: true
- env:
- DEFAULT_SERIALIZER: $(serializer)
- # - script: ./build.sh test-multi-tenancy
- # displayName: test-multi-tenancy
- # continueOnError: true
- # env:
- # DEFAULT_SERIALIZER: $(serializer)
- - script: ./build.sh test-patching
- displayName: test-patching
- continueOnError: true
- env:
- DEFAULT_SERIALIZER: $(serializer)
- - script: ./build.sh test-value-types
- displayName: test-value-types
- continueOnError: true
- env:
- DEFAULT_SERIALIZER: $(serializer)
- - script: ./build.sh test-code-gen
- displayName: test-code-gen
- continueOnError: true
- env:
- DEFAULT_SERIALIZER: $(serializer)
- - script: ./build.sh test-noda-time
- displayName: test-noda-time
- continueOnError: true
- env:
- DEFAULT_SERIALIZER: $(serializer)
- - script: ./build.sh test-aspnet-core
- displayName: test-aspnet-core
- continueOnError: true
- env:
- DEFAULT_SERIALIZER: $(serializer)
- - script: ./build.sh test-plv8
- displayName: test-plv8
- continueOnError: true
- env:
- DEFAULT_SERIALIZER: $(serializer)
- condition: eq(variables['plv8'], 'true')
- # This step will fail the job if the any previous step had failed
- - script: |
- if [ $(Agent.JobStatus) == 'SucceededWithIssues' ]; then
- exit 1
- fi
- name: CheckForFailure
- condition: always()
-
diff --git a/docs/configuration/multitenancy.md b/docs/configuration/multitenancy.md
index 0450798e40..023e56f897 100644
--- a/docs/configuration/multitenancy.md
+++ b/docs/configuration/multitenancy.md
@@ -39,7 +39,7 @@ var store = DocumentStore.For(opts =>
opts.TenantIdStyle = TenantIdStyle.ForceUpperCase;
});
```
-snippet source | anchor
+snippet source | anchor
## Static Database to Tenant Mapping
diff --git a/docs/configuration/storeoptions.md b/docs/configuration/storeoptions.md
index 57feb7b84e..5abeb9482a 100644
--- a/docs/configuration/storeoptions.md
+++ b/docs/configuration/storeoptions.md
@@ -15,7 +15,7 @@ public static DocumentStore For(Action configure)
return new DocumentStore(options);
}
```
-snippet source | anchor
+snippet source | anchor
The major parts of `StoreOptions` are shown in the class diagram below:
diff --git a/docs/documents/concurrency.md b/docs/documents/concurrency.md
index 9e51f85f6d..cbd7df991d 100644
--- a/docs/documents/concurrency.md
+++ b/docs/documents/concurrency.md
@@ -37,7 +37,7 @@ public class CoffeeShop: Shop
public ICollection Employees { get; set; } = new List();
}
```
-snippet source | anchor
+snippet source | anchor
Or by using Marten's configuration API to do it programmatically:
@@ -62,13 +62,13 @@ To demonstrate the failure case, consider the following acceptance test from M
```cs
[Fact]
-public void update_with_stale_version_standard()
+public async Task update_with_stale_version_standard()
{
var doc1 = new CoffeeShop();
using (var session = theStore.LightweightSession())
{
session.Store(doc1);
- session.SaveChanges();
+ await session.SaveChangesAsync();
}
var session1 = theStore.DirtyTrackedSession();
@@ -83,11 +83,11 @@ public void update_with_stale_version_standard()
session2Copy.Name = "Dominican Joe's";
// Should go through just fine
- session2.SaveChanges();
+ await session2.SaveChangesAsync();
- var ex = Exception.ShouldBeThrownBy(() =>
+ var ex = await Should.ThrowAsync(async () =>
{
- session1.SaveChanges();
+ await session1.SaveChangesAsync();
});
ex.Message.ShouldBe($"Optimistic concurrency check failed for {typeof(Shop).FullName} #{doc1.Id}");
@@ -98,13 +98,11 @@ public void update_with_stale_version_standard()
session2.Dispose();
}
- using (var query = theStore.QuerySession())
- {
- query.Load(doc1.Id).Name.ShouldBe("Dominican Joe's");
- }
+ await using var query = theStore.QuerySession();
+ query.Load(doc1.Id).Name.ShouldBe("Dominican Joe's");
}
```
-snippet source | anchor
+snippet source | anchor
Marten is throwing an `AggregateException` for the entire batch of changes.
diff --git a/docs/documents/deletes.md b/docs/documents/deletes.md
index 7ac98a31e1..ba39fd56f1 100644
--- a/docs/documents/deletes.md
+++ b/docs/documents/deletes.md
@@ -54,7 +54,7 @@ Marten also provides the ability to delete any documents of a certain type meeti
```cs
theSession.DeleteWhere(x => x.Double == 578);
-theSession.SaveChanges();
+await theSession.SaveChangesAsync();
```
snippet source | anchor
@@ -79,17 +79,17 @@ var company1 = new Company { Name = "ECorp" };
session.StoreObjects(new object[] { user1, issue1, company1 });
-session.SaveChanges();
+await session.SaveChangesAsync();
// Delete a mix of documents types
using (var documentSession = theStore.LightweightSession())
{
documentSession.DeleteObjects(new object[] { user1, company1 });
- documentSession.SaveChanges();
+ await documentSession.SaveChangesAsync();
}
```
-snippet source | anchor
+snippet source | anchor
## Soft Deletes
@@ -179,7 +179,7 @@ in this acceptance test from the Marten codebase:
```cs
[Fact]
-public void query_soft_deleted_docs()
+public async Task query_soft_deleted_docs()
{
var user1 = new User { UserName = "foo" };
var user2 = new User { UserName = "bar" };
@@ -188,11 +188,11 @@ public void query_soft_deleted_docs()
using var session = theStore.LightweightSession();
session.Store(user1, user2, user3, user4);
- session.SaveChanges();
+ await session.SaveChangesAsync();
// Deleting 'bar' and 'baz'
session.DeleteWhere(x => x.UserName.StartsWith("b"));
- session.SaveChanges();
+ await session.SaveChangesAsync();
// no where clause, deleted docs should be filtered out
session.Query().OrderBy(x => x.UserName).Select(x => x.UserName)
@@ -207,7 +207,7 @@ public void query_soft_deleted_docs()
```cs
[Fact]
-public void query_soft_deleted_docs()
+public async Task query_soft_deleted_docs()
{
var user1 = new User { UserName = "foo" };
var user2 = new User { UserName = "bar" };
@@ -216,11 +216,11 @@ public void query_soft_deleted_docs()
using var session = theStore.LightweightSession();
session.Store(user1, user2, user3, user4);
- session.SaveChanges();
+ await session.SaveChangesAsync();
// Deleting 'bar' and 'baz'
session.DeleteWhere(x => x.UserName.StartsWith("b"));
- session.SaveChanges();
+ await session.SaveChangesAsync();
// no where clause, deleted docs should be filtered out
session.Query().OrderBy(x => x.UserName).Select(x => x.UserName)
@@ -249,7 +249,7 @@ as shown in this acceptance test:
```cs
[Fact]
-public void query_maybe_soft_deleted_docs()
+public async Task query_maybe_soft_deleted_docs()
{
var user1 = new User { UserName = "foo" };
var user2 = new User { UserName = "bar" };
@@ -258,10 +258,10 @@ public void query_maybe_soft_deleted_docs()
using var session = theStore.LightweightSession();
session.Store(user1, user2, user3, user4);
- session.SaveChanges();
+ await session.SaveChangesAsync();
session.DeleteWhere(x => x.UserName.StartsWith("b"));
- session.SaveChanges();
+ await session.SaveChangesAsync();
// no where clause, all documents are returned
session.Query().Where(x => x.MaybeDeleted()).OrderBy(x => x.UserName).Select(x => x.UserName)
@@ -279,7 +279,7 @@ public void query_maybe_soft_deleted_docs()
```cs
[Fact]
-public void query_maybe_soft_deleted_docs()
+public async Task query_maybe_soft_deleted_docs()
{
var user1 = new User { UserName = "foo" };
var user2 = new User { UserName = "bar" };
@@ -288,10 +288,10 @@ public void query_maybe_soft_deleted_docs()
using var session = theStore.LightweightSession();
session.Store(user1, user2, user3, user4);
- session.SaveChanges();
+ await session.SaveChangesAsync();
session.DeleteWhere(x => x.UserName.StartsWith("b"));
- session.SaveChanges();
+ await session.SaveChangesAsync();
// no where clause, all documents are returned
session.Query().Where(x => x.MaybeDeleted()).OrderBy(x => x.UserName).Select(x => x.UserName)
@@ -363,7 +363,7 @@ as shown below:
```cs
[Fact]
-public void query_is_soft_deleted_docs()
+public async Task query_is_soft_deleted_docs()
{
var user1 = new User { UserName = "foo" };
var user2 = new User { UserName = "bar" };
@@ -372,10 +372,10 @@ public void query_is_soft_deleted_docs()
using var session = theStore.LightweightSession();
session.Store(user1, user2, user3, user4);
- session.SaveChanges();
+ await session.SaveChangesAsync();
session.DeleteWhere(x => x.UserName.StartsWith("b"));
- session.SaveChanges();
+ await session.SaveChangesAsync();
// no where clause
session.Query().Where(x => x.IsDeleted()).OrderBy(x => x.UserName).Select(x => x.UserName)
@@ -393,7 +393,7 @@ public void query_is_soft_deleted_docs()
```cs
[Fact]
-public void query_is_soft_deleted_docs()
+public async Task query_is_soft_deleted_docs()
{
var user1 = new User { UserName = "foo" };
var user2 = new User { UserName = "bar" };
@@ -402,10 +402,10 @@ public void query_is_soft_deleted_docs()
using var session = theStore.LightweightSession();
session.Store(user1, user2, user3, user4);
- session.SaveChanges();
+ await session.SaveChangesAsync();
session.DeleteWhere(x => x.UserName.StartsWith("b"));
- session.SaveChanges();
+ await session.SaveChangesAsync();
// no where clause
session.Query().Where(x => x.IsDeleted()).OrderBy(x => x.UserName).Select(x => x.UserName)
@@ -431,7 +431,7 @@ and the counterpart `DeletedSince(DateTimeOffset)` as shown below:
```cs
[Fact]
-public void query_is_soft_deleted_since_docs()
+public async Task query_is_soft_deleted_since_docs()
{
var user1 = new User { UserName = "foo" };
var user2 = new User { UserName = "bar" };
@@ -440,14 +440,14 @@ public void query_is_soft_deleted_since_docs()
using var session = theStore.LightweightSession();
session.Store(user1, user2, user3, user4);
- session.SaveChanges();
+ await session.SaveChangesAsync();
session.Delete(user3);
- session.SaveChanges();
+ await session.SaveChangesAsync();
var epoch = session.MetadataFor(user3).DeletedAt;
session.Delete(user4);
- session.SaveChanges();
+ await session.SaveChangesAsync();
session.Query().Where(x => x.DeletedSince(epoch.Value)).Select(x => x.UserName)
.ToList().ShouldHaveTheSameElementsAs("jack");
@@ -457,7 +457,7 @@ public void query_is_soft_deleted_since_docs()
```cs
[Fact]
-public void query_is_soft_deleted_since_docs()
+public async Task query_is_soft_deleted_since_docs()
{
var user1 = new User { UserName = "foo" };
var user2 = new User { UserName = "bar" };
@@ -466,14 +466,14 @@ public void query_is_soft_deleted_since_docs()
using var session = theStore.LightweightSession();
session.Store(user1, user2, user3, user4);
- session.SaveChanges();
+ await session.SaveChangesAsync();
session.Delete(user3);
- session.SaveChanges();
+ await session.SaveChangesAsync();
var epoch = session.MetadataFor(user3).DeletedAt;
session.Delete(user4);
- session.SaveChanges();
+ await session.SaveChangesAsync();
session.Query().Where(x => x.DeletedSince(epoch.Value)).Select(x => x.UserName)
.ToList().ShouldHaveTheSameElementsAs("jack");
diff --git a/docs/documents/hierarchies.md b/docs/documents/hierarchies.md
index a596d9caf6..8b18a1f07b 100644
--- a/docs/documents/hierarchies.md
+++ b/docs/documents/hierarchies.md
@@ -33,7 +33,7 @@ using (var session = store.QuerySession())
session.Query().ToList();
}
```
-snippet source | anchor
+snippet source | anchor
With the configuration above, you can now query by `User` and get `AdminUser` and `SuperUser` documents as part of the results,
@@ -153,27 +153,27 @@ Now you can query the "complex" hierarchy in the following ways:
```cs
[Fact]
-public void get_all_subclasses_of_a_subclass()
+public async Task get_all_subclasses_of_a_subclass()
{
var smurf = new Smurf {Ability = "Follow the herd"};
var papa = new PapaSmurf {Ability = "Lead"};
var brainy = new BrainySmurf {Ability = "Invent"};
theSession.Store(smurf, papa, brainy);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Query().Count().ShouldBe(3);
}
[Fact]
-public void get_all_subclasses_of_a_subclass2()
+public async Task get_all_subclasses_of_a_subclass2()
{
var smurf = new Smurf {Ability = "Follow the herd"};
var papa = new PapaSmurf {Ability = "Lead"};
var brainy = new BrainySmurf {Ability = "Invent"};
theSession.Store(smurf, papa, brainy);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Logger = new TestOutputMartenLogger(_output);
@@ -181,20 +181,20 @@ public void get_all_subclasses_of_a_subclass2()
}
[Fact]
-public void get_all_subclasses_of_a_subclass_with_where()
+public async Task get_all_subclasses_of_a_subclass_with_where()
{
var smurf = new Smurf {Ability = "Follow the herd"};
var papa = new PapaSmurf {Ability = "Lead"};
var brainy = new BrainySmurf {Ability = "Invent"};
theSession.Store(smurf, papa, brainy);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Query().Count(s => s.Ability == "Invent").ShouldBe(1);
}
[Fact]
-public void get_all_subclasses_of_a_subclass_with_where_with_camel_casing()
+public async Task get_all_subclasses_of_a_subclass_with_where_with_camel_casing()
{
StoreOptions(_ =>
{
@@ -221,13 +221,13 @@ public void get_all_subclasses_of_a_subclass_with_where_with_camel_casing()
var brainy = new BrainySmurf {Ability = "Invent"};
theSession.Store(smurf, papa, brainy);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Query().Count(s => s.Ability == "Invent").ShouldBe(1);
}
[Fact]
-public void get_all_subclasses_of_an_interface()
+public async Task get_all_subclasses_of_an_interface()
{
var smurf = new Smurf {Ability = "Follow the herd"};
var papa = new PapaSmurf {Ability = "Lead"};
@@ -235,7 +235,7 @@ public void get_all_subclasses_of_an_interface()
var brainy = new BrainySmurf {Ability = "Invent"};
theSession.Store(smurf, papa, brainy, papy);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Query().Count().ShouldBe(3);
}
diff --git a/docs/documents/identity.md b/docs/documents/identity.md
index 21c961bb47..d39eb4f2c0 100644
--- a/docs/documents/identity.md
+++ b/docs/documents/identity.md
@@ -63,7 +63,7 @@ public class NonStandardDoc
public string Name;
}
```
-snippet source | anchor
+snippet source | anchor
The identity property or field can also be configured through `StoreOptions` by using the `Schema` to obtain a document mapping:
@@ -73,7 +73,7 @@ The identity property or field can also be configured through `StoreOptions` by
```cs
storeOptions.Schema.For().Identity(x => x.Name);
```
-snippet source | anchor
+snippet source | anchor
## Guid Identifiers
@@ -96,7 +96,7 @@ options.Policies.ForAllDocuments(m =>
}
});
```
-snippet source | anchor
+snippet source | anchor
It is also possible to use the SequentialGuid id generation algorithm for a specific document type.
@@ -106,7 +106,7 @@ It is also possible to use the SequentialGuid id generation algorithm for a specifi
```cs
options.Schema.For().IdStrategy(new CombGuidIdGeneration());
```
-snippet source | anchor
+snippet source | anchor
## Sequential Identifiers with Hilo
@@ -225,7 +225,7 @@ public class DocumentWithStringId
public string Id { get; set; }
}
```
-snippet source | anchor
+snippet source | anchor
You can use the "identity key" option for identity generation that would create string values of the pattern `[type alias]/[sequence]` where the type alias is typically the document class name in all lower case and the sequence is a _HiLo_ sequence number.
@@ -243,7 +243,7 @@ var store = DocumentStore.For(opts =>
.DocumentAlias("doc");
});
```
-snippet source | anchor
+snippet source | anchor
## Custom Identity Strategies
@@ -267,7 +267,7 @@ public class CustomIdGeneration : IIdGeneration
}
```
-snippet source | anchor
+snippet source | anchor
The `Build()` method should return the actual `IdGenerator<T>` for the document type, where `T` is the type of the Id field.
@@ -287,7 +287,7 @@ options.Policies.ForAllDocuments(m =>
}
});
```
-snippet source | anchor
+snippet source | anchor
It is also possible to define a custom id generation algorithm for a specific document type.
@@ -297,7 +297,7 @@ It is also possible to define a custom id generation algorithm for a specific docum
```cs
options.Schema.For().IdStrategy(new CustomIdGeneration());
```
-snippet source | anchor
+snippet source | anchor
## Strong Typed Identifiers
@@ -459,6 +459,26 @@ public async Task load_many()
}
```
snippet source | anchor
+
+```cs
+[Fact]
+public async Task load_many()
+{
+ var issue1 = new Issue3{Name = Guid.NewGuid().ToString()};
+ var Issue3 = new Issue3{Name = Guid.NewGuid().ToString()};
+ var issue3 = new Issue3{Name = Guid.NewGuid().ToString()};
+ theSession.Store(issue1, Issue3, issue3);
+
+ await theSession.SaveChangesAsync();
+
+ var results = await theSession.Query()
+ .Where(x => x.Id.IsOneOf(issue1.Id, Issue3.Id, issue3.Id))
+ .ToListAsync();
+
+ results.Count.ShouldBe(3);
+}
+```
+snippet source | anchor
::: warning
@@ -532,7 +552,7 @@ public class LimitedDoc
public LowerLimit Lower { get; set; }
}
```
-snippet source | anchor
+snippet source | anchor
```cs
[ValueObject]
@@ -570,7 +590,7 @@ And the `UpperLimit` and `LowerLimit` value types can be registered with Marten
opts.RegisterValueType(typeof(UpperLimit));
opts.RegisterValueType(typeof(LowerLimit));
```
-snippet source | anchor
+snippet source | anchor
```cs
// opts is a StoreOptions just like you'd have in
@@ -608,7 +628,7 @@ public async Task store_several_and_order_by()
ordered.ShouldHaveTheSameElementsAs(doc1.Id, doc4.Id, doc3.Id, doc2.Id);
}
```
-snippet source | anchor
+snippet source | anchor
```cs
[Fact]
diff --git a/docs/documents/multi-tenancy.md b/docs/documents/multi-tenancy.md
index 9dc8ab7711..d6befb8edb 100644
--- a/docs/documents/multi-tenancy.md
+++ b/docs/documents/multi-tenancy.md
@@ -18,7 +18,7 @@ using (var session = theStore.LightweightSession("tenant1"))
{
session.Store(new User { Id = "u1", UserName = "Bill", Roles = new[] { "admin" } });
session.Store(new User { Id = "u2", UserName = "Lindsey", Roles = new string[0] });
- session.SaveChanges();
+ await session.SaveChangesAsync();
}
```
snippet source | anchor
@@ -29,7 +29,7 @@ using (var session = theStore.LightweightSession("tenant1"))
{
session.Store(new User { Id = "u1", UserName = "Bill", Roles = new[] { "admin" } });
session.Store(new User { Id = "u2", UserName = "Lindsey", Roles = new string[0] });
- session.SaveChanges();
+ await session.SaveChangesAsync();
}
```
snippet source | anchor
@@ -40,10 +40,10 @@ using (var session = store.LightweightSession("tenant1"))
{
session.Store(new User { UserName = "Bill" });
session.Store(new User { UserName = "Lindsey" });
- session.SaveChanges();
+ await session.SaveChangesAsync();
}
```
-snippet source | anchor
+snippet source | anchor
As with storing, the load operations respect tenancy of the session.
@@ -61,7 +61,7 @@ using (var query = store.QuerySession("tenant1"))
.ShouldHaveTheSameElementsAs("Bill", "Lindsey");
}
```
-snippet source | anchor
+snippet source | anchor
Lastly, unlike reading operations, `IDocumentSession.Store` offers an overload to explicitly pass in a tenant identifier, bypassing any tenancy associated with the session. A similar overload for tenancy exists for `IDocumentStore.BulkInsert`.
@@ -140,7 +140,7 @@ using (var session = store.QuerySession())
session.Query().Count(x => x.TenantIsOneOf("Red")).ShouldBe(11);
}
```
-snippet source | anchor
+snippet source | anchor
```cs
using var store = DocumentStore.For(opts =>
@@ -208,7 +208,7 @@ using (var session = store.QuerySession())
session.Query().Count(x => x.TenantIsOneOf("Red")).ShouldBe(11);
}
```
-snippet source | anchor
+snippet source | anchor
In some cases, you may want to disable using the default tenant for storing documents; to do so, set `StoreOptions.DefaultTenantUsageEnabled` to `false`. With this option disabled, a non-default tenant must be passed via method argument or `SessionOptions` when creating a session with the document store. Marten will throw a `DefaultTenantUsageDisabledException` if a session is created using the default tenant.
@@ -230,7 +230,7 @@ using (var query = store.QuerySession("tenant1"))
.ShouldHaveTheSameElementsAs("Bill", "Lindsey");
}
```
-snippet source | anchor
+snippet source | anchor
Marten will automatically filter the LINQ query for the current tenant _if the current document type is tenanted_. However, if
@@ -244,14 +244,14 @@ filter:
var actual = await query.Query().Where(x => x.TenantIsOneOf("Green", "Red") && x.Flag)
.OrderBy(x => x.Id).Select(x => x.Id).ToListAsync();
```
-snippet source | anchor
+snippet source | anchor
```cs
// query data for a selected list of tenants
var actual = await query.Query().Where(x => x.TenantIsOneOf("Green", "Red") && x.Flag)
.OrderBy(x => x.Id).Select(x => x.Id).ToListAsync();
```
-snippet source | anchor
+snippet source | anchor
Or the `AnyTenant()` filter:
@@ -263,14 +263,14 @@ Or the `AnyTenant()` filter:
var actual = query.Query().Where(x => x.AnyTenant() && x.Flag)
.OrderBy(x => x.Id).Select(x => x.Id).ToArray();
```
-snippet source | anchor
+snippet source | anchor
```cs
// query data across all tenants
var actual = query.Query().Where(x => x.AnyTenant() && x.Flag)
.OrderBy(x => x.Id).Select(x => x.Id).ToArray();
```
-snippet source | anchor
+snippet source | anchor
## Configuring Tenancy
@@ -330,7 +330,7 @@ storeOptions.Policies.AllDocumentsAreMultiTenantedWithPartitioning(x =>
x.ByExternallyManagedRangePartitions();
});
```
-snippet source | anchor
+snippet source | anchor
To enable partitioning for a specific document type, use this option:
@@ -348,7 +348,7 @@ var store = DocumentStore.For(opts =>
});
});
```
-snippet source | anchor
+snippet source | anchor
And lastly, if you need to use a mix of tenanted and global document types, but still want to use a consistent
@@ -376,7 +376,7 @@ var store = DocumentStore.For(opts =>
});
});
```
-snippet source | anchor
+snippet source | anchor
### Tenancy Through Policies
@@ -390,7 +390,7 @@ storeOptions.Policies.AllDocumentsAreMultiTenanted();
// Shorthand for
// storeOptions.Policies.ForAllDocuments(_ => _.TenancyStyle = TenancyStyle.Conjoined);
```
-snippet source | anchor
+snippet source | anchor
### Tenancy At Document Level & Policy Overrides
diff --git a/docs/documents/partial-updates-patching.md b/docs/documents/partial-updates-patching.md
index da84173f51..9c8509dffa 100644
--- a/docs/documents/partial-updates-patching.md
+++ b/docs/documents/partial-updates-patching.md
@@ -37,7 +37,7 @@ To apply a patch to all documents matching given criteria, use the following s
// Change every Target document where the Color is Blue
theSession.Patch(x => x.Color == Colors.Blue).Set(x => x.Number, 2);
```
-snippet source | anchor
+snippet source | anchor
## Set a single Property/Field
@@ -49,16 +49,16 @@ shown below:
```cs
[Fact]
-public void set_an_immediate_property_by_id()
+public async Task set_an_immediate_property_by_id()
{
var target = Target.Random(true);
target.Number = 5;
theSession.Store(target);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Patch(target.Id).Set(x => x.Number, 10);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
@@ -66,7 +66,7 @@ public void set_an_immediate_property_by_id()
}
}
```
-snippet source | anchor
+snippet source | anchor
### Set a new Property/Field
@@ -79,14 +79,14 @@ To initialize a new property on existing documents:
const string where = "(data ->> 'UpdatedAt') is null";
theSession.Query(where).Count.ShouldBe(3);
theSession.Patch(new WhereFragment(where)).Set("UpdatedAt", DateTime.UtcNow);
-theSession.SaveChanges();
+await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
query.Query(where).Count.ShouldBe(0);
}
```
-snippet source | anchor
+snippet source | anchor
## Duplicate an existing Property/Field
@@ -99,10 +99,10 @@ To copy an existing value to a new location:
var target = Target.Random();
target.AnotherString = null;
theSession.Store(target);
-theSession.SaveChanges();
+await theSession.SaveChangesAsync();
theSession.Patch(target.Id).Duplicate(t => t.String, t => t.AnotherString);
-theSession.SaveChanges();
+await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
@@ -110,7 +110,7 @@ using (var query = theStore.QuerySession())
result.AnotherString.ShouldBe(target.String);
}
```
-snippet source | anchor
+snippet source | anchor
The same value can be copied to multiple new locations:
@@ -123,7 +123,7 @@ theSession.Patch(target.Id).Duplicate(t => t.String,
t => t.Inner.String,
t => t.Inner.AnotherString);
```
-snippet source | anchor
+snippet source | anchor
The new locations need not exist in the persisted document; null or absent parents will be initialized
@@ -136,16 +136,16 @@ To increment a persisted value in the persisted document, use this operation:
```cs
[Fact]
-public void increment_for_int()
+public async Task increment_for_int()
{
var target = Target.Random();
target.Number = 6;
theSession.Store(target);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Patch(target.Id).Increment(x => x.Number);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
@@ -153,7 +153,7 @@ public void increment_for_int()
}
}
```
-snippet source | anchor
+snippet source | anchor
By default, the `Patch.Increment()` operation will add 1 to the existing value. You can optionally override the increment:
@@ -162,16 +162,16 @@ By default, the `Patch.Increment()` operation will add 1 to the existing value.
```cs
[Fact]
-public void increment_for_int_with_explicit_increment()
+public async Task increment_for_int_with_explicit_increment()
{
var target = Target.Random();
target.Number = 6;
theSession.Store(target);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Patch(target.Id).Increment(x => x.Number, 3);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
@@ -179,7 +179,7 @@ public void increment_for_int_with_explicit_increment()
}
}
```
-snippet source | anchor
+snippet source | anchor
## Append Element to a Child Collection
@@ -194,7 +194,7 @@ The `Patch.Append()` operation adds a new item to the end of a child collection:
```cs
[Fact]
-public void append_complex_element()
+public async Task append_complex_element()
{
var target = Target.Random(true);
var initialCount = target.Children.Length;
@@ -202,10 +202,10 @@ public void append_complex_element()
var child = Target.Random();
theSession.Store(target);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Patch(target.Id).Append(x => x.Children, child);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
@@ -216,7 +216,7 @@ public void append_complex_element()
}
}
```
-snippet source | anchor
+snippet source | anchor
The `Patch.AppendIfNotExists()` operation will treat the child collection as a set rather than a list and only append the element if it does not already exist within the collection
@@ -233,7 +233,7 @@ being 0 so that a new item would be inserted at the beginning of the child colle
```cs
[Fact]
-public void insert_first_complex_element()
+public async Task insert_first_complex_element()
{
var target = Target.Random(true);
var initialCount = target.Children.Length;
@@ -241,10 +241,10 @@ public void insert_first_complex_element()
var child = Target.Random();
theSession.Store(target);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Patch(target.Id).Insert(x => x.Children, child);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
@@ -255,7 +255,7 @@ public void insert_first_complex_element()
}
}
```
-snippet source | anchor
+snippet source | anchor
The `Patch.InsertIfNotExists()` operation will only insert the element if the element at the designated index does not already exist.
@@ -268,7 +268,7 @@ The `Patch.Remove()` operation removes the given item from a child collection:
```cs
[Fact]
-public void remove_primitive_element()
+public async Task remove_primitive_element()
{
var random = new Random();
var target = Target.Random();
@@ -280,10 +280,10 @@ public void remove_primitive_element()
var child = target.NumberArray[random.Next(0, initialCount)];
theSession.Store(target);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Patch(target.Id).Remove(x => x.NumberArray, child);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
@@ -294,7 +294,7 @@ public void remove_primitive_element()
}
}
```
-snippet source | anchor
+snippet source | anchor
Removing complex items can also be accomplished; matching is performed on all fields:
@@ -303,7 +303,7 @@ Removing complex items can also be accomplished; matching is performed on all fi
```cs
[Fact]
-public void remove_complex_element()
+public async Task remove_complex_element()
{
var target = Target.Random(true);
var initialCount = target.Children.Length;
@@ -312,10 +312,10 @@ public void remove_complex_element()
var child = target.Children[random.Next(0, initialCount)];
theSession.Store(target);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Patch(target.Id).Remove(x => x.Children, child);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
@@ -326,7 +326,7 @@ public void remove_complex_element()
}
}
```
-snippet source | anchor
+snippet source | anchor
To remove recurring values from a collection, specify `RemoveAction.RemoveAll`:
@@ -335,7 +335,7 @@ To remove recurring values from a collection, specify `RemoveAction.RemoveAll`:
```cs
[Fact]
-public void remove_repeated_primitive_elements()
+public async Task remove_repeated_primitive_elements()
{
var random = new Random();
var target = Target.Random();
@@ -354,10 +354,10 @@ public void remove_repeated_primitive_elements()
}
theSession.Store(target);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Patch(target.Id).Remove(x => x.NumberArray, child, RemoveAction.RemoveAll);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
@@ -368,7 +368,7 @@ public void remove_repeated_primitive_elements()
}
}
```
-snippet source | anchor
+snippet source | anchor
## Rename a Property/Field
@@ -381,17 +381,17 @@ old name to the new name.
```cs
[Fact]
-public void rename_deep_prop()
+public async Task rename_deep_prop()
{
var target = Target.Random(true);
target.Inner.String = "Foo";
target.Inner.AnotherString = "Bar";
theSession.Store(target);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Patch(target.Id).Rename("String", x => x.Inner.AnotherString);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
@@ -401,7 +401,7 @@ public void rename_deep_prop()
}
}
```
-snippet source | anchor
+snippet source | anchor
Renaming can be used on nested values.
@@ -418,7 +418,7 @@ To delete a redundant property no longer available on the class use the string o
```cs
theSession.Patch(target.Id).Delete("String");
```
-snippet source | anchor
+snippet source | anchor
To delete a redundant property nested on a child class specify a location lambda:
@@ -428,7 +428,7 @@ To delete a redundant property nested on a child class specify a location lambda
```cs
theSession.Patch(target.Id).Delete("String", t => t.Inner);
```
-snippet source | anchor
+snippet source | anchor
A current property may be erased simply with a lambda:
@@ -438,7 +438,7 @@ A current property may be erased simply with a lambda:
```cs
theSession.Patch(target.Id).Delete(t => t.Inner);
```
-snippet source | anchor
+snippet source | anchor
Many documents may be patched using a where expressions:
@@ -449,14 +449,14 @@ Many documents may be patched using a where expressions:
const string where = "(data ->> 'String') is not null";
theSession.Query(where).Count.ShouldBe(15);
theSession.Patch(new WhereFragment(where)).Delete("String");
-theSession.SaveChanges();
+await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
query.Query(where).Count(t => t.String != null).ShouldBe(0);
}
```
-snippet source | anchor
+snippet source | anchor
## Multi-field patching/chaining patch operations
@@ -465,18 +465,18 @@ using (var query = theStore.QuerySession())
```cs
[Fact]
-public void able_to_chain_patch_operations()
+public async Task able_to_chain_patch_operations()
{
var target = Target.Random(true);
target.Number = 5;
theSession.Store(target);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Patch(target.Id)
.Set(x => x.Number, 10)
.Increment(x => x.Number, 10);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
@@ -484,5 +484,5 @@ public void able_to_chain_patch_operations()
}
}
```
-snippet source | anchor
+snippet source | anchor
diff --git a/docs/documents/plv8.md b/docs/documents/plv8.md
index 58decd1064..4a7f45e857 100644
--- a/docs/documents/plv8.md
+++ b/docs/documents/plv8.md
@@ -89,16 +89,16 @@ shown below:
```cs
[Fact]
-public void set_an_immediate_property_by_id()
+public async Task set_an_immediate_property_by_id()
{
var target = Target.Random(true);
target.Number = 5;
theSession.Store(target);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Patch(target.Id).Set(x => x.Number, 10);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
@@ -119,7 +119,7 @@ To initialize a new property on existing documents:
const string where = "(data ->> 'UpdatedAt') is null";
theSession.Query(where).Count.ShouldBe(3);
theSession.Patch(new WhereFragment(where)).Set("UpdatedAt", DateTime.UtcNow);
-theSession.SaveChanges();
+await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
@@ -139,10 +139,10 @@ To copy an existing value to a new location:
var target = Target.Random();
target.AnotherString = null;
theSession.Store(target);
-theSession.SaveChanges();
+await theSession.SaveChangesAsync();
theSession.Patch(target.Id).Duplicate(t => t.String, t => t.AnotherString);
-theSession.SaveChanges();
+await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
@@ -176,16 +176,16 @@ To increment a persisted value in the persisted document, use this operation:
```cs
[Fact]
-public void increment_for_int()
+public async Task increment_for_int()
{
var target = Target.Random();
target.Number = 6;
theSession.Store(target);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Patch(target.Id).Increment(x => x.Number);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
@@ -202,16 +202,16 @@ By default, the `Patch.Increment()` operation will add 1 to the existing value.
```cs
[Fact]
-public void increment_for_int_with_explicit_increment()
+public async Task increment_for_int_with_explicit_increment()
{
var target = Target.Random();
target.Number = 6;
theSession.Store(target);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Patch(target.Id).Increment(x => x.Number, 3);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
@@ -234,7 +234,7 @@ The `Patch.Append()` operation adds a new item to the end of a child collection:
```cs
[Fact]
-public void append_complex_element()
+public async Task append_complex_element()
{
var target = Target.Random(true);
var initialCount = target.Children.Length;
@@ -242,10 +242,10 @@ public void append_complex_element()
var child = Target.Random();
theSession.Store(target);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Patch(target.Id).Append(x => x.Children, child);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
@@ -273,7 +273,7 @@ being 0 so that a new item would be inserted at the beginning of the child colle
```cs
[Fact]
-public void insert_first_complex_element()
+public async Task insert_first_complex_element()
{
var target = Target.Random(true);
var initialCount = target.Children.Length;
@@ -281,10 +281,10 @@ public void insert_first_complex_element()
var child = Target.Random();
theSession.Store(target);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Patch(target.Id).Insert(x => x.Children, child);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
@@ -308,7 +308,7 @@ The `Patch.Remove()` operation removes the given item from a child collection:
```cs
[Fact]
-public void remove_primitive_element()
+public async Task remove_primitive_element()
{
var target = Target.Random();
target.NumberArray = new[] { Random.Shared.Next(0, 10), Random.Shared.Next(0, 10), Random.Shared.Next(0, 10) };
@@ -319,10 +319,10 @@ public void remove_primitive_element()
var child = target.NumberArray[Random.Shared.Next(0, initialCount)];
theSession.Store(target);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Patch(target.Id).Remove(x => x.NumberArray, child);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
@@ -342,7 +342,7 @@ Removing complex items can also be accomplished; matching is performed on all fi
```cs
[Fact]
-public void remove_complex_element()
+public async Task remove_complex_element()
{
var target = Target.Random(true);
var initialCount = target.Children.Length;
@@ -351,10 +351,10 @@ public void remove_complex_element()
var child = target.Children[random.Next(0, initialCount)];
theSession.Store(target);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Patch(target.Id).Remove(x => x.Children, child);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
@@ -374,7 +374,7 @@ To remove recurring values from a collection, specify `RemoveAction.RemoveAll`:
```cs
[Fact]
-public void remove_repeated_primitive_elements()
+public async Task remove_repeated_primitive_elements()
{
var random = new Random();
var target = Target.Random();
@@ -393,10 +393,10 @@ public void remove_repeated_primitive_elements()
}
theSession.Store(target);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Patch(target.Id).Remove(x => x.NumberArray, child, RemoveAction.RemoveAll);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
@@ -420,17 +420,17 @@ old name to the new name.
```cs
[Fact]
-public void rename_deep_prop()
+public async Task rename_deep_prop()
{
var target = Target.Random(true);
target.Inner.String = "Foo";
target.Inner.AnotherString = "Bar";
theSession.Store(target);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
theSession.Patch(target.Id).Rename("String", x => x.Inner.AnotherString);
- theSession.SaveChanges();
+ await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
@@ -488,7 +488,7 @@ Many documents may be patched using a where expression:
const string where = "(data ->> 'String') is not null";
theSession.Query(where).Count.ShouldBe(15);
theSession.Patch(new WhereFragment(where)).Delete("String");
-theSession.SaveChanges();
+await theSession.SaveChangesAsync();
using (var query = theStore.QuerySession())
{
@@ -589,13 +589,13 @@ of Linq queries. If you only care about the transformed JSON, you use this synta
```cs
[Fact]
-public void can_select_a_string_field_in_compiled_query()
+public async Task can_select_a_string_field_in_compiled_query()
{
var user = new User { FirstName = "Eric", LastName = "Berry" };
using var session = theStore.LightweightSession();
session.Store(user);
- session.SaveChanges();
+ await session.SaveChangesAsync();
var name = session.Query().Select(x => x.FirstName)
.Single();
diff --git a/docs/documents/querying/batched-queries.md b/docs/documents/querying/batched-queries.md
index 4fe7002768..b759ff5c65 100644
--- a/docs/documents/querying/batched-queries.md
+++ b/docs/documents/querying/batched-queries.md
@@ -52,7 +52,7 @@ await batch.Execute();
var internalUser = await firstInternal;
Debug.WriteLine($"The first internal user is {internalUser.FirstName} {internalUser.LastName}");
```
-snippet source | anchor
+snippet source | anchor
## Combining Compiled Queries and Batch Queries
@@ -74,7 +74,7 @@ public class FindByFirstName: ICompiledQuery
}
}
```
-snippet source | anchor
+snippet source | anchor
To use that compiled query class in a batch query, you simply use the `IBatchedQuery.Query(ICompiledQuery)` syntax shown below:
@@ -92,7 +92,7 @@ await batch.Execute();
(await justin).Id.ShouldBe(user1.Id);
(await tamba).Id.ShouldBe(user2.Id);
```
-snippet source | anchor
+snippet source | anchor
## Running Synchronously
@@ -112,7 +112,7 @@ batch.ExecuteSynchronously();
justin.Result.Id.ShouldBe(user1.Id);
tamba.Result.Id.ShouldBe(user2.Id);
```
-snippet source | anchor
+snippet source | anchor
The mechanics of running synchronously are identical except for calling `IBatchedQuery.ExecuteSynchronously()`.
diff --git a/docs/documents/querying/compiled-queries.md b/docs/documents/querying/compiled-queries.md
index 4327db05f0..1dcc7ac00c 100644
--- a/docs/documents/querying/compiled-queries.md
+++ b/docs/documents/querying/compiled-queries.md
@@ -52,7 +52,7 @@ public class FindByFirstName: ICompiledQuery
}
}
```
-snippet source | anchor
+snippet source | anchor
::: tip
@@ -73,7 +73,7 @@ var justin = session.Query(new FindByFirstName { FirstName = "Justin" });
var tamba = await session.QueryAsync(new FindByFirstName { FirstName = "Tamba" });
```
-snippet source | anchor
+snippet source | anchor
Or, to use it as part of a batched query, use this syntax:
@@ -91,7 +91,7 @@ await batch.Execute();
(await justin).Id.ShouldBe(user1.Id);
(await tamba).Id.ShouldBe(user2.Id);
```
-snippet source | anchor
+snippet source | anchor
## How Does It Work?
@@ -242,14 +242,14 @@ on the query:
```cs
[Fact]
-public void simple_compiled_include_for_a_single_document()
+public async Task simple_compiled_include_for_a_single_document()
{
var user = new User();
var issue = new Issue { AssigneeId = user.Id, Title = "Garage Door is busted" };
using var session = theStore.IdentitySession();
session.Store