diff --git a/docs/asciidoc/Aggregations/Bucket/Children/ChildrenAggregationMapping.doc.asciidoc b/docs/asciidoc/Aggregations/Bucket/Children/ChildrenAggregationMapping.doc.asciidoc deleted file mode 100644 index e03f8eb564a..00000000000 --- a/docs/asciidoc/Aggregations/Bucket/Children/ChildrenAggregationMapping.doc.asciidoc +++ /dev/null @@ -1,16 +0,0 @@ -To use the child aggregation you have to make sure -a `_parent` mapping is in place, here we create the project -index with two mapped types, `project` and `commitactivity` and -we add a `_parent` mapping from `commitactivity` to `parent` - -[source, csharp] ----- -var createProjectIndex = TestClient.GetClient().CreateIndex(typeof(Project), c => c - .Mappings(map=>map - .Map(m=>m.AutoMap()) - .Map(m=>m - .Parent() - ) - ) -); ----- diff --git a/docs/asciidoc/Aggregations/WritingAggregations.doc.asciidoc b/docs/asciidoc/Aggregations/WritingAggregations.doc.asciidoc deleted file mode 100644 index 7f2a8739c7d..00000000000 --- a/docs/asciidoc/Aggregations/WritingAggregations.doc.asciidoc +++ /dev/null @@ -1,78 +0,0 @@ -Aggregations are arguably one of the most powerful features of Elasticsearch. -NEST allows you to write your aggregations using a strict fluent dsl, a verbatim object initializer -syntax that maps verbatim to the elasticsearch API -a more terse object initializer aggregation DSL. - -Three different ways, yikes thats a lot to take in! Lets go over them one by one and explain when you might -want to use which one. - -The fluent lambda syntax is the most terse way to write aggregations. 
-It benefits from types that are carried over to sub aggregations - -[source, csharp] ----- -s => s -.Aggregations(aggs => aggs - .Children("name_of_child_agg", child => child - .Aggregations(childAggs => childAggs - .Average("average_per_child", avg => avg.Field(p => p.ConfidenceFactor)) - .Max("max_per_child", avg => avg.Field(p => p.ConfidenceFactor)) - ) - ) -) ----- -The object initializer syntax (OIS) is a one-to-one mapping with how aggregations -have to be represented in the Elasticsearch API. While it has the benefit of being a one-to-one -mapping, being dictionary based in C# means it can grow exponentially in complexity rather quickly. - -[source, csharp] ----- -new SearchRequest -{ - Aggregations = new ChildrenAggregation("name_of_child_agg", typeof(CommitActivity)) - { - Aggregations = - new AverageAggregation("average_per_child", "confidenceFactor") - && new MaxAggregation("max_per_child", "confidenceFactor") - } -} ----- -For this reason the OIS syntax can be shortened dramatically by using `*Agg` related family, -These allow you to forego introducing intermediary Dictionaries to represent the aggregation DSL. -It also allows you to combine multiple aggregations using bitwise AND (` -`) operator. - -Compare the following example with the previous vanilla OIS syntax - -[source, csharp] ----- -new SearchRequest -{ - Aggregations = new ChildrenAggregation("name_of_child_agg", typeof(CommitActivity)) - { - Aggregations = - new AverageAggregation("average_per_child", Field(p => p.ConfidenceFactor)) - && new MaxAggregation("max_per_child", Field(p => p.ConfidenceFactor)) - } -} ----- -An advanced scenario may involve an existing collection of aggregation functions that should be set as aggregations -on the request. Using LINQ's `.Aggregate()` method, each function can be applied to the aggregation descriptor -(`childAggs` below) in turn, returning the descriptor after each function application. 
- -[source, csharp] ----- -var aggregations = new List, IAggregationContainer>> -{ - a => a.Average("average_per_child", avg => avg.Field(p => p.ConfidenceFactor)), - a => a.Max("max_per_child", avg => avg.Field(p => p.ConfidenceFactor)) -}; -return s => s - .Aggregations(aggs => aggs - .Children("name_of_child_agg", child => child - .Aggregations(childAggs => - aggregations.Aggregate(childAggs, (acc, agg) => { agg(acc); return acc; }) - ) - ) - ); ----- diff --git a/docs/asciidoc/ClientConcepts/ConnectionPooling/BuildingBlocks/Transports.Doc.asciidoc b/docs/asciidoc/ClientConcepts/ConnectionPooling/BuildingBlocks/Transports.Doc.asciidoc deleted file mode 100644 index 359ea30aefc..00000000000 --- a/docs/asciidoc/ClientConcepts/ConnectionPooling/BuildingBlocks/Transports.Doc.asciidoc +++ /dev/null @@ -1,39 +0,0 @@ -= Transports - -The `ITransport` interface can be seen as the motor block of the client. It's interface is deceitfully simple. -It's ultimately responsible from translating a client call to a response. If for some reason you do not agree with the way we wrote -the internals of the client, by implementing a custom `ITransport`, you can circumvent all of it and introduce your own. - - - -Transport is generically typed to a type that implements IConnectionConfigurationValues -This is the minimum ITransport needs to report back for the client to function. 
-e.g in the low level client, transport is instantiated like this: - -[source, csharp] ----- -var lowLevelTransport = new Transport(new ConnectionConfiguration()); ----- -In the high level client like this: - -[source, csharp] ----- -var highlevelTransport = new Transport(new ConnectionSettings()); ----- -[source, csharp] ----- -var connectionPool = new SingleNodeConnectionPool(new Uri("http://localhost:9200")); -var inMemoryTransport = new Transport(new ConnectionSettings(connectionPool, new InMemoryConnection())); ----- -The only two methods on `ITransport` are `Request()` and `RequestAsync()`, the default `ITransport` implementation is responsible for introducing -many of the building blocks in the client, if these do not work for you can swap them out for your own custom `ITransport` implementation. -If you feel this need, please let us know as we'd love to learn why you've go down this route! - -[source, csharp] ----- -var response = inMemoryTransport.Request>(HttpMethod.GET, "/_search", new { query = new { match_all = new { } } }); ----- -[source, csharp] ----- -response = await inMemoryTransport.RequestAsync>(HttpMethod.GET, "/_search", new { query = new { match_all = new { } } }); ----- diff --git a/docs/asciidoc/ClientConcepts/ConnectionPooling/Exceptions/UnexpectedExceptions.doc.asciidoc b/docs/asciidoc/ClientConcepts/ConnectionPooling/Exceptions/UnexpectedExceptions.doc.asciidoc deleted file mode 100644 index 1dc333afec6..00000000000 --- a/docs/asciidoc/ClientConcepts/ConnectionPooling/Exceptions/UnexpectedExceptions.doc.asciidoc +++ /dev/null @@ -1,114 +0,0 @@ -== Unexpected exceptions -When a client call throws an exception that the IConnction can not handle, this exception will bubble -out the client as an UnexpectedElasticsearchClientException, regardless whether the client is configured to throw or not. -An IConnection is in charge of knowning what exceptions it can recover from or not. 
The default IConnection that is based on WebRequest can and -will recover from WebExceptions but others will be grounds for immediately exiting the pipeline. - - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .ClientCalls(r => r.SucceedAlways()) - .ClientCalls(r => r.OnPort(9201).FailAlways(new Exception("boom!"))) - .StaticConnectionPool() - .Settings(s => s.DisablePing()) - ); -audit = await audit.TraceCall( - new ClientCall { - { AuditEvent.HealthyResponse, 9200 }, - } - ); -audit = await audit.TraceUnexpectedException( - new ClientCall { - { AuditEvent.BadResponse, 9201 }, - }, - (e) => - { - e.FailureReason.Should().Be(PipelineFailure.Unexpected); - e.InnerException.Should().NotBeNull(); - e.InnerException.Message.Should().Be("boom!"); - } - ); -e.FailureReason.Should().Be(PipelineFailure.Unexpected); -e.InnerException.Should().NotBeNull(); -e.InnerException.Message.Should().Be("boom!"); ----- - -Sometimes an unexpected exception happens further down in the pipeline, this is why we -wrap them inside an UnexpectedElasticsearchClientException so that information about where -in the pipeline the unexpected exception is not lost, here a call to 9200 fails using a webexception. -It then falls over to 9201 which throws an hard exception from within IConnection. We assert that we -can still see the audit trail for the whole coordinated request. 
- - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) -#if DOTNETCORE - .ClientCalls(r => r.OnPort(9200).FailAlways(new System.Net.Http.HttpRequestException("recover"))) -#else - .ClientCalls(r => r.OnPort(9200).FailAlways(new WebException("recover"))) -#endif - .ClientCalls(r => r.OnPort(9201).FailAlways(new Exception("boom!"))) - .StaticConnectionPool() - .Settings(s => s.DisablePing()) - ); -audit = await audit.TraceUnexpectedException( - new ClientCall { - { AuditEvent.BadResponse, 9200 }, - { AuditEvent.BadResponse, 9201 }, - }, - (e) => - { - e.FailureReason.Should().Be(PipelineFailure.Unexpected); - e.InnerException.Should().NotBeNull(); - e.InnerException.Message.Should().Be("boom!"); - } - ); -e.FailureReason.Should().Be(PipelineFailure.Unexpected); -e.InnerException.Should().NotBeNull(); -e.InnerException.Message.Should().Be("boom!"); ----- - -An unexpected hard exception on ping and sniff is something we *do* try to revover from and failover. -Here pinging nodes on first use is enabled and 9200 throws on ping, we still fallover to 9201's ping succeeds. 
-However the client call on 9201 throws a hard exception we can not recover from - - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .Ping(r => r.OnPort(9200).FailAlways(new Exception("ping exception"))) - .Ping(r => r.OnPort(9201).SucceedAlways()) - .ClientCalls(r => r.OnPort(9201).FailAlways(new Exception("boom!"))) - .StaticConnectionPool() - .AllDefaults() - ); ----- -[source, csharp] ----- -audit = await audit.TraceUnexpectedException( - new ClientCall { - { AuditEvent.PingFailure, 9200 }, - { AuditEvent.PingSuccess, 9201 }, - { AuditEvent.BadResponse, 9201 }, - }, - (e) => - { - e.FailureReason.Should().Be(PipelineFailure.Unexpected); -e.InnerException.Should().NotBeNull(); - e.InnerException.Message.Should().Be("boom!"); -e.SeenExceptions.Should().NotBeEmpty(); - var pipelineException = e.SeenExceptions.First(); - pipelineException.FailureReason.Should().Be(PipelineFailure.PingFailure); - pipelineException.InnerException.Message.Should().Be("ping exception"); -var pingException = e.AuditTrail.First(a => a.Event == AuditEvent.PingFailure).Exception; - pingException.Should().NotBeNull(); - pingException.Message.Should().Be("ping exception"); - - } -); ----- diff --git a/docs/asciidoc/ClientConcepts/ConnectionPooling/Exceptions/UnrecoverableExceptions.doc.asciidoc b/docs/asciidoc/ClientConcepts/ConnectionPooling/Exceptions/UnrecoverableExceptions.doc.asciidoc deleted file mode 100644 index 7a2c0e52b85..00000000000 --- a/docs/asciidoc/ClientConcepts/ConnectionPooling/Exceptions/UnrecoverableExceptions.doc.asciidoc +++ /dev/null @@ -1,44 +0,0 @@ -== Unrecoverable exceptions -Unrecoverable exceptions are excepted exceptions that are grounds to exit the client pipeline immediately. -By default the client won't throw on any ElasticsearchClientException but return an invalid response. -You can configure the client to throw using ThrowExceptions() on ConnectionSettings. 
The following test -both a client that throws and one that returns an invalid response with an `.OriginalException` exposed - - -[source, csharp] ----- -var recoverablExceptions = new[] - { - new PipelineException(PipelineFailure.BadResponse), - new PipelineException(PipelineFailure.PingFailure), - }; -recoverablExceptions.Should().OnlyContain(e => e.Recoverable); -var unrecoverableExceptions = new[] - { - new PipelineException(PipelineFailure.CouldNotStartSniffOnStartup), - new PipelineException(PipelineFailure.SniffFailure), - new PipelineException(PipelineFailure.Unexpected), - new PipelineException(PipelineFailure.BadAuthentication), - new PipelineException(PipelineFailure.MaxRetriesReached), - new PipelineException(PipelineFailure.MaxTimeoutReached) - }; -unrecoverableExceptions.Should().OnlyContain(e => !e.Recoverable); -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .Ping(r => r.SucceedAlways()) - .ClientCalls(r => r.FailAlways(401)) - .StaticConnectionPool() - .AllDefaults() - ); -audit = await audit.TraceElasticsearchException( - new ClientCall { - { AuditEvent.PingSuccess, 9200 }, - { AuditEvent.BadResponse, 9200 }, - }, - (e) => - { - e.FailureReason.Should().Be(PipelineFailure.BadAuthentication); - } - ); -e.FailureReason.Should().Be(PipelineFailure.BadAuthentication); ----- diff --git a/docs/asciidoc/ClientConcepts/ConnectionPooling/Failover/FallingOver.doc.asciidoc b/docs/asciidoc/ClientConcepts/ConnectionPooling/Failover/FallingOver.doc.asciidoc deleted file mode 100644 index f83117ff4d0..00000000000 --- a/docs/asciidoc/ClientConcepts/ConnectionPooling/Failover/FallingOver.doc.asciidoc +++ /dev/null @@ -1,80 +0,0 @@ -== Fail over -When using connection pooling and the pool has sufficient nodes a request will be retried if -the call to a node throws an exception or returns a 502 or 503 - - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .ClientCalls(r => r.FailAlways()) - .ClientCalls(r => 
r.OnPort(9201).SucceedAlways()) - .StaticConnectionPool() - .Settings(s => s.DisablePing()) - ); -audit = await audit.TraceCall( - new ClientCall { - { BadResponse, 9200 }, - { HealthyResponse, 9201 }, - } - ); ----- -502 Bad Gateway -Will be treated as an error that requires retrying - - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .ClientCalls(r => r.FailAlways(502)) - .ClientCalls(r => r.OnPort(9201).SucceedAlways()) - .StaticConnectionPool() - .Settings(s => s.DisablePing()) - ); -audit = await audit.TraceCall( - new ClientCall { - { BadResponse, 9200 }, - { HealthyResponse, 9201 }, - } - ); ----- -503 Service Unavailable -Will be treated as an error that requires retrying - - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .ClientCalls(r => r.FailAlways(503)) - .ClientCalls(r => r.OnPort(9201).SucceedAlways()) - .StaticConnectionPool() - .Settings(s => s.DisablePing()) - ); -audit = await audit.TraceCall( - new ClientCall { - { BadResponse, 9200 }, - { HealthyResponse, 9201 }, - } - ); ----- - -If a call returns a valid http status code other then 502/503 the request won't be retried. - - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .ClientCalls(r => r.FailAlways(418)) - .ClientCalls(r => r.OnPort(9201).SucceedAlways()) - .StaticConnectionPool() - .Settings(s => s.DisablePing()) - ); -audit = await audit.TraceCall( - new ClientCall { - { BadResponse, 9200 }, - } - ); ----- diff --git a/docs/asciidoc/ClientConcepts/ConnectionPooling/MaxRetries/RespectsMaxRetry.doc.asciidoc b/docs/asciidoc/ClientConcepts/ConnectionPooling/MaxRetries/RespectsMaxRetry.doc.asciidoc deleted file mode 100644 index 2aa8b032712..00000000000 --- a/docs/asciidoc/ClientConcepts/ConnectionPooling/MaxRetries/RespectsMaxRetry.doc.asciidoc +++ /dev/null @@ -1,146 +0,0 @@ -== MaxRetries -By default retry as many times as we have nodes. 
However retries still respect the request timeout. -Meaning if you have a 100 node cluster and a request timeout of 20 seconds we will retry as many times as we can -but give up after 20 seconds - - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .ClientCalls(r => r.FailAlways()) - .ClientCalls(r => r.OnPort(9209).SucceedAlways()) - .StaticConnectionPool() - .Settings(s => s.DisablePing()) - ); -audit = await audit.TraceCall( - new ClientCall { - { BadResponse, 9200 }, - { BadResponse, 9201 }, - { BadResponse, 9202 }, - { BadResponse, 9203 }, - { BadResponse, 9204 }, - { BadResponse, 9205 }, - { BadResponse, 9206 }, - { BadResponse, 9207 }, - { BadResponse, 9208 }, - { HealthyResponse, 9209 } - } - ); ----- - -When you have a 100 node cluster you might want to ensure a fixed number of retries. -Remember that the actual number of requests is initial attempt + set number of retries - - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .ClientCalls(r => r.FailAlways()) - .ClientCalls(r => r.OnPort(9209).SucceedAlways()) - .StaticConnectionPool() - .Settings(s => s.DisablePing().MaximumRetries(3)) - ); -audit = await audit.TraceCall( - new ClientCall { - { BadResponse, 9200 }, - { BadResponse, 9201 }, - { BadResponse, 9202 }, - { BadResponse, 9203 }, - { MaxRetriesReached } - } - ); ----- - -In our previous test we simulated very fast failures, in the real world a call might take upwards of a second -Here we simulate a particular heavy search that takes 10 seconds to fail, our Request timeout is set to 20 seconds. -In this case it does not make sense to retry our 10 second query on 10 nodes. 
We should try it twice and give up before a third call is attempted - - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .ClientCalls(r => r.FailAlways().Takes(TimeSpan.FromSeconds(10))) - .ClientCalls(r => r.OnPort(9209).SucceedAlways()) - .StaticConnectionPool() - .Settings(s => s.DisablePing().RequestTimeout(TimeSpan.FromSeconds(20))) - ); -audit = await audit.TraceCall( - new ClientCall { - { BadResponse, 9200 }, - { BadResponse, 9201 }, - { MaxTimeoutReached } - } - ); ----- - -If you set smaller request time outs you might not want it to also affect the retry timeout, therefor you can configure these separately too. -Here we simulate calls taking 3 seconds, a request time out of 2 and an overall retry timeout of 10 seconds. -We should see 5 attempts to perform this query, testing that our request timeout cuts the query off short and that our max retry timeout of 10 -wins over the configured request timeout - - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .ClientCalls(r => r.FailAlways().Takes(TimeSpan.FromSeconds(3))) - .ClientCalls(r => r.OnPort(9209).FailAlways()) - .StaticConnectionPool() - .Settings(s => s.DisablePing().RequestTimeout(TimeSpan.FromSeconds(2)).MaxRetryTimeout(TimeSpan.FromSeconds(10))) - ); -audit = await audit.TraceCall( - new ClientCall { - { BadResponse, 9200 }, - { BadResponse, 9201 }, - { BadResponse, 9202 }, - { BadResponse, 9203 }, - { BadResponse, 9204 }, - { MaxTimeoutReached } - } - ); ----- - -If your retry policy expands beyond available nodes we won't retry the same node twice - - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(2) - .ClientCalls(r => r.FailAlways().Takes(TimeSpan.FromSeconds(3))) - .ClientCalls(r => r.OnPort(9209).SucceedAlways()) - .StaticConnectionPool() - .Settings(s => s.DisablePing().RequestTimeout(TimeSpan.FromSeconds(2)).MaxRetryTimeout(TimeSpan.FromSeconds(10))) - ); -audit = await 
audit.TraceCall( - new ClientCall { - { BadResponse, 9200 }, - { BadResponse, 9201 }, - { MaxRetriesReached } - } - ); ----- - -This makes setting any retry setting on a single node connection pool a NOOP, this is by design! -Connection pooling and connection failover is about trying to fail sanely whilst still utilizing available resources and -not giving up on the fail fast principle. It's *NOT* a mechanism for forcing requests to succeed. - - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .ClientCalls(r => r.FailAlways().Takes(TimeSpan.FromSeconds(3))) - .ClientCalls(r => r.OnPort(9209).SucceedAlways()) - .SingleNodeConnection() - .Settings(s => s.DisablePing().MaximumRetries(10)) - ); -audit = await audit.TraceCall( - new ClientCall { - { BadResponse, 9200 } - } - ); ----- diff --git a/docs/asciidoc/ClientConcepts/ConnectionPooling/Pinging/FirstUsage.doc.asciidoc b/docs/asciidoc/ClientConcepts/ConnectionPooling/Pinging/FirstUsage.doc.asciidoc deleted file mode 100644 index 6648607066a..00000000000 --- a/docs/asciidoc/ClientConcepts/ConnectionPooling/Pinging/FirstUsage.doc.asciidoc +++ /dev/null @@ -1,128 +0,0 @@ -== Pinging - -Pinging is enabled by default for the Static & Sniffing connection pool. -This means that the first time a node is used or resurrected we issue a ping with a smaller (configurable) timeout. 
-This allows us to fail and fallover to a healthy node faster - - -A cluster with 2 nodes where the second node fails on ping - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(2) - .Ping(p => p.Succeeds(Always)) - .Ping(p => p.OnPort(9201).FailAlways()) - .StaticConnectionPool() - .AllDefaults() -); ----- -[source, csharp] ----- -await audit.TraceCalls( ----- -The first call goes to 9200 which succeeds - -[source, csharp] ----- -new ClientCall { - { PingSuccess, 9200}, - { HealthyResponse, 9200}, - { pool => - { - pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(0); - } } - }, ----- -The 2nd call does a ping on 9201 because its used for the first time. -It fails so we wrap over to node 9200 which we've already pinged - -[source, csharp] ----- -new ClientCall { - { PingFailure, 9201}, - { HealthyResponse, 9200}, ----- -Finally we assert that the connectionpool has one node that is marked as dead - -[source, csharp] ----- -{ pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(1) } - } -); ----- -A cluster with 4 nodes where the second and third pings fail - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(4) - .Ping(p => p.SucceedAlways()) - .Ping(p => p.OnPort(9201).FailAlways()) - .Ping(p => p.OnPort(9202).FailAlways()) - .StaticConnectionPool() - .AllDefaults() -); ----- -[source, csharp] ----- -await audit.TraceCalls( ----- -The first call goes to 9200 which succeeds - -[source, csharp] ----- -new ClientCall { - { PingSuccess, 9200}, - { HealthyResponse, 9200}, - { pool => - { - pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(0); - } } - }, ----- -The 2nd call does a ping on 9201 because its used for the first time. -It fails and so we ping 9202 which also fails. 
We then ping 9203 becuase -we haven't used it before and it succeeds - -[source, csharp] ----- -new ClientCall { - { PingFailure, 9201}, - { PingFailure, 9202}, - { PingSuccess, 9203}, - { HealthyResponse, 9203}, ----- -Finally we assert that the connectionpool has two nodes that are marked as dead - -[source, csharp] ----- -{ pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(2) } - } -); ----- -A healthy cluster of 4 (min master nodes of 3 of course!) - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(4) - .Ping(p => p.SucceedAlways()) - .StaticConnectionPool() - .AllDefaults() -); ----- -[source, csharp] ----- -await audit.TraceCalls( - new ClientCall { { PingSuccess, 9200}, { HealthyResponse, 9200} }, - new ClientCall { { PingSuccess, 9201}, { HealthyResponse, 9201} }, - new ClientCall { { PingSuccess, 9202}, { HealthyResponse, 9202} }, - new ClientCall { { PingSuccess, 9203}, { HealthyResponse, 9203} }, - new ClientCall { { HealthyResponse, 9200} }, - new ClientCall { { HealthyResponse, 9201} }, - new ClientCall { { HealthyResponse, 9202} }, - new ClientCall { { HealthyResponse, 9203} }, - new ClientCall { { HealthyResponse, 9200} } - ); ----- diff --git a/docs/asciidoc/ClientConcepts/ConnectionPooling/Pinging/Revival.doc.asciidoc b/docs/asciidoc/ClientConcepts/ConnectionPooling/Pinging/Revival.doc.asciidoc deleted file mode 100644 index 89c4e99284a..00000000000 --- a/docs/asciidoc/ClientConcepts/ConnectionPooling/Pinging/Revival.doc.asciidoc +++ /dev/null @@ -1,48 +0,0 @@ -== Pinging - -When a node is marked dead it will only be put in the dog house for a certain amount of time. Once it comes out of the dog house, or revived, we schedule a ping -before the actual call to make sure its up and running. If its still down we put it back in the dog house a little longer. 
For an explanation on these timeouts see: TODO LINK - - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(3) - .ClientCalls(r => r.SucceedAlways()) - .ClientCalls(r => r.OnPort(9202).Fails(Once)) - .Ping(p => p.SucceedAlways()) - .StaticConnectionPool() - .AllDefaults() - ); -audit = await audit.TraceCalls( - new ClientCall { { PingSuccess, 9200 }, { HealthyResponse, 9200 } }, - new ClientCall { { PingSuccess, 9201 }, { HealthyResponse, 9201 } }, - new ClientCall { - { PingSuccess, 9202}, - { BadResponse, 9202}, - { HealthyResponse, 9200}, - { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(1) } - }, - new ClientCall { { HealthyResponse, 9201 } }, - new ClientCall { { HealthyResponse, 9200 } }, - new ClientCall { { HealthyResponse, 9201 } }, - new ClientCall { - { HealthyResponse, 9200 }, - { pool => pool.Nodes.First(n=>!n.IsAlive).DeadUntil.Should().BeAfter(DateTime.UtcNow) } - } - ); -audit = await audit.TraceCalls( - new ClientCall { { HealthyResponse, 9201 } }, - new ClientCall { { HealthyResponse, 9200 } }, - new ClientCall { { HealthyResponse, 9201 } } - ); -audit.ChangeTime(d => d.AddMinutes(20)); -audit = await audit.TraceCalls( - new ClientCall { { HealthyResponse, 9201 } }, - new ClientCall { - { Resurrection, 9202 }, - { PingSuccess, 9202 }, - { HealthyResponse, 9202 } - } - ); ----- diff --git a/docs/asciidoc/ClientConcepts/ConnectionPooling/RequestOverrides/RespectsMaxRetryOverrides.doc.asciidoc b/docs/asciidoc/ClientConcepts/ConnectionPooling/RequestOverrides/RespectsMaxRetryOverrides.doc.asciidoc deleted file mode 100644 index ce2f5afac55..00000000000 --- a/docs/asciidoc/ClientConcepts/ConnectionPooling/RequestOverrides/RespectsMaxRetryOverrides.doc.asciidoc +++ /dev/null @@ -1,68 +0,0 @@ -== MaxRetries -By default retry as many times as we have nodes. However retries still respect the request timeout. 
-Meaning if you have a 100 node cluster and a request timeout of 20 seconds we will retry as many times as we can -but give up after 20 seconds - - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .ClientCalls(r => r.FailAlways()) - .ClientCalls(r => r.OnPort(9209).SucceedAlways()) - .StaticConnectionPool() - .Settings(s => s.DisablePing()) - ); -audit = await audit.TraceCall( - new ClientCall(r => r.MaxRetries(2)) { - { BadResponse, 9200 }, - { BadResponse, 9201 }, - { BadResponse, 9202 }, - { MaxRetriesReached } - } - ); ----- - -When you have a 100 node cluster you might want to ensure a fixed number of retries. -Remember that the actual number of requests is initial attempt + set number of retries - - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .ClientCalls(r => r.FailAlways()) - .ClientCalls(r => r.OnPort(9209).SucceedAlways()) - .StaticConnectionPool() - .Settings(s => s.DisablePing().MaximumRetries(5)) - ); -audit = await audit.TraceCall( - new ClientCall(r => r.MaxRetries(2)) { - { BadResponse, 9200 }, - { BadResponse, 9201 }, - { BadResponse, 9202 }, - { MaxRetriesReached } - } - ); ----- - -This makes setting any retry setting on a single node connection pool a NOOP, this is by design! -Connection pooling and connection failover is about trying to fail sanely whilst still utilizing available resources and -not giving up on the fail fast principle. It's *NOT* a mechanism for forcing requests to succeed. 
- - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .ClientCalls(r => r.FailAlways().Takes(TimeSpan.FromSeconds(3))) - .ClientCalls(r => r.OnPort(9209).SucceedAlways()) - .SingleNodeConnection() - .Settings(s => s.DisablePing().MaximumRetries(10)) - ); -audit = await audit.TraceCall( - new ClientCall(r => r.MaxRetries(10)) { - { BadResponse, 9200 } - } - ); ----- diff --git a/docs/asciidoc/ClientConcepts/ConnectionPooling/Sniffing/OnConnectionFailure.doc.asciidoc b/docs/asciidoc/ClientConcepts/ConnectionPooling/Sniffing/OnConnectionFailure.doc.asciidoc deleted file mode 100644 index ef2b68cd4cc..00000000000 --- a/docs/asciidoc/ClientConcepts/ConnectionPooling/Sniffing/OnConnectionFailure.doc.asciidoc +++ /dev/null @@ -1,175 +0,0 @@ -== Sniffing on connection failure -Sniffing on connection is enabled by default when using a connection pool that allows reseeding. -The only IConnectionPool we ship that allows this is the SniffingConnectionPool. - -This can be very handy to force a refresh of the pools known healthy node by inspecting elasticsearch itself. -A sniff tries to get the nodes by asking each currently known node until one response. - - -Here we seed our connection with 5 known nodes 9200-9204 of which we think -9202, 9203, 9204 are master eligible nodes. Our virtualized cluster will throw once when doing -a search on 9201. This should a sniff to be kicked off. 
- -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(5) - .MasterEligible(9202, 9203, 9204) - .ClientCalls(r => r.SucceedAlways()) - .ClientCalls(r => r.OnPort(9201).Fails(Once)) ----- -When the call fails on 9201 the sniff succeeds and returns a new cluster of healty nodes -this cluster only has 3 nodes and the known masters are 9200 and 9202 but a search on 9201 -still fails once - -[source, csharp] ----- -.Sniff(p => p.SucceedAlways(Framework.Cluster - .Nodes(3) - .MasterEligible(9200, 9202) - .ClientCalls(r => r.OnPort(9201).Fails(Once)) ----- -After this second failure on 9201 another sniff will be returned a cluster that no -longer fails but looks completely different (9210-9212) we should be able to handle this - -[source, csharp] ----- -.Sniff(s => s.SucceedAlways(Framework.Cluster - .Nodes(3, 9210) - .MasterEligible(9210, 9212) - .ClientCalls(r => r.SucceedAlways()) - .Sniff(r => r.SucceedAlways()) - )) - )) - .SniffingConnectionPool() - .Settings(s => s.DisablePing().SniffOnStartup(false)) -); ----- -[source, csharp] ----- -audit = await audit.TraceCalls( ----- - - -[source, csharp] ----- -new ClientCall { - { HealthyResponse, 9200 }, - { pool => pool.Nodes.Count.Should().Be(5) } - }, - new ClientCall { - { BadResponse, 9201}, ----- -We assert we do a sniff on our first known master node 9202 - -[source, csharp] ----- -{ SniffOnFail }, - { SniffSuccess, 9202}, - { HealthyResponse, 9200}, ----- -Our pool should now have three nodes - -[source, csharp] ----- -{ pool => pool.Nodes.Count.Should().Be(3) } - }, - new ClientCall { - { BadResponse, 9201}, ----- -We assert we do a sniff on the first master node in our updated cluster - -[source, csharp] ----- -{ SniffOnFail }, - { SniffSuccess, 9200}, - { HealthyResponse, 9210}, - { pool => pool.Nodes.Count.Should().Be(3) } - }, - new ClientCall { { HealthyResponse, 9211 } }, - new ClientCall { { HealthyResponse, 9212 } }, - new ClientCall { { HealthyResponse, 9210 } }, - new 
ClientCall { { HealthyResponse, 9211 } }, - new ClientCall { { HealthyResponse, 9212 } }, - new ClientCall { { HealthyResponse, 9210 } }, - new ClientCall { { HealthyResponse, 9211 } }, - new ClientCall { { HealthyResponse, 9212 } }, - new ClientCall { { HealthyResponse, 9210 } } -); ----- -Here we set up our cluster exactly the same as the previous setup -Only we enable pinging (default is true) and make the ping fail - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(5) - .MasterEligible(9202, 9203, 9204) - .Ping(r => r.OnPort(9201).Fails(Once)) - .Sniff(p => p.SucceedAlways(Framework.Cluster - .Nodes(3) - .MasterEligible(9200, 9202) - .Ping(r => r.OnPort(9201).Fails(Once)) - .Sniff(s => s.SucceedAlways(Framework.Cluster - .Nodes(3, 9210) - .MasterEligible(9210, 9211) - .Ping(r => r.SucceedAlways()) - .Sniff(r => r.SucceedAlways()) - )) - )) - .SniffingConnectionPool() - .Settings(s => s.SniffOnStartup(false)) -); ----- -[source, csharp] ----- -audit = await audit.TraceCalls( - new ClientCall { - { PingSuccess, 9200 }, - { HealthyResponse, 9200 }, - { pool => pool.Nodes.Count.Should().Be(5) } - }, - new ClientCall { - { PingFailure, 9201}, ----- -We assert we do a sniff on our first known master node 9202 - -[source, csharp] ----- -{ SniffOnFail }, - { SniffSuccess, 9202}, - { PingSuccess, 9200}, - { HealthyResponse, 9200}, ----- -Our pool should now have three nodes - -[source, csharp] ----- -{ pool => pool.Nodes.Count.Should().Be(3) } - }, - new ClientCall { - { PingFailure, 9201}, ----- -We assert we do a sniff on the first master node in our updated cluster - -[source, csharp] ----- -{ SniffOnFail }, - { SniffSuccess, 9200}, - { PingSuccess, 9210}, - { HealthyResponse, 9210}, - { pool => pool.Nodes.Count.Should().Be(3) } - }, - new ClientCall { { PingSuccess, 9211 }, { HealthyResponse, 9211 } }, - new ClientCall { { PingSuccess, 9212 }, { HealthyResponse, 9212 } }, ----- -9210 was already pinged after the sniff returned the 
new nodes - -[source, csharp] ----- -new ClientCall { { HealthyResponse, 9210 } }, - new ClientCall { { HealthyResponse, 9211 } }, - new ClientCall { { HealthyResponse, 9212 } }, - new ClientCall { { HealthyResponse, 9210 } } -); ----- diff --git a/docs/asciidoc/ClientConcepts/ConnectionPooling/Sniffing/OnStaleClusterState.doc.asciidoc b/docs/asciidoc/ClientConcepts/ConnectionPooling/Sniffing/OnStaleClusterState.doc.asciidoc deleted file mode 100644 index d20b01abde4..00000000000 --- a/docs/asciidoc/ClientConcepts/ConnectionPooling/Sniffing/OnStaleClusterState.doc.asciidoc +++ /dev/null @@ -1,97 +0,0 @@ -== Sniffing periodically - -Connection pools that return true for `SupportsReseeding` can be configured to sniff periodically. -In addition to sniffing on startup and sniffing on failures, sniffing periodically can benefit scenerio's where -clusters are often scaled horizontally during peak hours. An application might have a healthy view of a subset of the nodes -but without sniffing periodically it will never find the nodes that have been added to help out with load - - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .MasterEligible(9202, 9203, 9204) - .ClientCalls(r => r.SucceedAlways()) - .Sniff(s => s.SucceedAlways(Framework.Cluster - .Nodes(100) - .MasterEligible(9202, 9203, 9204) - .ClientCalls(r => r.SucceedAlways()) - .Sniff(ss => ss.SucceedAlways(Framework.Cluster - .Nodes(10) - .MasterEligible(9202, 9203, 9204) - .ClientCalls(r => r.SucceedAlways()) - )) - )) - .SniffingConnectionPool() - .Settings(s => s - .DisablePing() - .SniffOnConnectionFault(false) - .SniffOnStartup(false) - .SniffLifeSpan(TimeSpan.FromMinutes(30)) - ) - ); ----- -healty cluster all nodes return healthy responses - -[source, csharp] ----- -audit = await audit.TraceCalls( - new ClientCall { { HealthyResponse, 9200 } }, - new ClientCall { { HealthyResponse, 9201 } }, - new ClientCall { { HealthyResponse, 9202 } }, - new ClientCall { { 
HealthyResponse, 9203 } }, - new ClientCall { { HealthyResponse, 9204 } }, - new ClientCall { { HealthyResponse, 9205 } }, - new ClientCall { { HealthyResponse, 9206 } }, - new ClientCall { { HealthyResponse, 9207 } }, - new ClientCall { { HealthyResponse, 9208 } }, - new ClientCall { { HealthyResponse, 9209 } }, - new ClientCall { - { HealthyResponse, 9200 }, - { pool => pool.Nodes.Count.Should().Be(10) } - } -); ----- -Now let's forward the clock 31 minutes, our sniff lifespan should now go state -and the first call should do a sniff which discovered we scaled up to a 100 nodes! - -[source, csharp] ----- -audit.ChangeTime(d => d.AddMinutes(31)); ----- -[source, csharp] ----- -audit = await audit.TraceCalls( - new ClientCall { ----- -a sniff is done first and it prefers the first node master node - -[source, csharp] ----- -{ SniffOnStaleCluster }, - { SniffSuccess, 9202 }, - { HealthyResponse, 9201 }, - { pool => pool.Nodes.Count.Should().Be(100) } - } -); ----- -[source, csharp] ----- -audit.ChangeTime(d => d.AddMinutes(31)); ----- -[source, csharp] ----- -audit = await audit.TraceCalls( - new ClientCall { ----- -a sniff is done first and it prefers the first node master node - -[source, csharp] ----- -{ SniffOnStaleCluster }, - { SniffSuccess, 9202 }, - { HealthyResponse, 9200 }, - { pool => pool.Nodes.Count.Should().Be(10) } - } -); ----- diff --git a/docs/asciidoc/ClientConcepts/ConnectionPooling/Sniffing/OnStartup.doc.asciidoc b/docs/asciidoc/ClientConcepts/ConnectionPooling/Sniffing/OnStartup.doc.asciidoc deleted file mode 100644 index 1f27d68c313..00000000000 --- a/docs/asciidoc/ClientConcepts/ConnectionPooling/Sniffing/OnStartup.doc.asciidoc +++ /dev/null @@ -1,119 +0,0 @@ -== Sniffing on startup - -Connection pools that return true for `SupportsReseeding` by default sniff on startup. 
- - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .Sniff(s => s.Fails(Always)) - .Sniff(s => s.OnPort(9202).Succeeds(Always)) - .SniffingConnectionPool() - .AllDefaults() - ); -await audit.TraceCall(new ClientCall - { - { SniffOnStartup}, - { SniffFailure, 9200}, - { SniffFailure, 9201}, - { SniffSuccess, 9202}, - { PingSuccess , 9200}, - { HealthyResponse, 9200} - }); -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .Sniff(s => s.Fails(Always)) - .Sniff(s => s.OnPort(9202).Succeeds(Always)) - .SniffingConnectionPool() - .AllDefaults() - ); -await audit.TraceCalls( - new ClientCall - { - { SniffOnStartup}, - { SniffFailure, 9200}, - { SniffFailure, 9201}, - { SniffSuccess, 9202}, - { PingSuccess , 9200}, - { HealthyResponse, 9200} - }, - new ClientCall - { - { PingSuccess, 9201}, - { HealthyResponse, 9201} - } - ); -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .Sniff(s => s.Fails(Always)) - .Sniff(s => s.OnPort(9202).Succeeds(Always, Framework.Cluster.Nodes(8, startFrom: 9204))) - .SniffingConnectionPool() - .AllDefaults() - ); -await audit.TraceCall(new ClientCall { - { SniffOnStartup}, - { SniffFailure, 9200}, - { SniffFailure, 9201}, - { SniffSuccess, 9202}, - { PingSuccess, 9204}, - { HealthyResponse, 9204} - }); -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .Sniff(s => s.Fails(Always)) - .Sniff(s => s.OnPort(9209).Succeeds(Always)) - .SniffingConnectionPool() - .AllDefaults() - ); -await audit.TraceCall(new ClientCall { - { SniffOnStartup}, - { SniffFailure, 9200}, - { SniffFailure, 9201}, - { SniffFailure, 9202}, - { SniffFailure, 9203}, - { SniffFailure, 9204}, - { SniffFailure, 9205}, - { SniffFailure, 9206}, - { SniffFailure, 9207}, - { SniffFailure, 9208}, - { SniffSuccess, 9209}, - { PingSuccess, 9200}, - { HealthyResponse, 9200} - }); -var audit = new Auditor(() => Framework.Cluster - .Nodes(new[] { - new Node(new Uri("http://localhost:9200")) { MasterEligible = 
false }, - new Node(new Uri("http://localhost:9201")) { MasterEligible = false }, - new Node(new Uri("http://localhost:9202")) { MasterEligible = true }, - }) - .Sniff(s => s.Succeeds(Always)) - .SniffingConnectionPool() - .AllDefaults() - ); -await audit.TraceCall(new ClientCall { - { SniffOnStartup}, - { SniffSuccess, 9202}, - { PingSuccess, 9200}, - { HealthyResponse, 9200} - }); -var audit = new Auditor(() => Framework.Cluster - .Nodes(new[] { - new Node(new Uri("http://localhost:9200")) { MasterEligible = true }, - new Node(new Uri("http://localhost:9201")) { MasterEligible = true }, - new Node(new Uri("http://localhost:9202")) { MasterEligible = false }, - }) - .Sniff(s => s.Fails(Always)) - .Sniff(s => s.OnPort(9202).Succeeds(Always)) - .SniffingConnectionPool() - .AllDefaults() - ); -await audit.TraceCall(new ClientCall { - { SniffOnStartup}, - { SniffFailure, 9200}, - { SniffFailure, 9201}, - { SniffSuccess, 9202}, - { PingSuccess, 9200}, - { HealthyResponse, 9200} - }); ----- diff --git a/docs/asciidoc/ClientConcepts/ConnectionPooling/Sniffing/RoleDetection.doc.asciidoc b/docs/asciidoc/ClientConcepts/ConnectionPooling/Sniffing/RoleDetection.doc.asciidoc deleted file mode 100644 index 576d9500d74..00000000000 --- a/docs/asciidoc/ClientConcepts/ConnectionPooling/Sniffing/RoleDetection.doc.asciidoc +++ /dev/null @@ -1,121 +0,0 @@ -== Sniffing role detection - -When we sniff the custer state we detect the role of the node whether its master eligible and holds data -We use this information when selecting a node to perform an API call on. 
- - -[source, csharp] ----- -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .Sniff(s => s.Fails(Always)) - .Sniff(s => s.OnPort(9202) - .Succeeds(Always, Framework.Cluster.Nodes(8).MasterEligible(9200, 9201, 9202)) - ) - .SniffingConnectionPool() - .AllDefaults() - ) - { - AssertPoolBeforeCall = (pool) => - { - pool.Should().NotBeNull(); - pool.Nodes.Should().HaveCount(10); - pool.Nodes.Where(n => n.MasterEligible).Should().HaveCount(10); - }, - AssertPoolAfterCall = (pool) => - { - pool.Should().NotBeNull(); - pool.Nodes.Should().HaveCount(8); - pool.Nodes.Where(n => n.MasterEligible).Should().HaveCount(3); - } - }; -pool.Should().NotBeNull(); -pool.Nodes.Should().HaveCount(10); -pool.Nodes.Where(n => n.MasterEligible).Should().HaveCount(10); -pool.Should().NotBeNull(); -pool.Nodes.Should().HaveCount(8); -pool.Nodes.Where(n => n.MasterEligible).Should().HaveCount(3); -await audit.TraceStartup(); -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .Sniff(s => s.Fails(Always)) - .Sniff(s => s.OnPort(9202) - .Succeeds(Always, Framework.Cluster.Nodes(8).StoresNoData(9200, 9201, 9202)) - ) - .SniffingConnectionPool() - .AllDefaults() - ) - { - AssertPoolBeforeCall = (pool) => - { - pool.Should().NotBeNull(); - pool.Nodes.Should().HaveCount(10); - pool.Nodes.Where(n => n.HoldsData).Should().HaveCount(10); - }, - - AssertPoolAfterCall = (pool) => - { - pool.Should().NotBeNull(); - pool.Nodes.Should().HaveCount(8); - pool.Nodes.Where(n => n.HoldsData).Should().HaveCount(5); - } - }; -pool.Should().NotBeNull(); -pool.Nodes.Should().HaveCount(10); -pool.Nodes.Where(n => n.HoldsData).Should().HaveCount(10); -pool.Should().NotBeNull(); -pool.Nodes.Should().HaveCount(8); -pool.Nodes.Where(n => n.HoldsData).Should().HaveCount(5); -await audit.TraceStartup(); -var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .Sniff(s => s.SucceedAlways() - .Succeeds(Always, Framework.Cluster.Nodes(8).StoresNoData(9200, 9201, 
9202).SniffShouldReturnFqdn()) - ) - .SniffingConnectionPool() - .AllDefaults() - ) - { - AssertPoolBeforeCall = (pool) => - { - pool.Should().NotBeNull(); - pool.Nodes.Should().HaveCount(10); - pool.Nodes.Where(n => n.HoldsData).Should().HaveCount(10); - pool.Nodes.Should().OnlyContain(n => n.Uri.Host == "localhost"); - }, - - AssertPoolAfterCall = (pool) => - { - pool.Should().NotBeNull(); - pool.Nodes.Should().HaveCount(8); - pool.Nodes.Where(n => n.HoldsData).Should().HaveCount(5); - pool.Nodes.Should().OnlyContain(n => n.Uri.Host.StartsWith("fqdn") && !n.Uri.Host.Contains("/")); - } - }; -pool.Should().NotBeNull(); -pool.Nodes.Should().HaveCount(10); -pool.Nodes.Where(n => n.HoldsData).Should().HaveCount(10); -pool.Nodes.Should().OnlyContain(n => n.Uri.Host == "localhost"); -pool.Should().NotBeNull(); -pool.Nodes.Should().HaveCount(8); -pool.Nodes.Where(n => n.HoldsData).Should().HaveCount(5); -pool.Nodes.Should().OnlyContain(n => n.Uri.Host.StartsWith("fqdn") && !n.Uri.Host.Contains("/")); -await audit.TraceStartup(); -var node = SniffAndReturnNode(); -node.MasterEligible.Should().BeTrue(); -node.HoldsData.Should().BeFalse(); -node = await SniffAndReturnNodeAsync(); -node.MasterEligible.Should().BeTrue(); -node.HoldsData.Should().BeFalse(); -var pipeline = CreatePipeline(); -pipeline.Sniff(); -var pipeline = CreatePipeline(); -await pipeline.SniffAsync(); -this._settings = - this._cluster.Client(u => new SniffingConnectionPool(new[] {u}), c => c.PrettyJson()).ConnectionSettings; -var pipeline = new RequestPipeline(this._settings, DateTimeProvider.Default, new MemoryStreamFactory(), - new SearchRequestParameters()); -var nodes = this._settings.ConnectionPool.Nodes; -nodes.Should().NotBeEmpty().And.HaveCount(1); -var node = nodes.First(); ----- diff --git a/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/DocumentPaths/DocumentPaths.doc.asciidoc b/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/DocumentPaths/DocumentPaths.doc.asciidoc deleted file mode 
100644 index 22a2862b8ef..00000000000 --- a/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/DocumentPaths/DocumentPaths.doc.asciidoc +++ /dev/null @@ -1,104 +0,0 @@ -# DocumentPaths -Many API's in elasticsearch describe a path to a document. In NEST besides generating a constructor that takes -and Index, Type and Id seperately we also generate a constructor taking a DocumentPath that allows you to describe the path -to your document more succintly - -Manually newing - -here we create a new document path based on Project with the id 1 - -[source, csharp] ----- -IDocumentPath path = new DocumentPath(1); ----- -[source, csharp] ----- -Expect("project").WhenSerializing(path.Index); -Expect("project").WhenSerializing(path.Type); -Expect(1).WhenSerializing(path.Id); ----- -You can still override the inferred index and type name - -[source, csharp] ----- -path = new DocumentPath(1).Type("project1"); ----- -[source, csharp] ----- -Expect("project1").WhenSerializing(path.Type); -path = new DocumentPath(1).Index("project1"); -Expect("project1").WhenSerializing(path.Index); ----- -there is also a static way to describe such paths - -[source, csharp] ----- -path = DocumentPath.Id(1); ----- -[source, csharp] ----- -Expect("project").WhenSerializing(path.Index); -Expect("project").WhenSerializing(path.Type); -Expect(1).WhenSerializing(path.Id); -var project = new Project { Name = "hello-world" }; ----- -here we create a new document path based on a Project - -[source, csharp] ----- -IDocumentPath path = new DocumentPath(project); ----- -[source, csharp] ----- -Expect("project").WhenSerializing(path.Index); -Expect("project").WhenSerializing(path.Type); -Expect("hello-world").WhenSerializing(path.Id); ----- -You can still override the inferred index and type name - -[source, csharp] ----- -path = new DocumentPath(project).Type("project1"); ----- -[source, csharp] ----- -Expect("project1").WhenSerializing(path.Type); -path = new DocumentPath(project).Index("project1"); 
-Expect("project1").WhenSerializing(path.Index); ----- -there is also a static way to describe such paths - -[source, csharp] ----- -path = DocumentPath.Id(project); ----- -[source, csharp] ----- -Expect("project").WhenSerializing(path.Index); -Expect("project").WhenSerializing(path.Type); -Expect("hello-world").WhenSerializing(path.Id); -DocumentPath p = project; -var project = new Project { Name = "hello-world" }; ----- -Here we can see and example how DocumentPath helps your describe your requests more tersely - -[source, csharp] ----- -var request = new IndexRequest(2) { Document = project }; ----- -[source, csharp] ----- -request = new IndexRequest(project) { }; ----- -when comparing with the full blown constructor and passing document manually -DocumentPath -T -'s benefits become apparent. - -[source, csharp] ----- -request = new IndexRequest(IndexName.From(), TypeName.From(), 2) -{ - Document = project -}; ----- diff --git a/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/FieldInference.doc.asciidoc b/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/FieldInference.doc.asciidoc deleted file mode 100644 index 8701d88bf1d..00000000000 --- a/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/FieldInference.doc.asciidoc +++ /dev/null @@ -1,429 +0,0 @@ -# Strongly typed field access - -Several places in the elasticsearch API expect the path to a field from your original source document as a string. -NEST allows you to use C# expressions to strongly type these field path strings. 
- -These expressions are assigned to a type called `Field` and there are several ways to create a instance of that type - - -Using the constructor directly is possible but rather involved - -[source, csharp] ----- -var fieldString = new Field { Name = "name" }; ----- -especially when using C# expressions since these can not be simply new'ed - -[source, csharp] ----- -Expression> expression = p => p.Name; ----- -[source, csharp] ----- -var fieldExpression = Field.Create(expression); -Expect("name") - .WhenSerializing(fieldExpression) - .WhenSerializing(fieldString); ----- -Therefore you can also implicitly convert strings and expressions to Field's - -[source, csharp] ----- -Field fieldString = "name"; ----- -but for expressions this is still rather involved - -[source, csharp] ----- -Expression> expression = p => p.Name; ----- -[source, csharp] ----- -Field fieldExpression = expression; -Expect("name") - .WhenSerializing(fieldExpression) - .WhenSerializing(fieldString); ----- -to ease creating Field's from expressions there is a static Property class you can use - -[source, csharp] ----- -Field fieldString = "name"; ----- -but for expressions this is still rather involved - -[source, csharp] ----- -var fieldExpression = Infer.Field(p => p.Name); ----- -Using static imports in c# 6 this can be even shortened: -using static Nest.Static; - -[source, csharp] ----- -fieldExpression = Field(p => p.Name); ----- -Now this is much much terser then our first example using the constructor! 
- -[source, csharp] ----- -Expect("name") - .WhenSerializing(fieldString) - .WhenSerializing(fieldExpression); ----- -By default NEST will camelCase all the field names to be more javascripty - -using DefaultFieldNameInferrer() on ConnectionSettings you can change this behavior - -[source, csharp] ----- -var setup = WithConnectionSettings(s => s.DefaultFieldNameInferrer(p => p.ToUpper())); ----- -[source, csharp] ----- -setup.Expect("NAME").WhenSerializing(Field(p => p.Name)); ----- -However string are *always* passed along verbatim - -[source, csharp] ----- -setup.Expect("NaMe").WhenSerializing("NaMe"); ----- -if you want the same behavior for expressions simply do nothing in the default inferrer - -[source, csharp] ----- -setup = WithConnectionSettings(s => s.DefaultFieldNameInferrer(p => p)); ----- -[source, csharp] ----- -setup.Expect("Name").WhenSerializing(Field(p => p.Name)); ----- -Complex field name expressions - -You can follow your property expression to any depth, here we are traversing to the LeadDeveloper's (Person) FirstName - -[source, csharp] ----- -Expect("leadDeveloper.firstName").WhenSerializing(Field(p => p.LeadDeveloper.FirstName)); ----- -When dealing with collection index access is ingnored allowing you to traverse into properties of collections - -[source, csharp] ----- -Expect("curatedTags").WhenSerializing(Field(p => p.CuratedTags[0])); ----- -Similarly .First() also works, remember these are expressions and not actual code that will be executed - -[source, csharp] ----- -Expect("curatedTags").WhenSerializing(Field(p => p.CuratedTags.First())); ----- -[source, csharp] ----- -Expect("curatedTags.added").WhenSerializing(Field(p => p.CuratedTags[0].Added)); -Expect("curatedTags.name").WhenSerializing(Field(p => p.CuratedTags.First().Name)); ----- -When we see an indexer on a dictionary we assume they describe property names - -[source, csharp] ----- -Expect("metadata.hardcoded").WhenSerializing(Field(p => p.Metadata["hardcoded"])); ----- 
-[source, csharp] ----- -Expect("metadata.hardcoded.created").WhenSerializing(Field(p => p.Metadata["hardcoded"].Created)); ----- -A cool feature here is that we'll evaluate variables passed to these indexers - -[source, csharp] ----- -var variable = "var"; ----- -[source, csharp] ----- -Expect("metadata.var").WhenSerializing(Field(p => p.Metadata[variable])); -Expect("metadata.var.created").WhenSerializing(Field(p => p.Metadata[variable].Created)); ----- -If you are using elasticearch's multifield mapping (you really should!) these "virtual" sub fields -do not always map back on to your POCO, by calling .Suffix() you describe the sub fields that do not live in your c# objects - -[source, csharp] ----- -Expect("leadDeveloper.firstName.raw").WhenSerializing(Field(p => p.LeadDeveloper.FirstName.Suffix("raw"))); ----- -[source, csharp] ----- -Expect("curatedTags.raw").WhenSerializing(Field(p => p.CuratedTags[0].Suffix("raw"))); -Expect("curatedTags.raw").WhenSerializing(Field(p => p.CuratedTags.First().Suffix("raw"))); -Expect("curatedTags.added.raw").WhenSerializing(Field(p => p.CuratedTags[0].Added.Suffix("raw"))); -Expect("metadata.hardcoded.raw").WhenSerializing(Field(p => p.Metadata["hardcoded"].Suffix("raw"))); -Expect("metadata.hardcoded.created.raw").WhenSerializing(Field(p => p.Metadata["hardcoded"].Created.Suffix("raw"))); ----- -You can even chain them to any depth! - -[source, csharp] ----- -Expect("curatedTags.name.raw.evendeeper").WhenSerializing(Field(p => p.CuratedTags.First().Name.Suffix("raw").Suffix("evendeeper"))); ----- -Variables passed to suffix will be evaluated as well - -[source, csharp] ----- -var suffix = "unanalyzed"; ----- -[source, csharp] ----- -Expect("metadata.var.unanalyzed").WhenSerializing(Field(p => p.Metadata[variable].Suffix(suffix))); -Expect("metadata.var.created.unanalyzed").WhenSerializing(Field(p => p.Metadata[variable].Created.Suffix(suffix))); ----- - -Suffixes can be appended to expressions. 
This is useful in cases where you want to apply the same suffix -to a list of fields - - - - -[source, csharp] ----- -var expressions = new List>> -{ - p => p.Name, - p => p.Description, - p => p.CuratedTags.First().Name, - p => p.LeadDeveloper.FirstName -}; ----- -append the suffix "raw" to each expression - -[source, csharp] ----- -var fieldExpressions = - expressions.Select>, Field>(e => e.AppendSuffix("raw")).ToList(); ----- -[source, csharp] ----- -Expect("name.raw").WhenSerializing(fieldExpressions[0]); -Expect("description.raw").WhenSerializing(fieldExpressions[1]); -Expect("curatedTags.name.raw").WhenSerializing(fieldExpressions[2]); -Expect("leadDeveloper.firstName.raw").WhenSerializing(fieldExpressions[3]); ----- -Annotations - -When using NEST's property attributes you can specify a new name for the properties - -[source, csharp] ----- -public class BuiltIn -{ - [String(Name = "naam")] - public string Name { get; set; } -} ----- -[source, csharp] ----- -Expect("naam").WhenSerializing(Field(p => p.Name)); ----- - -Starting with NEST 2.x we also ask the serializer if it can resolve the property to a name. 
-Here we ask the default JsonNetSerializer and it takes JsonProperty into account - -[source, csharp] ----- -public class SerializerSpecific -{ - [JsonProperty("nameInJson")] - public string Name { get; set; } -} ----- -[source, csharp] ----- -Expect("nameInJson").WhenSerializing(Field(p => p.Name)); ----- - -If both are specified NEST takes precedence though - -[source, csharp] ----- -public class Both -{ - [String(Name = "naam")] - [JsonProperty("nameInJson")] - public string Name { get; set; } -} ----- -[source, csharp] ----- -Expect("naam").WhenSerializing(Field(p => p.Name)); -Expect(new - { - naam = "Martijn Laarman" - }).WhenSerializing(new Both { Name = "Martijn Laarman" }); ----- -[source, csharp] ----- -class A { public C C { get; set; } } ----- -[source, csharp] ----- -class B { public C C { get; set; } } ----- -[source, csharp] ----- -class C -{ - public string Name { get; set; } -} ----- - -Resolving field names is cached but this is per connection settings - - -[source, csharp] ----- -var connectionSettings = TestClient.CreateSettings(forceInMemory: true); -var client = new ElasticClient(connectionSettings); -var fieldNameOnA = client.Infer.Field(Field(p => p.C.Name)); -var fieldNameOnB = client.Infer.Field(Field(p => p.C.Name)); ----- -Here we have to similary shaped expressions on coming from A and on from B -that will resolve to the same field name, as expected - -[source, csharp] ----- -fieldNameOnA.Should().Be("c.name"); ----- -[source, csharp] ----- -fieldNameOnB.Should().Be("c.name"); ----- -now we create a new connectionsettings with a remap for C on class A to `d` -now when we resolve the field path for A will be different - -[source, csharp] ----- -var newConnectionSettings = TestClient.CreateSettings(forceInMemory: true, modifySettings: s => s - .InferMappingFor(m => m - .Rename(p => p.C, "d") - ) -); ----- -[source, csharp] ----- -var newClient = new ElasticClient(newConnectionSettings); -fieldNameOnA = newClient.Infer.Field(Field(p => 
p.C.Name)); -fieldNameOnB = newClient.Infer.Field(Field(p => p.C.Name)); -fieldNameOnA.Should().Be("d.name"); -fieldNameOnB.Should().Be("c.name"); ----- -however we didn't break inferrence on the first client instance using its separate connectionsettings - -[source, csharp] ----- -fieldNameOnA = client.Infer.Field(Field(p => p.C.Name)); ----- -[source, csharp] ----- -fieldNameOnB = client.Infer.Field(Field(p => p.C.Name)); -fieldNameOnA.Should().Be("c.name"); -fieldNameOnB.Should().Be("c.name"); ----- -To wrap up lets showcase the precedence that field names are inferred -1. A hard rename of the property on connection settings using Rename() -2. A NEST property mapping -3. Ask the serializer if the property has a verbatim value e.g it has an explicit JsonPropery attribute. -4. Pass the MemberInfo's Name to the DefaultFieldNameInferrer which by default camelCases -In the following example we have a class where each case wins - -[source, csharp] ----- -class Precedence -{ ----- -Eventhough this property has a NEST property mapping and a JsonProperty attribute -We are going to provide a hard rename for it on ConnectionSettings later that should win. - -[source, csharp] ----- -[String(Name = "renamedIgnoresNest")] - [JsonProperty("renamedIgnoresJsonProperty")] - public string RenamedOnConnectionSettings { get; set; } ----- -This property has both a NEST attribute and a JsonProperty, NEST should win. 
- -[source, csharp] ----- -[String(Name = "nestAtt")] - [JsonProperty("jsonProp")] - public string NestAttribute { get; set; } ----- -We should take the json property into account by itself - -[source, csharp] ----- -[JsonProperty("jsonProp")] - public string JsonProperty { get; set; } ----- -This property we are going to special case in our custom serializer to resolve to `ask` - -[source, csharp] ----- -[JsonProperty("dontaskme")] - public string AskSerializer { get; set; } ----- -We are going to register a DefaultFieldNameInferrer on ConnectionSettings -that will uppercase all properties. - -[source, csharp] ----- -public string DefaultFieldNameInferrer { get; set; } - -} ----- -[source, csharp] ----- -var usingSettings = WithConnectionSettings(s => s ----- -here we provide an explicit rename of a property on connectionsettings - -[source, csharp] ----- -.InferMappingFor(m => m - .Rename(p => p.RenamedOnConnectionSettings, "renamed") - ) ----- -All properties that are not mapped verbatim should be uppercased - -[source, csharp] ----- -.DefaultFieldNameInferrer(p => p.ToUpperInvariant()) -).WithSerializer(s => new CustomSerializer(s)); ----- -[source, csharp] ----- -usingSettings.Expect("renamed").ForField(Field(p => p.RenamedOnConnectionSettings)); -usingSettings.Expect("nestAtt").ForField(Field(p => p.NestAttribute)); -usingSettings.Expect("jsonProp").ForField(Field(p => p.JsonProperty)); -usingSettings.Expect("ask").ForField(Field(p => p.AskSerializer)); -usingSettings.Expect("DEFAULTFIELDNAMEINFERRER").ForField(Field(p => p.DefaultFieldNameInferrer)); ----- -The same rules apply when indexing an object - -[source, csharp] ----- -usingSettings.Expect(new [] -{ - "ask", - "DEFAULTFIELDNAMEINFERRER", - "jsonProp", - "nestAtt", - "renamed" -}).AsPropertiesOf(new Precedence -{ - RenamedOnConnectionSettings = "renamed on connection settings", - NestAttribute = "using a nest attribute", - JsonProperty = "the default serializer resolves json property attributes", - 
AskSerializer = "serializer fiddled with this one", - DefaultFieldNameInferrer = "shouting much?" -}); ----- diff --git a/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/FieldNames/FieldInference.doc.asciidoc b/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/FieldNames/FieldInference.doc.asciidoc deleted file mode 100644 index 82b58a192ec..00000000000 --- a/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/FieldNames/FieldInference.doc.asciidoc +++ /dev/null @@ -1,313 +0,0 @@ -# Strongly typed field access - -Several places in the elasticsearch API expect the path to a field from your original source document as a string. -NEST allows you to use C# expressions to strongly type these field path strings. -These expressions are assigned to a type called `Field` and there are several ways to create a instance of that type - -Using the constructor directly is possible but rather involved - -[source, csharp] ----- -var fieldString = new Field { Name = "name" }; ----- -especially when using C# expressions since these can not be simply new'ed - -[source, csharp] ----- -Expression> expression = p => p.Name; ----- -[source, csharp] ----- -var fieldExpression = Field.Create(expression); -Expect("name") - .WhenSerializing(fieldExpression) - .WhenSerializing(fieldString); ----- -Therefor you can also implicitly convert strings and expressions to Field's - -[source, csharp] ----- -Field fieldString = "name"; ----- -but for expressions this is still rather involved - -[source, csharp] ----- -Expression> expression = p => p.Name; ----- -[source, csharp] ----- -Field fieldExpression = expression; -Expect("name") - .WhenSerializing(fieldExpression) - .WhenSerializing(fieldString); ----- -to ease creating Field's from expressions there is a static Property class you can use - -[source, csharp] ----- -Field fieldString = "name"; ----- -but for expressions this is still rather involved - -[source, csharp] ----- -var fieldExpression = Field(p => p.Name); ----- -Using static imports 
in c# 6 this can be even shortened: -using static Nest.Static; - -[source, csharp] ----- -fieldExpression = Field(p => p.Name); ----- -Now this is much much terser then our first example using the constructor! - -[source, csharp] ----- -Expect("name") - .WhenSerializing(fieldString) - .WhenSerializing(fieldExpression); ----- -By default NEST will camelCase all the field names to be more javascripty - -using DefaultFieldNameInferrer() on ConnectionSettings you can change this behavior - -[source, csharp] ----- -var setup = WithConnectionSettings(s => s.DefaultFieldNameInferrer(p => p.ToUpper())); ----- -[source, csharp] ----- -setup.Expect("NAME").WhenSerializing(Field(p => p.Name)); ----- -However string are *always* passed along verbatim - -[source, csharp] ----- -setup.Expect("NaMe").WhenSerializing("NaMe"); ----- -if you want the same behavior for expressions simply do nothing in the default inferrer - -[source, csharp] ----- -setup = WithConnectionSettings(s => s.DefaultFieldNameInferrer(p => p)); ----- -[source, csharp] ----- -setup.Expect("Name").WhenSerializing(Field(p => p.Name)); ----- -Complex field name expressions - -You can follow your property expression to any depth, here we are traversing to the LeadDeveloper's (Person) FirstName - -[source, csharp] ----- -Expect("leadDeveloper.firstName").WhenSerializing(Field(p => p.LeadDeveloper.FirstName)); ----- -When dealing with collection index access is ingnored allowing you to traverse into properties of collections - -[source, csharp] ----- -Expect("curatedTags").WhenSerializing(Field(p => p.CuratedTags[0])); ----- -Similarly .First() also works, remember these are expressions and not actual code that will be executed - -[source, csharp] ----- -Expect("curatedTags").WhenSerializing(Field(p => p.CuratedTags.First())); ----- -[source, csharp] ----- -Expect("curatedTags.added").WhenSerializing(Field(p => p.CuratedTags[0].Added)); -Expect("curatedTags.name").WhenSerializing(Field(p => 
p.CuratedTags.First().Name)); ----- -When we see an indexer on a dictionary we assume they describe property names - -[source, csharp] ----- -Expect("metadata.hardcoded").WhenSerializing(Field(p => p.Metadata["hardcoded"])); ----- -[source, csharp] ----- -Expect("metadata.hardcoded.created").WhenSerializing(Field(p => p.Metadata["hardcoded"].Created)); ----- -A cool feature here is that we'll evaluate variables passed to these indexers - -[source, csharp] ----- -var variable = "var"; ----- -[source, csharp] ----- -Expect("metadata.var").WhenSerializing(Field(p => p.Metadata[variable])); -Expect("metadata.var.created").WhenSerializing(Field(p => p.Metadata[variable].Created)); ----- -If you are using elasticearch's multifield mapping (you really should!) these "virtual" sub fields -do not always map back on to your POCO, by calling .Suffix() you describe the sub fields that do not live in your c# objects - -[source, csharp] ----- -Expect("leadDeveloper.firstName.raw").WhenSerializing(Field(p => p.LeadDeveloper.FirstName.Suffix("raw"))); ----- -[source, csharp] ----- -Expect("curatedTags.raw").WhenSerializing(Field(p => p.CuratedTags[0].Suffix("raw"))); -Expect("curatedTags.raw").WhenSerializing(Field(p => p.CuratedTags.First().Suffix("raw"))); -Expect("curatedTags.added.raw").WhenSerializing(Field(p => p.CuratedTags[0].Added.Suffix("raw"))); -Expect("metadata.hardcoded.raw").WhenSerializing(Field(p => p.Metadata["hardcoded"].Suffix("raw"))); -Expect("metadata.hardcoded.created.raw").WhenSerializing(Field(p => p.Metadata["hardcoded"].Created.Suffix("raw"))); ----- -You can even chain them to any depth! 
- -[source, csharp] ----- -Expect("curatedTags.name.raw.evendeeper").WhenSerializing(Field(p => p.CuratedTags.First().Name.Suffix("raw").Suffix("evendeeper"))); ----- -Variables passed to suffix will be evaluated as well - -[source, csharp] ----- -var suffix = "unanalyzed"; ----- -[source, csharp] ----- -Expect("metadata.var.unanalyzed").WhenSerializing(Field(p => p.Metadata[variable].Suffix(suffix))); -Expect("metadata.var.created.unanalyzed").WhenSerializing(Field(p => p.Metadata[variable].Created.Suffix(suffix))); ----- -Annotations - -When using NEST's property attributes you can specify a new name for the properties - -[source, csharp] ----- -Expect("naam").WhenSerializing(Field(p => p.Name)); ----- - -Starting with NEST 2.x we also ask the serializer if it can resolve the property to a name. -Here we ask the default JsonNetSerializer and it takes JsonProperty into account - -[source, csharp] ----- -Expect("nameInJson").WhenSerializing(Field(p => p.Name)); ----- - -If both are specified NEST takes precedence though - -[source, csharp] ----- -Expect("naam").WhenSerializing(Field(p => p.Name)); -Expect(new - { - naam = "Martijn Laarman" - }).WhenSerializing(new Both { Name = "Martijn Laarman" }); ----- -Resolving field names is cached but this is per connection settings - -[source, csharp] ----- -var connectionSettings = TestClient.CreateSettings(forceInMemory: true); -var client = new ElasticClient(connectionSettings); -var fieldNameOnA = client.Infer.Field(Field(p => p.C.Name)); -var fieldNameOnB = client.Infer.Field(Field(p => p.C.Name)); ----- -Here we have to similary shaped expressions on coming from A and on from B -that will resolve to the same field name, as expected - -[source, csharp] ----- -fieldNameOnA.Should().Be("c.name"); ----- -[source, csharp] ----- -fieldNameOnB.Should().Be("c.name"); ----- -now we create a new connectionsettings with a remap for C on class A to `d` -now when we resolve the field path for A will be different - -[source, 
csharp] ----- -var newConnectionSettings = TestClient.CreateSettings(forceInMemory: true, modifySettings: s => s - .InferMappingFor(m => m - .Rename(p => p.C, "d") - ) -); ----- -[source, csharp] ----- -var newClient = new ElasticClient(newConnectionSettings); -fieldNameOnA = newClient.Infer.Field(Field(p => p.C.Name)); -fieldNameOnB = newClient.Infer.Field(Field(p => p.C.Name)); -fieldNameOnA.Should().Be("d.name"); -fieldNameOnB.Should().Be("c.name"); ----- -however we didn't break inferrence on the first client instance using its separate connectionsettings - -[source, csharp] ----- -fieldNameOnA = client.Infer.Field(Field(p => p.C.Name)); ----- -[source, csharp] ----- -fieldNameOnB = client.Infer.Field(Field(p => p.C.Name)); -fieldNameOnA.Should().Be("c.name"); -fieldNameOnB.Should().Be("c.name"); ----- -To wrap up lets showcase the precedence that field names are inferred -1. A hard rename of the property on connection settings using Rename() -2. A NEST property mapping -3. Ask the serializer if the property has a verbatim value e.g it has an explicit JsonPropery attribute. -4. 
Pass the MemberInfo's Name to the DefaultFieldNameInferrer which by default camelCases -In the following example we have a class where each case wins - - -Here we create a custom converter that renames any property named `AskSerializer` to `ask` - -[source, csharp] ----- -var usingSettings = WithConnectionSettings(s => s ----- -here we provide an explicit rename of a property on connectionsettings - -[source, csharp] ----- -.InferMappingFor(m => m - .Rename(p => p.RenamedOnConnectionSettings, "renamed") - ) ----- -All properties that are not mapped verbatim should be uppercased - -[source, csharp] ----- -.DefaultFieldNameInferrer(p => p.ToUpperInvariant()) -).WithSerializer(s => new CustomSerializer(s)); ----- -[source, csharp] ----- -usingSettings.Expect("renamed").ForField(Field(p => p.RenamedOnConnectionSettings)); -usingSettings.Expect("nestAtt").ForField(Field(p => p.NestAttribute)); -usingSettings.Expect("jsonProp").ForField(Field(p => p.JsonProperty)); -usingSettings.Expect("ask").ForField(Field(p => p.AskSerializer)); -usingSettings.Expect("DEFAULTFIELDNAMEINFERRER").ForField(Field(p => p.DefaultFieldNameInferrer)); ----- -The same rules apply when indexing an object - -[source, csharp] ----- -usingSettings.Expect(new [] -{ - "ask", - "DEFAULTFIELDNAMEINFERRER", - "jsonProp", - "nestAtt", - "renamed" -}).AsPropertiesOf(new Precedence -{ - RenamedOnConnectionSettings = "renamed on connection settings", - NestAttribute = "using a nest attribute", - JsonProperty = "the default serializer resolves json property attributes", - AskSerializer = "serializer fiddled with this one", - DefaultFieldNameInferrer = "shouting much?" 
-}); ----- - diff --git a/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/Id/IdsInference.doc.asciidoc b/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/Id/IdsInference.doc.asciidoc deleted file mode 100644 index 6a659778cb3..00000000000 --- a/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/Id/IdsInference.doc.asciidoc +++ /dev/null @@ -1,79 +0,0 @@ -# Ids - -Several places in the elasticsearch API expect an Id object to be passed. This is a special box type that you can implicitly convert to and from many value types. - -Methods that take an Id can be passed longs, ints, strings -Guids and they will implicitly converted to Ids - -[source, csharp] ----- -Nest.Id idFromInt = 1; -Nest.Id idFromLong = 2L; -Nest.Id idFromString = "hello-world"; -Nest.Id idFromGuid = new Guid("D70BD3CF-4E38-46F3-91CA-FCBEF29B148E"); -Expect(1).WhenSerializing(idFromInt); -Expect(2).WhenSerializing(idFromLong); -Expect("hello-world").WhenSerializing(idFromString); -Expect("d70bd3cf-4e38-46f3-91ca-fcbef29b148e").WhenSerializing(idFromGuid); ----- -Sometimes a method takes an object and we need an Id from that object to build up a path. -There is no implicit conversion from any object to Id but we can call Id.From. -Imagine your codebase has the following type that we want to index into elasticsearch - -By default NEST will try to find a property called `Id` on the class using reflection -and create a cached fast func delegate based on the properties getter - -[source, csharp] ----- -var dto = new MyDTO { Id =new Guid("D70BD3CF-4E38-46F3-91CA-FCBEF29B148E"), Name = "x", OtherName = "y" }; ----- -[source, csharp] ----- -Expect("d70bd3cf-4e38-46f3-91ca-fcbef29b148e").WhenInferringIdOn(dto); ----- -Using the connection settings you can specify a different property NEST should look for ids. 
-Here we instruct NEST to infer the Id for MyDTO based on its Name property - -[source, csharp] ----- -WithConnectionSettings(x => x - .InferMappingFor(m => m - .IdProperty(p => p.Name) - ) -).Expect("x").WhenInferringIdOn(dto); ----- -Even though we have a cache at play the cache is per connection settings, so we can create a different config - -[source, csharp] ----- -WithConnectionSettings(x => x - .InferMappingFor(m => m - .IdProperty(p => p.OtherName) - ) -).Expect("y").WhenInferringIdOn(dto); ----- -Another way is to mark the type with an ElasticType attribute, using a string IdProperty - -Now when we infer the id we expect it to be the Name property without doing any configuration on the ConnectionSettings - -[source, csharp] ----- -var dto = new MyOtherDTO { Id =new Guid("D70BD3CF-4E38-46F3-91CA-FCBEF29B148E"), Name = "x", OtherName = "y" }; ----- -[source, csharp] ----- -Expect("x").WhenInferringIdOn(dto); ----- -This attribute IS cached statically/globally, however connectionsettings with a config for the type will -still win over this static configuration - -[source, csharp] ----- -WithConnectionSettings(x => x - .InferMappingFor(m => m - .IdProperty(p => p.OtherName) - ) -).Expect("y").WhenInferringIdOn(dto); ----- -Eventhough we have a cache at play the cache its per connection settings, so we can create a different config - diff --git a/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/IdsInference.doc.asciidoc b/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/IdsInference.doc.asciidoc deleted file mode 100644 index b5818a65d59..00000000000 --- a/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/IdsInference.doc.asciidoc +++ /dev/null @@ -1,98 +0,0 @@ -# Ids - -Several places in the elasticsearch API expect an Id object to be passed. This is a special box type that you can implicitly convert to and from many value types. 
- - -Methods that take an Id can be passed longs, ints, strings & Guids and they will implicitly converted to Ids - -[source, csharp] ----- -Id idFromInt = 1; -Id idFromLong = 2L; -Id idFromString = "hello-world"; -Id idFromGuid = new Guid("D70BD3CF-4E38-46F3-91CA-FCBEF29B148E"); -Expect(1).WhenSerializing(idFromInt); -Expect(2).WhenSerializing(idFromLong); -Expect("hello-world").WhenSerializing(idFromString); -Expect("d70bd3cf-4e38-46f3-91ca-fcbef29b148e").WhenSerializing(idFromGuid); ----- -Sometimes a method takes an object and we need an Id from that object to build up a path. -There is no implicit conversion from any object to Id but we can call Id.From. -Imagine your codebase has the following type that we want to index into elasticsearch - -[source, csharp] ----- -class MyDTO -{ - public Guid Id { get; set; } - public string Name { get; set; } - public string OtherName { get; set; } -} ----- -By default NEST will try to find a property called `Id` on the class using reflection -and create a cached fast func delegate based on the properties getter - -[source, csharp] ----- -var dto = new MyDTO { Id =new Guid("D70BD3CF-4E38-46F3-91CA-FCBEF29B148E"), Name = "x", OtherName = "y" }; ----- -[source, csharp] ----- -Expect("d70bd3cf-4e38-46f3-91ca-fcbef29b148e").WhenInferringIdOn(dto); ----- -Using the connection settings you can specify a different property NEST should look for ids. 
-Here we instruct NEST to infer the Id for MyDTO based on its Name property - -[source, csharp] ----- -WithConnectionSettings(x => x - .InferMappingFor(m => m - .IdProperty(p => p.Name) - ) -).Expect("x").WhenInferringIdOn(dto); ----- -Even though we have a cache at play the cache is per connection settings, so we can create a different config - -[source, csharp] ----- -WithConnectionSettings(x => x - .InferMappingFor(m => m - .IdProperty(p => p.OtherName) - ) -).Expect("y").WhenInferringIdOn(dto); ----- -Another way is to mark the type with an ElasticType attribute, using a string IdProperty - -[source, csharp] ----- -[ElasticsearchType(IdProperty = nameof(Name))] -class MyOtherDTO -{ - public Guid Id { get; set; } - public string Name { get; set; } - public string OtherName { get; set; } -} ----- -Now when we infer the id we expect it to be the Name property without doing any configuration on the ConnectionSettings - -[source, csharp] ----- -var dto = new MyOtherDTO { Id =new Guid("D70BD3CF-4E38-46F3-91CA-FCBEF29B148E"), Name = "x", OtherName = "y" }; ----- -[source, csharp] ----- -Expect("x").WhenInferringIdOn(dto); ----- -This attribute IS cached statically/globally, however connectionsettings with a config for the type will -still win over this static configuration - -[source, csharp] ----- -WithConnectionSettings(x => x - .InferMappingFor(m => m - .IdProperty(p => p.OtherName) - ) -).Expect("y").WhenInferringIdOn(dto); ----- -Eventhough we have a cache at play the cache its per connection settings, so we can create a different config - diff --git a/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/Indices/IndicesPaths.doc.asciidoc b/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/Indices/IndicesPaths.doc.asciidoc deleted file mode 100644 index cc902f17fde..00000000000 --- a/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/Indices/IndicesPaths.doc.asciidoc +++ /dev/null @@ -1,34 +0,0 @@ -# Indices paths - -Some API's in elasticsearch take one or many index 
name or a special "_all" marker to send the request to all the indices -In nest this is encoded using `Indices` - -Several types implicitly convert to `Indices` - -[source, csharp] ----- -Nest.Indices singleIndexFromString = "name"; -Nest.Indices multipleIndicesFromString = "name1, name2"; -Nest.Indices allFromString = "_all"; -Nest.Indices allWithOthersFromString = "_all, name2"; -singleIndexFromString.Match( - all => all.Should().BeNull(), - many => many.Indices.Should().HaveCount(1).And.Contain("name") - ); ----- -to ease creating Field's from expressions there is a static Property class you can use - - - -[source, csharp] ----- -var all = Nest.Indices.All; ----- -[source, csharp] ----- -var many = Nest.Indices.Index("name1", "name2"); -var manyTyped = Nest.Indices.Index().And(); -var singleTyped = Nest.Indices.Index(); -var singleString = Nest.Indices.Index("name1"); -var invalidSingleString = Nest.Indices.Index("name1, name2"); ----- diff --git a/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/IndicesPaths.doc.asciidoc b/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/IndicesPaths.doc.asciidoc deleted file mode 100644 index c6c5a78f898..00000000000 --- a/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/IndicesPaths.doc.asciidoc +++ /dev/null @@ -1,47 +0,0 @@ -# Indices paths - -Some API's in elasticsearch take one or many index name or a special "_all" marker to send the request to all the indices -In nest this is encoded using `Indices` - - -Several types implicitly convert to `Indices` - -[source, csharp] ----- -Nest.Indices singleIndexFromString = "name"; -Nest.Indices multipleIndicesFromString = "name1, name2"; -Nest.Indices allFromString = "_all"; -Nest.Indices allWithOthersFromString = "_all, name2"; -singleIndexFromString.Match( - all => all.Should().BeNull(), - many => many.Indices.Should().HaveCount(1).And.Contain("name") - ); -multipleIndicesFromString.Match( - all => all.Should().BeNull(), - many => 
many.Indices.Should().HaveCount(2).And.Contain("name2") - ); -allFromString.Match( - all => all.Should().NotBeNull(), - many => many.Indices.Should().BeNull() - ); -allWithOthersFromString.Match( - all => all.Should().NotBeNull(), - many => many.Indices.Should().BeNull() - ); ----- -to ease creating Field's from expressions there is a static Property class you can use - - - -[source, csharp] ----- -var all = Nest.Indices.All; ----- -[source, csharp] ----- -var many = Nest.Indices.Index("name1", "name2"); -var manyTyped = Nest.Indices.Index().And(); -var singleTyped = Nest.Indices.Index(); -var singleString = Nest.Indices.Index("name1"); -var invalidSingleString = Nest.Indices.Index("name1, name2"); ----- diff --git a/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/PropertyInference.doc.asciidoc b/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/PropertyInference.doc.asciidoc deleted file mode 100644 index d21a723cd1b..00000000000 --- a/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/PropertyInference.doc.asciidoc +++ /dev/null @@ -1,6 +0,0 @@ -[source, csharp] ----- -Expression> expression = p => p.Name.Suffix("raw"); -Expect("raw").WhenSerializing(expression); -Assert.Throws(() => Expect("exception!").WhenSerializing("name.raw")); ----- diff --git a/docs/asciidoc/ClientConcepts/HighLevel/Mapping/AutoMap.doc.asciidoc b/docs/asciidoc/ClientConcepts/HighLevel/Mapping/AutoMap.doc.asciidoc deleted file mode 100644 index 9db88699059..00000000000 --- a/docs/asciidoc/ClientConcepts/HighLevel/Mapping/AutoMap.doc.asciidoc +++ /dev/null @@ -1,904 +0,0 @@ -# Auto mapping properties - -When creating a mapping (either when creating an index or via the put mapping API), -NEST offers a feature called AutoMap(), which will automagically infer the correct -Elasticsearch datatypes of the POCO properties you are mapping. Alternatively, if -you're using attributes to map your properties, then calling AutoMap() is required -in order for your attributes to be applied. 
We'll look at examples of both. - - - -For these examples, we'll define two POCOS. A Company, which has a name -and a collection of Employees. And Employee, which has various properties of -different types, and itself has a collection of Employees. - -[source, csharp] ----- -public class Company -{ - public string Name { get; set; } - public List Employees { get; set; } -} ----- -[source, csharp] ----- -public class Employee -{ - public string FirstName { get; set; } - public string LastName { get; set; } - public int Salary { get; set; } - public DateTime Birthday { get; set; } - public bool IsManager { get; set; } - public List Employees { get; set; } - public TimeSpan Hours { get; set;} -} ----- -## Manual mapping -To create a mapping for our Company type, we can use the fluent API -and map each property explicitly - -[source, csharp] ----- -var descriptor = new CreateIndexDescriptor("myindex") - .Mappings(ms => ms - .Map(m => m - .Properties(ps => ps - .String(s => s - .Name(c => c.Name) - ) - .Object(o => o - .Name(c => c.Employees) - .Properties(eps => eps - .String(s => s - .Name(e => e.FirstName) - ) - .String(s => s - .Name(e => e.LastName) - ) - .Number(n => n - .Name(e => e.Salary) - .Type(NumberType.Integer) - ) - ) - ) - ) - ) - ); ----- -Which is all fine and dandy, and useful for some use cases. However in most cases -this is becomes too cumbersome of an approach, and you simply just want to map *all* -the properties of your POCO in a single go. 
- -[source, csharp] ----- -var expected = new -{ - mappings = new - { - company = new - { - properties = new - { - name = new - { - type = "string" - }, - employees = new - { - type = "object", - properties = new - { - firstName = new - { - type = "string" - }, - lastName = new - { - type = "string" - }, - salary = new - { - type = "integer" - } - } - } - } - } - } -}; ----- -[source, csharp] ----- -Expect(expected).WhenSerializing((ICreateIndexRequest) descriptor); ----- -## Simple Automapping -This is exactly where `AutoMap()` becomes useful. Instead of manually mapping each property, -explicitly, we can instead call `.AutoMap()` for each of our mappings and let NEST do all the work - -[source, csharp] ----- -var descriptor = new CreateIndexDescriptor("myindex") - .Mappings(ms => ms - .Map(m => m.AutoMap()) - .Map(m => m.AutoMap()) - ); ----- -Observe that NEST has inferred the Elasticsearch types based on the CLR type of our POCO properties. -In this example, -- Birthday was mapped as a date, -- Hours was mapped as a long (ticks) -- IsManager was mapped as a boolean, -- Salary as an integer -- Employees as an object -and the remaining string properties as strings. 
- -[source, csharp] ----- -var expected = new -{ - mappings = new - { - company = new - { - properties = new - { - employees = new - { - properties = new - { - birthday = new - { - type = "date" - }, - employees = new - { - properties = new { }, - type = "object" - }, - firstName = new - { - type = "string" - }, - hours = new - { - type = "long" - }, - isManager = new - { - type = "boolean" - }, - lastName = new - { - type = "string" - }, - salary = new - { - type = "integer" - } - }, - type = "object" - }, - name = new - { - type = "string" - } - } - }, - employee = new - { - properties = new - { - birthday = new - { - type = "date" - }, - employees = new - { - properties = new { }, - type = "object" - }, - firstName = new - { - type = "string" - }, - hours = new - { - type = "long" - }, - isManager = new - { - type = "boolean" - }, - lastName = new - { - type = "string" - }, - salary = new - { - type = "integer" - } - } - } - } -}; ----- -[source, csharp] ----- -Expect(expected).WhenSerializing((ICreateIndexRequest) descriptor); ----- -## Automapping with overrides -In most cases, you'll want to map more than just the vanilla datatypes and also provide -various options on your properties (analyzer, doc_values, etc...). In that case, it's -possible to use AutoMap() in conjuction with explicitly mapped properties. - - -Here we are using AutoMap() to automatically map our company type, but then we're -overriding our employee property and making it a `nested` type, since by default, -AutoMap() will infer objects as `object`. 
- -[source, csharp] ----- -var descriptor = new CreateIndexDescriptor("myindex") - .Mappings(ms => ms - .Map(m => m - .AutoMap() - .Properties(ps => ps - .Nested(n => n - .Name(c => c.Employees) - .Properties(eps => eps - // snip - ) - ) - ) - ) - ); ----- -[source, csharp] ----- -var expected = new - { - mappings = new - { - company = new - { - properties = new - { - name = new - { - type = "string" - }, - employees = new - { - type = "nested", - properties = new {} - } - } - } - } - }; -Expect(expected).WhenSerializing((ICreateIndexRequest) descriptor); ----- -## Automap with attributes -It is also possible to define your mappings using attributes on your POCOS. When you -use attributes, you MUST use AutoMap() in order for the attributes to be applied. -Here we define the same two types but this time using attributes. - -[source, csharp] ----- -[ElasticsearchType(Name = "company")] -public class CompanyWithAttributes -{ - [String(Analyzer = "keyword", NullValue = "null", Similarity = SimilarityOption.BM25)] - public string Name { get; set; } - - [String] - public TimeSpan? 
HeadOfficeHours { get; set; } - - [Object(Path = "employees", Store = false)] - public List Employees { get; set; } -} ----- -[source, csharp] ----- -[ElasticsearchType(Name = "employee")] -public class EmployeeWithAttributes -{ - [String] - public string FirstName { get; set; } - - [String] - public string LastName { get; set; } - - [Number(DocValues = false, IgnoreMalformed = true, Coerce = true)] - public int Salary { get; set; } - - [Date(Format = "MMddyyyy", NumericResolution = NumericResolutionUnit.Seconds)] - public DateTime Birthday { get; set; } - - [Boolean(NullValue = false, Store = true)] - public bool IsManager { get; set; } - - [Nested(Path = "employees")] - [JsonProperty("empl")] - public List Employees { get; set; } -} ----- -[source, csharp] ----- -var descriptor = new CreateIndexDescriptor("myindex") - .Mappings(ms => ms - .Map(m => m.AutoMap()) - .Map(m => m.AutoMap()) - ); -var expected = new - { - mappings = new - { - company = new - { - properties = new - { - employees = new - { - path = "employees", - properties = new - { - birthday = new - { - type = "date" - }, - employees = new - { - properties = new { }, - type = "object" - }, - firstName = new - { - type = "string" - }, - hours = new - { - type = "long" - }, - isManager = new - { - type = "boolean" - }, - lastName = new - { - type = "string" - }, - salary = new - { - type = "integer" - } - }, - store = false, - type = "object" - }, - name = new - { - analyzer = "keyword", - null_value = "null", - similarity = "BM25", - type = "string" - }, - headOfficeHours = new - { - type = "string" - } - } - }, - employee = new - { - properties = new - { - birthday = new - { - format = "MMddyyyy", - numeric_resolution = "seconds", - type = "date" - }, - empl = new - { - path = "employees", - properties = new - { - birthday = new - { - type = "date" - }, - employees = new - { - properties = new { }, - type = "object" - }, - firstName = new - { - type = "string" - }, - hours = new - { - type = "long" - 
}, - isManager = new - { - type = "boolean" - }, - lastName = new - { - type = "string" - }, - salary = new - { - type = "integer" - } - }, - type = "nested" - }, - firstName = new - { - type = "string" - }, - isManager = new - { - null_value = false, - store = true, - type = "boolean" - }, - lastName = new - { - type = "string" - }, - salary = new - { - coerce = true, - doc_values = false, - ignore_malformed = true, - type = "double" - } - } - } - } - }; -Expect(expected).WhenSerializing(descriptor as ICreateIndexRequest); ----- - -Just as we were able to override the inferred properties in our earlier example, explicit (manual) -mappings also take precedence over attributes. Therefore we can also override any mappings applied -via any attributes defined on the POCO - - -[source, csharp] ----- -var descriptor = new CreateIndexDescriptor("myindex") - .Mappings(ms => ms - .Map(m => m - .AutoMap() - .Properties(ps => ps - .Nested(n => n - .Name(c => c.Employees) - ) - ) - ) - .Map(m => m - .AutoMap() - .TtlField(ttl => ttl - .Enable() - .Default("10m") - ) - .Properties(ps => ps - .String(s => s - .Name(e => e.FirstName) - .Fields(fs => fs - .String(ss => ss - .Name("firstNameRaw") - .Index(FieldIndexOption.NotAnalyzed) - ) - .TokenCount(t => t - .Name("length") - .Analyzer("standard") - ) - ) - ) - .Number(n => n - .Name(e => e.Salary) - .Type(NumberType.Double) - .IgnoreMalformed(false) - ) - .Date(d => d - .Name(e => e.Birthday) - .Format("MM-dd-yy") - ) - ) - ) - ); -var expected = new - { - mappings = new - { - company = new - { - properties = new - { - employees = new - { - type = "nested" - }, - name = new - { - analyzer = "keyword", - null_value = "null", - similarity = "BM25", - type = "string" - }, - headOfficeHours = new - { - type = "string" - } - } - }, - employee = new - { - _ttl = new - { - enabled = true, - @default = "10m" - }, - properties = new - { - birthday = new - { - format = "MM-dd-yy", - type = "date" - }, - empl = new - { - path = 
"employees", - properties = new - { - birthday = new - { - type = "date" - }, - employees = new - { - properties = new { }, - type = "object" - }, - firstName = new - { - type = "string" - }, - hours = new - { - type = "long" - }, - isManager = new - { - type = "boolean" - }, - lastName = new - { - type = "string" - }, - salary = new - { - type = "integer" - } - }, - type = "nested" - }, - firstName = new - { - fields = new - { - firstNameRaw = new - { - index = "not_analyzed", - type = "string" - }, - length = new - { - type = "token_count", - analyzer = "standard" - } - }, - type = "string" - }, - isManager = new - { - null_value = false, - store = true, - type = "boolean" - }, - lastName = new - { - type = "string" - }, - salary = new - { - ignore_malformed = false, - type = "double" - } - } - } - } - }; -Expect(expected).WhenSerializing((ICreateIndexRequest) descriptor); ----- -[source, csharp] ----- -[ElasticsearchType(Name = "company")] -public class CompanyWithAttributesAndPropertiesToIgnore -{ - public string Name { get; set; } - - [String(Ignore = true)] - public string PropertyToIgnore { get; set; } - - public string AnotherPropertyToIgnore { get; set; } - - [JsonIgnore] - public string JsonIgnoredProperty { get; set; } -} ----- -== Ignoring Properties -Properties on a POCO can be ignored in a few ways: - - - -- Using the `Ignore` property on a derived `ElasticsearchPropertyAttribute` type applied to the property that should be ignored on the POCO - - - -- Using the `.InferMappingFor(Func, IClrTypeMapping> selector)` on the connection settings - - - -- Using an ignore attribute applied to the POCO property that is understood by the `IElasticsearchSerializer` used and inspected inside of `CreatePropertyMapping()` on the serializer. 
In the case of the default `JsonNetSerializer`, this is the Json.NET `JsonIgnoreAttribute` - - - -This example demonstrates all ways, using the attribute way to ignore the property `PropertyToIgnore`, the infer mapping way to ignore the -property `AnotherPropertyToIgnore` and the json serializer specific attribute way to ignore the property `JsonIgnoredProperty` - - -[source, csharp] ----- -var descriptor = new CreateIndexDescriptor("myindex") - .Mappings(ms => ms - .Map(m => m - .AutoMap() - ) - ); -var expected = new - { - mappings = new - { - company = new - { - properties = new - { - name = new - { - type = "string" - } - } - } - } - }; -var settings = WithConnectionSettings(s => s - .InferMappingFor(i => i - .Ignore(p => p.AnotherPropertyToIgnore) - ) - ); -settings.Expect(expected).WhenSerializing((ICreateIndexRequest) descriptor); ----- -If you notice in our previous Company/Employee examples, the Employee type is recursive -in that itself contains a collection of type `Employee`. By default, `.AutoMap()` will only -traverse a single depth when it encounters recursive instances like this. Hence, in the -previous examples, the second level of Employee did not get any of its properties mapped. -This is done as a safe-guard to prevent stack overflows and all the fun that comes with -infinite recursion. Additionally, in most cases, when it comes to Elasticsearch mappings, it is -often an edge case to have deeply nested mappings like this. However, you may still have -the need to do this, so you can control the recursion depth of AutoMap(). -Let's introduce a very simple class A, to reduce the noise, which itself has a property -Child of type A. 
- -[source, csharp] ----- -public class A -{ - public A Child { get; set; } -} ----- -By default, AutoMap() only goes as far as depth 1 - -[source, csharp] ----- -var descriptor = new CreateIndexDescriptor("myindex") - .Mappings(ms => ms - .Map(m => m.AutoMap()) - ); ----- -Thus we do not map properties on the second occurrence of our Child property - -[source, csharp] ----- -var expected = new -{ - mappings = new - { - a = new - { - properties = new - { - child = new - { - properties = new { }, - type = "object" - } - } - } - } -}; ----- -[source, csharp] ----- -Expect(expected).WhenSerializing((ICreateIndexRequest) descriptor); ----- -Now lets specify a maxRecursion of 3 - -[source, csharp] ----- -var withMaxRecursionDescriptor = new CreateIndexDescriptor("myindex") - .Mappings(ms => ms - .Map(m => m.AutoMap(3)) - ); ----- -AutoMap() has now mapped three levels of our Child property - -[source, csharp] ----- -var expectedWithMaxRecursion = new -{ - mappings = new - { - a = new - { - properties = new - { - child = new - { - type = "object", - properties = new - { - child = new - { - type = "object", - properties = new - { - child = new - { - type = "object", - properties = new - { - child = new - { - type = "object", - properties = new { } - } - } - } - } - } - } - } - } - } - } -}; ----- -[source, csharp] ----- -Expect(expectedWithMaxRecursion).WhenSerializing((ICreateIndexRequest) withMaxRecursionDescriptor); ----- -Now we can pass an instance of our custom visitor to AutoMap() - -[source, csharp] ----- -var descriptor = new CreateIndexDescriptor("myindex") - .Mappings(ms => ms - .Map(m => m.AutoMap(new DisableDocValuesPropertyVisitor())) - ); ----- -and anytime it maps a property as a number (INumberProperty) or boolean (IBooleanProperty) -it will apply the transformation defined in each Visit() respectively, which in this example -disables doc values. 
- -[source, csharp] ----- -var expected = new -{ - mappings = new - { - employee = new - { - properties = new - { - birthday = new - { - type = "date" - }, - employees = new - { - properties = new { }, - type = "object" - }, - firstName = new - { - type = "string" - }, - isManager = new - { - doc_values = false, - type = "boolean" - }, - lastName = new - { - type = "string" - }, - salary = new - { - doc_values = false, - type = "integer" - } - } - } - } -}; ----- -[source, csharp] ----- -var descriptor = new CreateIndexDescriptor("myindex") - .Mappings(ms => ms - .Map(m => m.AutoMap(new EverythingIsAStringPropertyVisitor())) - ); -var expected = new - { - mappings = new - { - employee = new - { - properties = new - { - birthday = new - { - type = "string" - }, - employees = new - { - type = "string" - }, - firstName = new - { - type = "string" - }, - isManager = new - { - type = "string" - }, - lastName = new - { - type = "string" - }, - salary = new - { - type = "string" - } - } - } - } - }; ----- diff --git a/docs/asciidoc/ClientConcepts/LowLevel/Connecting.doc.asciidoc b/docs/asciidoc/ClientConcepts/LowLevel/Connecting.doc.asciidoc deleted file mode 100644 index 41ac4fd0d1b..00000000000 --- a/docs/asciidoc/ClientConcepts/LowLevel/Connecting.doc.asciidoc +++ /dev/null @@ -1,291 +0,0 @@ -# Connecting -Connecting to *Elasticsearch* with `Elasticsearch.Net` is quite easy but has a few toggles and options worth knowing. - -# Choosing the right connection strategy -If you simply new an `ElasticLowLevelClient`, it will be a non-failover connection to `http://localhost:9200` - - -[source, csharp] ----- -var client = new ElasticLowLevelClient(); -var tokenizers = new TokenizersDescriptor(); ----- - -If your Elasticsearch node does not live at `http://localhost:9200` but i.e `http://mynode.example.com:8082/apiKey`, then -you will need to pass in some instance of `IConnectionConfigurationValues`. 
- -The easiest way to do this is: - - -[source, csharp] ----- -var node = new Uri("http://mynode.example.com:8082/apiKey"); -var config = new ConnectionConfiguration(node); -var client = new ElasticLowLevelClient(config); ----- - -This however is still a non-failover connection. Meaning if that `node` goes down the operation will not be retried on any other nodes in the cluster. - -To get a failover connection we have to pass an `IConnectionPool` instance instead of a `Uri`. - - -[source, csharp] ----- -var node = new Uri("http://mynode.example.com:8082/apiKey"); -var connectionPool = new SniffingConnectionPool(new[] { node }); -var config = new ConnectionConfiguration(connectionPool); -var client = new ElasticLowLevelClient(config); ----- - -Here instead of directly passing `node`, we pass a `SniffingConnectionPool` which will use our `node` to find out the rest of the available cluster nodes. -Be sure to read more about [Connection Pooling and Cluster Failover here](/elasticsearch-net/cluster-failover.html) - -## Options - -Besides either passing a `Uri` or `IConnectionPool` to `ConnectionConfiguration`, you can also fluently control many more options. For instance: - - -[source, csharp] ----- -var node = new Uri("http://mynode.example.com:8082/apiKey"); -var connectionPool = new SniffingConnectionPool(new[] { node }); -var config = new ConnectionConfiguration(connectionPool) - .DisableDirectStreaming() - .BasicAuthentication("user", "pass") - .RequestTimeout(TimeSpan.FromSeconds(5)); ----- - -The following is a list of available connection configuration options: - - -[source, csharp] ----- -var client = new ElasticLowLevelClient(); ----- -[source, csharp] ----- -var config = new ConnectionConfiguration() - - .DisableAutomaticProxyDetection() ----- -Disable automatic proxy detection. Defaults to true. 
- -[source, csharp] ----- -.EnableHttpCompression() ----- -Enable compressed request and reesponses from Elasticsearch (Note that nodes need to be configured -to allow this. See the [http module settings](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/modules-http.html) for more info). - -[source, csharp] ----- -.DisableDirectStreaming() ----- -By default responses are deserialized off stream to the object you tell it to. -For debugging purposes it can be very useful to keep a copy of the raw response on the result object. - -[source, csharp] ----- -var result = client.Search>(new { size = 12 }); -var raw = result.ResponseBodyInBytes; ----- -This will only have a value if the client configuration has ExposeRawResponse set - -[source, csharp] ----- -var stringResult = client.Search(new { }); ----- - -Please note that this only make sense if you need a mapped response and the raw response at the same time. -If you need a `string` or `byte[]` response simply call: - -[source, csharp] ----- -config = config - //endhide - .GlobalQueryStringParameters(new NameValueCollection()) ----- -Allows you to set querystring parameters that have to be added to every request. For instance, if you use a hosted elasticserch provider, and you need need to pass an `apiKey` parameter onto every request. - -[source, csharp] ----- -.Proxy(new Uri("http://myproxy"), "username", "pass") ----- -Sets proxy information on the connection. - -[source, csharp] ----- -.RequestTimeout(TimeSpan.FromSeconds(4)) ----- -Sets the global maximum time a connection may take. -Please note that this is the request timeout, the builtin .NET `WebRequest` has no way to set connection timeouts -(see http://msdn.microsoft.com/en-us/library/system.net.httpwebrequest.timeout(v=vs.110).aspx). - -[source, csharp] ----- -.ThrowExceptions() ----- -As an alternative to the C/go like error checking on `response.IsValid`, you can instead tell the client to throw -exceptions. 
-There are three category of exceptions thay may be thrown: - -1) ElasticsearchClientException: These are known exceptions, either an exception that occurred in the request pipeline -(such as max retries or timeout reached, bad authentication, etc...) or Elasticsearch itself returned an error (could -not parse the request, bad query, missing field, etc...). If it is an Elasticsearch error, the `ServerError` property -on the response will contain the the actual error that was returned. The inner exception will always contain the -root causing exception. - -2) UnexpectedElasticsearchClientException: These are unknown exceptions, for instance a response from Elasticsearch not -properly deserialized. These are usually bugs and should be reported. This excpetion also inherits from ElasticsearchClientException -so an additional catch block isn't necessary, but can be helpful in distinguishing between the two. -3) Development time exceptions: These are CLR exceptions like ArgumentException, NullArgumentException etc... that are thrown -when an API in the client is misused. These should not be handled as you want to know about them during development. - -[source, csharp] ----- -.PrettyJson() ----- -Forces all serialization to be indented and appends `pretty=true` to all the requests so that the responses are indented as well - -[source, csharp] ----- -.BasicAuthentication("username", "password") ----- -Sets the HTTP basic authentication credentials to specify with all requests. - -**Note:** This can alternatively be specified on the node URI directly: - -[source, csharp] ----- -var uri = new Uri("http://username:password@localhost:9200"); ----- -[source, csharp] ----- -var settings = new ConnectionConfiguration(uri); ----- - -...but may become tedious when using connection pooling with multiple nodes. - - - -You can pass a callback of type `Action` that can eaves drop every time a response (good or bad) is created. 
-If you have complex logging needs this is a good place to add that in. - - -[source, csharp] ----- -var counter = 0; -var connectionPool = new SingleNodeConnectionPool(new Uri("http://localhost:9200")); -var settings = new ConnectionSettings(connectionPool, new InMemoryConnection()) - .OnRequestCompleted(r => counter++); -var client = new ElasticClient(settings); -client.RootNodeInfo(); -counter.Should().Be(1); -client.RootNodeInfoAsync(); -counter.Should().Be(2); ----- - -An example of using `OnRequestCompleted()` for complex logging. Remember, if you would also like -to capture the request and/or response bytes, you also need to set `.DisableDirectStreaming()` -to `true` - - -[source, csharp] ----- -var list = new List(); -var connectionPool = new SingleNodeConnectionPool(new Uri("http://localhost:9200")); -var settings = new ConnectionSettings(connectionPool, new InMemoryConnection()) - .DisableDirectStreaming() - .OnRequestCompleted(response => - { - // log out the request - if (response.RequestBodyInBytes != null) - { - list.Add( - $"{response.HttpMethod} {response.Uri} \n" + - $"{Encoding.UTF8.GetString(response.RequestBodyInBytes)}"); - } - else - { - list.Add($"{response.HttpMethod} {response.Uri}"); - } - - // log out the response - if (response.ResponseBodyInBytes != null) - { - list.Add($"Status: {response.HttpStatusCode}\n" + - $"{Encoding.UTF8.GetString(response.ResponseBodyInBytes)}\n" + - $"{new string('-', 30)}\n"); - } - else - { - list.Add($"Status: {response.HttpStatusCode}\n" + - $"{new string('-', 30)}\n"); - } - }); -list.Add( - $"{response.HttpMethod} {response.Uri} \n" + - $"{Encoding.UTF8.GetString(response.RequestBodyInBytes)}"); -list.Add($"{response.HttpMethod} {response.Uri}"); -list.Add($"Status: {response.HttpStatusCode}\n" + - $"{Encoding.UTF8.GetString(response.ResponseBodyInBytes)}\n" + - $"{new string('-', 30)}\n"); -list.Add($"Status: {response.HttpStatusCode}\n" + - $"{new string('-', 30)}\n"); -var client = new 
ElasticClient(settings); -var syncResponse = client.Search(s => s - .Scroll("2m") - .Sort(ss => ss - .Ascending(SortSpecialField.DocumentIndexOrder) - ) - ); -list.Count.Should().Be(2); -var asyncResponse = await client.SearchAsync(s => s - .Scroll("2m") - .Sort(ss => ss - .Ascending(SortSpecialField.DocumentIndexOrder) - ) - ); -list.Count.Should().Be(4); -list.ShouldAllBeEquivalentTo(new [] - { - "POST http://localhost:9200/_search?scroll=2m \n{\"sort\":[{\"_doc\":{\"order\":\"asc\"}}]}", - "Status: 200\n------------------------------\n", - "POST http://localhost:9200/_search?scroll=2m \n{\"sort\":[{\"_doc\":{\"order\":\"asc\"}}]}", - "Status: 200\n------------------------------\n" - }); ----- -## Configuring SSL -SSL must be configured outside of the client using .NET's -[ServicePointManager](http://msdn.microsoft.com/en-us/library/system.net.servicepointmanager%28v=vs.110%29.aspx) -class and setting the [ServerCertificateValidationCallback](http://msdn.microsoft.com/en-us/library/system.net.servicepointmanager.servercertificatevalidationcallback.aspx) -property. - -The bare minimum to make .NET accept self-signed SSL certs that are not in the Window's CA store would be to have the callback simply return `true`: - -[source, csharp] ----- -ServicePointManager.ServerCertificateValidationCallback += (sender, cert, chain, errors) => true; ----- - -However, this will accept all requests from the AppDomain to untrusted SSL sites, -therefore we recommend doing some minimal introspection on the passed in certificate. - - - -You can then register a factory on ConnectionSettings to create an instance of your subclass instead. -This is called once per instance of ConnectionSettings. 
- - -[source, csharp] ----- -var connectionPool = new SingleNodeConnectionPool(new Uri("http://localhost:9200")); -var settings = new ConnectionSettings(connectionPool, new InMemoryConnection(), s => new MyJsonNetSerializer(s)); -var client = new ElasticClient(settings); -client.RootNodeInfo(); -client.RootNodeInfo(); -var serializer = ((IConnectionSettingsValues)settings).Serializer as MyJsonNetSerializer; -serializer.CallToModify.Should().BeGreaterThan(0); -serializer.SerializeToString(new Project { }); -serializer.CallToContractConverter.Should().BeGreaterThan(0); ----- diff --git a/docs/asciidoc/ClientConcepts/LowLevel/Lifetimes.doc.asciidoc b/docs/asciidoc/ClientConcepts/LowLevel/Lifetimes.doc.asciidoc deleted file mode 100644 index 2ea18a0d78a..00000000000 --- a/docs/asciidoc/ClientConcepts/LowLevel/Lifetimes.doc.asciidoc +++ /dev/null @@ -1,38 +0,0 @@ - -## Lifetimes - -If you are using an IOC container its always useful to know the best practices around the lifetime of your objects - -In general we advise folks to register their ElasticClient instances as singleton. The client is thread safe -so sharing this instance over threads is ok. - -Zooming in however the actual moving part that benefits the most of being static for most of the duration of your -application is ConnectionSettings. Caches are per ConnectionSettings. - -In some applications it could make perfect sense to have multiple singleton IElasticClient's registered with different -connectionsettings. e.g if you have 2 functionally isolated Elasticsearch clusters. 
- - - -[source, csharp] ----- -var connection = new AConnection(); -var connectionPool = new AConnectionPool(new Uri("http://localhost:9200")); -var settings = new AConnectionSettings(connectionPool, connection); -settings.IsDisposed.Should().BeFalse(); -connectionPool.IsDisposed.Should().BeFalse(); -connection.IsDisposed.Should().BeFalse(); ----- - -Disposing the ConnectionSettings will dispose the IConnectionPool and IConnection it has a hold of - - -[source, csharp] ----- -var connection = new AConnection(); -var connectionPool = new AConnectionPool(new Uri("http://localhost:9200")); -var settings = new AConnectionSettings(connectionPool, connection); -settings.IsDisposed.Should().BeTrue(); -connectionPool.IsDisposed.Should().BeTrue(); -connection.IsDisposed.Should().BeTrue(); ----- diff --git a/docs/asciidoc/CodeStandards/Descriptors.doc.asciidoc b/docs/asciidoc/CodeStandards/Descriptors.doc.asciidoc deleted file mode 100644 index 42149e5985e..00000000000 --- a/docs/asciidoc/CodeStandards/Descriptors.doc.asciidoc +++ /dev/null @@ -1,52 +0,0 @@ - -Every descriptor should inherit from `DescriptorBase`, this hides object members from the fluent interface - - -[source, csharp] ----- -var notDescriptors = new[] { typeof(ClusterProcessOpenFileDescriptors).Name, "DescriptorForAttribute" }; -var descriptors = from t in typeof(DescriptorBase<,>).Assembly().Types() - where t.IsClass() - && t.Name.Contains("Descriptor") - && !notDescriptors.Contains(t.Name) - && !t.GetInterfaces().Any(i => i == typeof(IDescriptor)) - select t.FullName; -descriptors.Should().BeEmpty(); ----- - -Methods taking a func should have that func return an interface - - -[source, csharp] ----- -var descriptors = - from t in typeof(DescriptorBase<,>).Assembly().Types() - where t.IsClass() && typeof(IDescriptor).IsAssignableFrom(t) - select t; -var selectorMethods = - from d in descriptors - from m in d.GetMethods() - let parameters = m.GetParameters() - from p in parameters - let type = 
p.ParameterType - let isGeneric = type.IsGeneric() - where isGeneric - let isFunc = type.GetGenericTypeDefinition() == typeof(Func<,>) - where isFunc - let firstFuncArg = type.GetGenericArguments().First() - let secondFuncArg = type.GetGenericArguments().Last() - let isQueryFunc = firstFuncArg.IsGeneric() && - firstFuncArg.GetGenericTypeDefinition() == typeof(QueryContainerDescriptor<>) && - typeof(QueryContainer).IsAssignableFrom(secondFuncArg) - where !isQueryFunc - let isFluentDictionaryFunc = - firstFuncArg.IsGeneric() && - firstFuncArg.GetGenericTypeDefinition() == typeof(FluentDictionary<,>) && - secondFuncArg.IsGeneric() && - secondFuncArg.GetGenericTypeDefinition() == typeof(FluentDictionary<,>) - where !isFluentDictionaryFunc - let lastArgIsNotInterface = !secondFuncArg.IsInterface() - where lastArgIsNotInterface - select $"{m.Name} on {m.DeclaringType.Name}"; -selectorMethods.Should().BeEmpty(); ----- diff --git a/docs/asciidoc/CodeStandards/NamingConventions.doc.asciidoc b/docs/asciidoc/CodeStandards/NamingConventions.doc.asciidoc deleted file mode 100644 index 79e6d70d120..00000000000 --- a/docs/asciidoc/CodeStandards/NamingConventions.doc.asciidoc +++ /dev/null @@ -1,128 +0,0 @@ -# Naming Conventions - -NEST uses the following naming conventions (with _some_ exceptions). 
- - -## Class Names - -Abstract class names should end with a `Base` suffix - - -[source, csharp] ----- -var exceptions = new[] - { - typeof(DateMath) - }; -var abstractClasses = typeof(IRequest).Assembly().GetTypes() - .Where(t => t.IsClass() && t.IsAbstract() && !t.IsSealed() && !exceptions.Contains(t)) - .Where(t => !t.Name.Split('`')[0].EndsWith("Base")) - .Select(t => t.Name.Split('`')[0]) - .ToList(); -abstractClasses.Should().BeEmpty(); ----- - -Class names that end with `Base` suffix are abstract - - -[source, csharp] ----- -var exceptions = new[] { typeof(DateMath) }; -var baseClassesNotAbstract = typeof(IRequest).Assembly().GetTypes() - .Where(t => t.IsClass() && !exceptions.Contains(t)) - .Where(t => t.Name.Split('`')[0].EndsWith("Base")) - .Where(t => !t.IsAbstractClass()) - .Select(t => t.Name.Split('`')[0]) - .ToList(); -baseClassesNotAbstract.Should().BeEmpty(); ----- -## Requests and Responses - -Request class names should end with `Request` - - -[source, csharp] ----- -var types = typeof(IRequest).Assembly().GetTypes(); -var requests = types - .Where(t => typeof(IRequest).IsAssignableFrom(t) && !t.IsAbstract()) - .Where(t => !typeof(IDescriptor).IsAssignableFrom(t)) - .Where(t => !t.Name.Split('`')[0].EndsWith("Request")) - .Select(t => t.Name.Split('`')[0]) - .ToList(); -requests.Should().BeEmpty(); ----- - -Response class names should end with `Response` - - -[source, csharp] ----- -var types = typeof(IRequest).Assembly().GetTypes(); -var responses = types - .Where(t => typeof(IResponse).IsAssignableFrom(t) && !t.IsAbstract()) - .Where(t => !t.Name.Split('`')[0].EndsWith("Response")) - .Select(t => t.Name.Split('`')[0]) - .ToList(); -responses.Should().BeEmpty(); ----- - -Request and Response class names should be one to one in *most* cases. -e.g. `ValidateRequest` => `ValidateResponse`, and not `ValidateQueryRequest` => `ValidateResponse` -There are a few exceptions to this rule, most notably the `Cat` prefixed requests and -`Exists` requests. 
- - -[source, csharp] ----- -var exceptions = new[] - { - typeof(CatAliasesRequest), - typeof(CatAllocationRequest), - typeof(CatCountRequest), - typeof(CatFielddataRequest), - typeof(CatHealthRequest), - typeof(CatHelpRequest), - typeof(CatIndicesRequest), - typeof(CatMasterRequest), - typeof(CatNodesRequest), - typeof(CatPendingTasksRequest), - typeof(CatPluginsRequest), - typeof(CatRecoveryRequest), - typeof(CatSegmentsRequest), - typeof(CatShardsRequest), - typeof(CatThreadPoolRequest), - typeof(DocumentExistsRequest), - typeof(DocumentExistsRequest<>), - typeof(AliasExistsRequest), - typeof(IndexExistsRequest), - typeof(TypeExistsRequest), - typeof(IndexTemplateExistsRequest), - typeof(SearchExistsRequest), - typeof(SearchExistsRequest<>), - typeof(SearchTemplateRequest), - typeof(SearchTemplateRequest<>), - typeof(ScrollRequest), - typeof(SourceRequest), - typeof(SourceRequest<>), - typeof(ValidateQueryRequest<>), - typeof(GetAliasRequest), - typeof(CatNodeattrsRequest), - typeof(IndicesShardStoresRequest), - typeof(RenderSearchTemplateRequest) - }; -var types = typeof(IRequest).Assembly().GetTypes(); -var requests = new HashSet(types - .Where(t => - t.IsClass() && - !t.IsAbstract() && - typeof(IRequest).IsAssignableFrom(t) && - !typeof(IDescriptor).IsAssignableFrom(t) - && !exceptions.Contains(t)) - .Select(t => t.Name.Split('`')[0].Replace("Request", "")) - ); -var responses = types - .Where(t => t.IsClass() && !t.IsAbstract() && typeof(IResponse).IsAssignableFrom(t)) - .Select(t => t.Name.Split('`')[0].Replace("Response", "")); -requests.Except(responses).Should().BeEmpty(); ----- diff --git a/docs/asciidoc/CodeStandards/Queries.doc.asciidoc b/docs/asciidoc/CodeStandards/Queries.doc.asciidoc deleted file mode 100644 index 821fa33f5be..00000000000 --- a/docs/asciidoc/CodeStandards/Queries.doc.asciidoc +++ /dev/null @@ -1,31 +0,0 @@ -[source, csharp] ----- -var properties = from p in QueryProperties - let a = 
p.GetCustomAttributes().Concat(p.GetCustomAttributes()) - where a.Count() != 1 - select p; -properties.Should().BeEmpty(); -var staticProperties = from p in typeof(Query<>).GetMethods() - let name = p.Name.StartsWith("GeoShape") ? "GeoShape" : p.Name - select name; -var placeHolders = QueryPlaceHolderProperties.Select(p => p.Name.StartsWith("GeoShape") ? "GeoShape" : p.Name); -staticProperties.Distinct().Should().Contain(placeHolders.Distinct()); -var fluentMethods = from p in typeof(QueryContainerDescriptor<>).GetMethods() - let name = p.Name.StartsWith("GeoShape") ? "GeoShape" : p.Name - select name; -var placeHolders = QueryPlaceHolderProperties.Select(p => p.Name.StartsWith("GeoShape") ? "GeoShape" : p.Name); -fluentMethods.Distinct().Should().Contain(placeHolders.Distinct()); -var skipQueryImplementations = new[] { typeof(IFieldNameQuery), typeof(IFuzzyQuery<,>), typeof(IConditionlessQuery) }; -var queries = typeof(IQuery).Assembly().ExportedTypes - .Where(t => t.IsInterface() && typeof(IQuery).IsAssignableFrom(t)) - .Where(t => !skipQueryImplementations.Contains(t)) - .ToList(); -queries.Should().NotBeEmpty(); -var visitMethods = typeof(IQueryVisitor).GetMethods().Where(m => m.Name == "Visit"); -visitMethods.Should().NotBeEmpty(); -var missingTypes = from q in queries - let visitMethod = visitMethods.FirstOrDefault(m => m.GetParameters().First().ParameterType == q) - where visitMethod == null - select q; -missingTypes.Should().BeEmpty(); ----- diff --git a/docs/asciidoc/CommonOptions/DateMath/DateMathExpressions.doc.asciidoc b/docs/asciidoc/CommonOptions/DateMath/DateMathExpressions.doc.asciidoc deleted file mode 100644 index dd78ae09ee8..00000000000 --- a/docs/asciidoc/CommonOptions/DateMath/DateMathExpressions.doc.asciidoc +++ /dev/null @@ -1,97 +0,0 @@ -# Date Expressions -The date type supports using date math expression when using it in a query/filter -Whenever durations need to be specified, eg for a timeout parameter, the duration can be specified - 
-The expression starts with an "anchor" date, which can be either now or a date string (in the applicable format) ending with ||. -It can then follow by a math expression, supporting +, - and / (rounding). -The units supported are y (year), M (month), w (week), d (day), h (hour), m (minute), and s (second). -as a whole number representing time in milliseconds, or as a time value like `2d` for 2 days. - -Be sure to read the elasticsearch documentation {ref}/mapping-date-format.html#date-math[on this subject here] - - - -You can create simple expressions using any of the static methods on `DateMath` - -[source, csharp] ----- -Expect("now").WhenSerializing(Nest.DateMath.Now); ----- -[source, csharp] ----- -Expect("2015-05-05T00:00:00").WhenSerializing(Nest.DateMath.Anchored(new DateTime(2015,05, 05))); ----- -strings implicitly convert to date maths - -[source, csharp] ----- -Expect("now").WhenSerializing("now"); ----- -but are lenient to bad math expressions - -[source, csharp] ----- -var nonsense = "now||*asdaqwe"; ----- -[source, csharp] ----- -Expect(nonsense).WhenSerializing(nonsense) ----- -the resulting date math will assume the whole string is the anchor - -[source, csharp] ----- -.Result(dateMath => ((IDateMath)dateMath) - .Anchor.Match( - d => d.Should().NotBe(default(DateTime)), - s => s.Should().Be(nonsense) - ) - ); ----- -date's also implicitly convert to simple date math expressions - -[source, csharp] ----- -var date = new DateTime(2015, 05, 05); ----- -[source, csharp] ----- -Expect("2015-05-05T00:00:00").WhenSerializing(date) ----- -the anchor will be an actual DateTime, even after a serialization - deserialization round trip - -[source, csharp] ----- -.Result(dateMath => ((IDateMath)dateMath) - . 
Anchor.Match( - d => d.Should().Be(date), - s => s.Should().BeNull() - ) - ); ----- -Ranges can be chained on to simple expressions - -[source, csharp] ----- -Expect("now+1d").WhenSerializing(Nest.DateMath.Now.Add("1d")); ----- -plural means that you can chain multiple - -[source, csharp] ----- -Expect("now+1d-1m").WhenSerializing(Nest.DateMath.Now.Add("1d").Subtract(TimeSpan.FromMinutes(1))); ----- -a rounding value can also be chained at the end afterwhich no more ranges can be appended - -[source, csharp] ----- -Expect("now+1d-1m/d").WhenSerializing(Nest.DateMath.Now.Add("1d").Subtract(TimeSpan.FromMinutes(1)).RoundTo(Nest.TimeUnit.Day)); ----- -When anchoring date's we need to append `||` as clear separator between anchor and ranges - -[source, csharp] ----- -Expect("2015-05-05T00:00:00||+1d-1m").WhenSerializing(Nest.DateMath.Anchored(new DateTime(2015,05,05)).Add("1d").Subtract(TimeSpan.FromMinutes(1))); ----- -plural means that you can chain multiple - diff --git a/docs/asciidoc/CommonOptions/TimeUnit/TimeUnits.doc.asciidoc b/docs/asciidoc/CommonOptions/TimeUnit/TimeUnits.doc.asciidoc deleted file mode 100644 index a68dbb3deae..00000000000 --- a/docs/asciidoc/CommonOptions/TimeUnit/TimeUnits.doc.asciidoc +++ /dev/null @@ -1,115 +0,0 @@ -# Time units -Whenever durations need to be specified, eg for a timeout parameter, the duration can be specified -as a whole number representing time in milliseconds, or as a time value like `2d` for 2 days. - -## Using Time units in NEST -NEST uses `Time` to strongly type this and there are several ways to construct one. 
- -### Constructor -The most straight forward way to construct a `Time` is through its constructor - - -[source, csharp] ----- -var unitString = new Time("2d"); -var unitComposed = new Time(2, Nest.TimeUnit.Day); -var unitTimeSpan = new Time(TimeSpan.FromDays(2)); -var unitMilliseconds = new Time(1000 * 60 * 60 * 24 * 2); ----- -When serializing Time constructed from a string, milliseconds, composition of factor and -interval, or a `TimeSpan` the expression will be serialized as time unit string - -[source, csharp] ----- -Expect("2d") - .WhenSerializing(unitString) - .WhenSerializing(unitComposed) - .WhenSerializing(unitTimeSpan) - .WhenSerializing(unitMilliseconds); ----- -Milliseconds are always calculated even when not using the constructor that takes a long - -[source, csharp] ----- -unitMilliseconds.Milliseconds.Should().Be(1000*60*60*24*2); ----- -[source, csharp] ----- -unitComposed.Milliseconds.Should().Be(1000*60*60*24*2); -unitTimeSpan.Milliseconds.Should().Be(1000*60*60*24*2); -unitString.Milliseconds.Should().Be(1000*60*60*24*2); ----- - -### Implicit conversion -Alternatively `string`, `TimeSpan` and `double` can be implicitly assigned to `Time` properties and variables - - -[source, csharp] ----- -Time oneAndHalfYear = "1.5y"; -Time twoWeeks = TimeSpan.FromDays(14); -Time twoDays = 1000*60*60*24*2; -Expect("1.5y").WhenSerializing(oneAndHalfYear); -Expect("2w").WhenSerializing(twoWeeks); -Expect("2d").WhenSerializing(twoDays); -Time oneAndHalfYear = "1.5y"; -Time twoWeeks = TimeSpan.FromDays(14); -Time twoDays = 1000*60*60*24*2; ----- -Milliseconds are calculated even when values are not passed as long - -[source, csharp] ----- -oneAndHalfYear.Milliseconds.Should().BeGreaterThan(1); ----- -[source, csharp] ----- -twoWeeks.Milliseconds.Should().BeGreaterThan(1); ----- -This allows you to do comparisons on the expressions - -[source, csharp] ----- -oneAndHalfYear.Should().BeGreaterThan(twoWeeks); ----- -[source, csharp] ----- -(oneAndHalfYear > 
twoWeeks).Should().BeTrue(); -(oneAndHalfYear >= twoWeeks).Should().BeTrue(); -(twoDays >= new Time("2d")).Should().BeTrue(); -twoDays.Should().BeLessThan(twoWeeks); -(twoDays < twoWeeks).Should().BeTrue(); -(twoDays <= twoWeeks).Should().BeTrue(); -(twoDays <= new Time("2d")).Should().BeTrue(); ----- -And assert equality - -[source, csharp] ----- -twoDays.Should().Be(new Time("2d")); ----- -[source, csharp] ----- -(twoDays == new Time("2d")).Should().BeTrue(); -(twoDays != new Time("2.1d")).Should().BeTrue(); -(new Time("2.1d") == new Time(TimeSpan.FromDays(2.1))).Should().BeTrue(); ----- -Time units are specified as a union of either a `DateInterval` or `Time` -both of which implicitly convert to the `Union` of these two. - -[source, csharp] ----- -Expect("month").WhenSerializing>(DateInterval.Month); ----- -[source, csharp] ----- -Expect("day").WhenSerializing>(DateInterval.Day); -Expect("hour").WhenSerializing>(DateInterval.Hour); -Expect("minute").WhenSerializing>(DateInterval.Minute); -Expect("quarter").WhenSerializing>(DateInterval.Quarter); -Expect("second").WhenSerializing>(DateInterval.Second); -Expect("week").WhenSerializing>(DateInterval.Week); -Expect("year").WhenSerializing>(DateInterval.Year); -Expect("2d").WhenSerializing>((Time)"2d"); -Expect("1.16w").WhenSerializing>((Time)TimeSpan.FromDays(8.1)); ----- diff --git a/docs/asciidoc/QueryDsl/BoolDsl/BoolDsl.doc.asciidoc b/docs/asciidoc/QueryDsl/BoolDsl/BoolDsl.doc.asciidoc deleted file mode 100644 index 9dde4bb05c8..00000000000 --- a/docs/asciidoc/QueryDsl/BoolDsl/BoolDsl.doc.asciidoc +++ /dev/null @@ -1,126 +0,0 @@ -Writing boolean queries can grow rather verbose rather quickly using the query DSL e.g - -[source, csharp] ----- -var searchResults = this.Client.Search(s => s - .Query(q => q - .Bool(b => b - .Should( - bs => bs.Term(p => p.Name, "x"), - bs => bs.Term(p => p.Name, "y") - ) - ) - ) - ); ----- -now this is just a single bool with only two clauses, imagine multiple nested bools this 
quickly becomes an exercise in -hadouken indenting - -[[indent]] -.hadouken indenting example -image::http://i.imgur.com/BtjZedW.jpg[dead indent] - - -For this reason, NEST introduces operator overloading so complex bool queries become easier to write, the previous example will become. - -[source, csharp] ----- -var searchResults = this.Client.Search(s => s - .Query(q => q.Term(p => p.Name, "x") || q.Term(p => p.Name, "y")) - ); ----- -Or using the object initializer syntax - -[source, csharp] ----- -searchResults = this.Client.Search(new SearchRequest -{ - Query = new TermQuery { Field = "name", Value= "x" } - || new TermQuery { Field = Field(p=>p.Name), Value = "y" } -}); ----- -A naive implementation of operator overloading would rewrite - -`term && term && term` to - -> bool -> |___must -> |___term -> |___bool -> |___must -> |___term -> |___term - -As you can image this becomes unwieldy quite fast the more complex a query becomes NEST can spot these and -join them together to become a single bool query - -> bool -> |___must -> |___term -> |___term -> |___term - - - -The bool DSL offers also a short hand notation to mark a query as a must_not using ! 
- -And to mark a query as a filter using + - -Both of these can be combined with ands to a single bool query - -When combining multiple queries some or all possibly marked as must_not or filter NEST still combines to a single bool query - -> bool -> |___must -> | |___term -> | |___term -> | |___term -> | -> |___must_not -> |___term - - -[source, csharp] ----- -Assert( - q => q.Query() && q.Query() && q.Query() && !q.Query(), - Query && Query && Query && !Query, - c=> - { - c.Bool.Must.Should().HaveCount(3); - c.Bool.MustNot.Should().HaveCount(1); - }); -c.Bool.Must.Should().HaveCount(3); -c.Bool.MustNot.Should().HaveCount(1); ----- -Even more involved `term && term && term && !term && +term && +term` still only results in a single bool query: - -> bool -> |___must -> | |___term -> | |___term -> | |___term -> | -> |___must_not -> | |___term -> | -> |___filter -> |___term -> |___term - - -You can still mix and match actual bool queries with the bool dsl e.g - -`bool(must=term, term, term) && !term` - -it would still merge into a single bool query. - -[source, csharp] ----- -c.Bool.Should.Should().HaveCount(2); -var nestedBool = c.Bool.Should.Cast().First(b=>!string.IsNullOrEmpty(b.Bool?.Name)); -nestedBool.Bool.Should.Should().HaveCount(1); -nestedBool.Bool.Name.Should().Be(firstName); -assert(fluent.InvokeQuery(new QueryContainerDescriptor())); -assert((QueryContainer)ois); ----- diff --git a/docs/asciidoc/QueryDsl/Geo/Distance/DistanceUnits.doc.asciidoc b/docs/asciidoc/QueryDsl/Geo/Distance/DistanceUnits.doc.asciidoc deleted file mode 100644 index c049e778115..00000000000 --- a/docs/asciidoc/QueryDsl/Geo/Distance/DistanceUnits.doc.asciidoc +++ /dev/null @@ -1,98 +0,0 @@ -# Distance Units -Whenever distances need to be specified, e.g. for a geo distance query, the distance unit can be specified -as a double number representing distance in meters, as a new instance of a `Distance`, or as a string -of the form number and distance unit e.g. 
`"2.72km"` - -## Using Distance units in NEST -NEST uses `Distance` to strongly type distance units and there are several ways to construct one. - -### Constructor -The most straight forward way to construct a `Distance` is through its constructor - - -[source, csharp] ----- -var unitComposed = new Nest.Distance(25); -var unitComposedWithUnits = new Nest.Distance(25, DistanceUnit.Meters); ----- -When serializing Distance constructed from a string, composition of distance value and unit - -[source, csharp] ----- -Expect("25.0m") - .WhenSerializing(unitComposed) - .WhenSerializing(unitComposedWithUnits); ----- - -### Implicit conversion -Alternatively a distance unit `string` can be assigned to a `Distance`, resulting in an implicit conversion to a new `Distance` instance. -If no `DistanceUnit` is specified, the default distance unit is meters - - -[source, csharp] ----- -Nest.Distance distanceString = "25"; -Nest.Distance distanceStringWithUnits = "25m"; -Expect(new Nest.Distance(25)) - .WhenSerializing(distanceString) - .WhenSerializing(distanceStringWithUnits); ----- - -### Supported units -A number of distance units are supported, from millimeters to nautical miles - - -Miles - -[source, csharp] ----- -Expect("0.62mi").WhenSerializing(new Nest.Distance(0.62, DistanceUnit.Miles)); ----- -Yards - -[source, csharp] ----- -Expect("9.0yd").WhenSerializing(new Nest.Distance(9, DistanceUnit.Yards)); ----- -Feet - -[source, csharp] ----- -Expect("3.33ft").WhenSerializing(new Nest.Distance(3.33, DistanceUnit.Feet)); ----- -Inches - -[source, csharp] ----- -Expect("43.23in").WhenSerializing(new Nest.Distance(43.23, DistanceUnit.Inch)); ----- -Kilometers - -[source, csharp] ----- -Expect("0.1km").WhenSerializing(new Nest.Distance(0.1, DistanceUnit.Kilometers)); ----- -Meters - -[source, csharp] ----- -Expect("400.0m").WhenSerializing(new Nest.Distance(400, DistanceUnit.Meters)); ----- -Centimeters - -[source, csharp] ----- -Expect("123.456cm").WhenSerializing(new 
Nest.Distance(123.456, DistanceUnit.Centimeters)); ----- -Millimeters - -[source, csharp] ----- -Expect("2.0mm").WhenSerializing(new Nest.Distance(2, DistanceUnit.Millimeters)); ----- -Nautical Miles - -[source, csharp] ----- -Expect("45.5nmi").WhenSerializing(new Nest.Distance(45.5, DistanceUnit.NauticalMiles)); ----- diff --git a/docs/asciidoc/aggregations-usage.asciidoc b/docs/asciidoc/aggregations-usage.asciidoc new file mode 100644 index 00000000000..a79afee8902 --- /dev/null +++ b/docs/asciidoc/aggregations-usage.asciidoc @@ -0,0 +1,92 @@ +:includes-from-dirs: aggregations/bucket,aggregations/metric,aggregations/pipeline + +include::../../docs/asciidoc/aggregations/bucket/children/children-aggregation-mapping.asciidoc[] + +include::../../docs/asciidoc/aggregations/bucket/children/children-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/bucket/date-histogram/date-histogram-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/bucket/date-range/date-range-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/bucket/filter/filter-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/bucket/filters/filters-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/bucket/geo-distance/geo-distance-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/bucket/geo-hash-grid/geo-hash-grid-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/bucket/global/global-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/bucket/histogram/histogram-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/bucket/ip-range/ip-range-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/bucket/missing/missing-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/bucket/nested/nested-aggregation-usage.asciidoc[] + 
+include::../../docs/asciidoc/aggregations/bucket/range/range-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/bucket/reverse-nested/reverse-nested-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/bucket/sampler/sampler-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/bucket/significant-terms/significant-terms-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/bucket/terms/terms-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/metric/average/average-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/metric/cardinality/cardinality-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/metric/extended-stats/extended-stats-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/metric/geo-bounds/geo-bounds-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/metric/max/max-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/metric/min/min-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/metric/percentile-ranks/percentile-ranks-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/metric/percentiles/percentiles-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/metric/scripted-metric/scripted-metric-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/metric/stats/stats-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/metric/sum/sum-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/metric/top-hits/top-hits-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/metric/value-count/value-count-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/pipeline/average-bucket/average-bucket-aggregation-usage.asciidoc[] + 
+include::../../docs/asciidoc/aggregations/pipeline/bucket-script/bucket-script-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/pipeline/bucket-selector/bucket-selector-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/pipeline/cumulative-sum/cumulative-sum-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/pipeline/derivative/derivative-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/pipeline/max-bucket/max-bucket-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/pipeline/min-bucket/min-bucket-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/pipeline/moving-average/moving-average-ewma-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/pipeline/moving-average/moving-average-holt-linear-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/pipeline/moving-average/moving-average-holt-winters-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/pipeline/moving-average/moving-average-linear-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/pipeline/moving-average/moving-average-simple-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/pipeline/serial-differencing/serial-differencing-aggregation-usage.asciidoc[] + +include::../../docs/asciidoc/aggregations/pipeline/sum-bucket/sum-bucket-aggregation-usage.asciidoc[] + diff --git a/docs/asciidoc/aggregations.asciidoc b/docs/asciidoc/aggregations.asciidoc new file mode 100644 index 00000000000..3fc54ece781 --- /dev/null +++ b/docs/asciidoc/aggregations.asciidoc @@ -0,0 +1,104 @@ +:output-dir: aggregations + +[[aggregations]] += Aggregations + +[partintro] +-- +Aggregations are arguably one of the most powerful features of Elasticsearch and NEST +exposes all of the available Aggregation types + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* 
<> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +-- + +include::{output-dir}/writing-aggregations.asciidoc[] + +include::aggregations-usage.asciidoc[] + diff --git a/docs/asciidoc/aggregations/bucket/children/children-aggregation-mapping.asciidoc b/docs/asciidoc/aggregations/bucket/children/children-aggregation-mapping.asciidoc new file mode 100644 index 00000000000..e4ef930dedb --- /dev/null +++ b/docs/asciidoc/aggregations/bucket/children/children-aggregation-mapping.asciidoc @@ -0,0 +1,27 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[child-aggregation-mapping]] +== Child Aggregation Mapping + +To use the child aggregation you have to make sure + a `_parent` mapping is in place, here we create the project + index with two mapped types, `project` and `commitactivity` and + we add a `_parent` mapping from `commitactivity` to `parent` + +[source,csharp] +---- +var createProjectIndex = TestClient.GetClient().CreateIndex(typeof(Project), c => c + .Mappings(map => map + .Map(tm => tm.AutoMap()) + .Map(tm => tm + .Parent() <1> + ) + ) +); +---- +<1> Set the parent of `CommitActivity` to the `Project` type + diff --git a/docs/asciidoc/aggregations/bucket/children/children-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/bucket/children/children-aggregation-usage.asciidoc new file mode 100644 index 00000000000..03824b12ad6 --- /dev/null +++ b/docs/asciidoc/aggregations/bucket/children/children-aggregation-usage.asciidoc @@ -0,0 +1,70 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: 
https://www.nuget.org/packages + +[[children-aggregation-usage]] +== Children Aggregation Usage + +A special single bucket aggregation that enables aggregating from buckets on parent document types to +buckets on child documents. + +Be sure to read {ref_current}/search-aggregations-bucket-children-aggregation.html[the elasticsearch documentation on Children Aggregation] + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(aggs => aggs + .Children("name_of_child_agg", child => child + .Aggregations(childAggs => childAggs + .Average("average_per_child", avg => avg.Field(p => p.ConfidenceFactor)) + .Max("max_per_child", avg => avg.Field(p => p.ConfidenceFactor)) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new ChildrenAggregation("name_of_child_agg", typeof(CommitActivity)) + { + Aggregations = + new AverageAggregation("average_per_child", "confidenceFactor") && + new MaxAggregation("max_per_child", "confidenceFactor") + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "name_of_child_agg": { + "children": { + "type": "commits" + }, + "aggs": { + "average_per_child": { + "avg": { + "field": "confidenceFactor" + } + }, + "max_per_child": { + "max": { + "field": "confidenceFactor" + } + } + } + } + } +} +---- + diff --git a/docs/asciidoc/aggregations/bucket/date-histogram/date-histogram-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/bucket/date-histogram/date-histogram-aggregation-usage.asciidoc new file mode 100644 index 00000000000..de2e9bc9005 --- /dev/null +++ b/docs/asciidoc/aggregations/bucket/date-histogram/date-histogram-aggregation-usage.asciidoc @@ -0,0 +1,139 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[date-histogram-aggregation-usage]] +== Date Histogram Aggregation Usage + +A 
multi-bucket aggregation similar to the histogram except it can only be applied on date values. +From a functionality perspective, this histogram supports the same features as the normal histogram. +The main difference is that the interval can be specified by date/time expressions. + +NOTE: When specifying a `format` **and** `extended_bounds`, in order for Elasticsearch to be able to parse +the serialized ``DateTime`` of `extended_bounds` correctly, the `date_optional_time` format is included +as part of the `format` value. + +Be sure to read the elasticsearch documentation on {ref_current}/search-aggregations-bucket-datehistogram-aggregation.html[Date Histogram Aggregation]. + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Size(0) +.Aggregations(aggs => aggs + .DateHistogram("projects_started_per_month", date => date + .Field(p => p.StartedOn) + .Interval(DateInterval.Month) + .MinimumDocumentCount(2) + .Format("yyyy-MM-dd'T'HH:mm:ss") + .ExtendedBounds(FixedDate.AddYears(-1), FixedDate.AddYears(1)) + .Order(HistogramOrder.CountAscending) + .Missing(FixedDate) + .Aggregations(childAggs => childAggs + .Nested("project_tags", n => n + .Path(p => p.Tags) + .Aggregations(nestedAggs => nestedAggs + .Terms("tags", avg => avg.Field(p => p.Tags.First().Name)) + ) + ) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Size = 0, + Aggregations = new DateHistogramAggregation("projects_started_per_month") + { + Field = Field(p => p.StartedOn), + Interval = DateInterval.Month, + MinimumDocumentCount = 2, + Format = "yyyy-MM-dd'T'HH:mm:ss", + ExtendedBounds = new ExtendedBounds + { + Minimum = FixedDate.AddYears(-1), + Maximum = FixedDate.AddYears(1), + }, + Order = HistogramOrder.CountAscending, + Missing = FixedDate, + Aggregations = new NestedAggregation("project_tags") + { + Path = Field(p => p.Tags), + Aggregations = new TermsAggregation("tags") + { + Field = Field(p => p.Tags.First().Name) + } + } + } +} 
+---- + +[source,javascript] +.Example json output +---- +{ + "size": 0, + "aggs": { + "projects_started_per_month": { + "date_histogram": { + "field": "startedOn", + "interval": "month", + "min_doc_count": 2, + "format": "yyyy-MM-dd'T'HH:mm:ss||date_optional_time", + "order": { + "_count": "asc" + }, + "extended_bounds": { + "min": "2014-06-06T12:01:02.123", + "max": "2016-06-06T12:01:02.123" + }, + "missing": "2015-06-06T12:01:02.123" + }, + "aggs": { + "project_tags": { + "nested": { + "path": "tags" + }, + "aggs": { + "tags": { + "terms": { + "field": "tags.name" + } + } + } + } + } + } + } +} +---- + +=== Handling responses + +Using the `.Aggs` aggregation helper on `ISearchResponse`, we can fetch our aggregation results easily +in the correct type. <> + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); + +var dateHistogram = response.Aggs.DateHistogram("projects_started_per_month"); +dateHistogram.Should().NotBeNull(); +dateHistogram.Buckets.Should().NotBeNull(); +dateHistogram.Buckets.Count.Should().BeGreaterThan(10); +item.Date.Should().NotBe(default(DateTime)); +item.DocCount.Should().BeGreaterThan(0); +var nested = item.Nested("project_tags"); +nested.Should().NotBeNull(); +var nestedTerms = nested.Terms("tags"); +nestedTerms.Buckets.Count.Should().BeGreaterThan(0); +---- + diff --git a/docs/asciidoc/aggregations/bucket/date-range/date-range-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/bucket/date-range/date-range-aggregation-usage.asciidoc new file mode 100644 index 00000000000..35c323fe27b --- /dev/null +++ b/docs/asciidoc/aggregations/bucket/date-range/date-range-aggregation-usage.asciidoc @@ -0,0 +1,116 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[date-range-aggregation-usage]] +== Date Range Aggregation Usage + +A range aggregation that is dedicated for date values. 
The main difference between this aggregation and the normal range aggregation is that the `from` +and `to` values can be expressed in `DateMath` expressions, and it is also possible to specify a date format by which the from and +to response fields will be returned. + +IMPORTANT: this aggregation includes the `from` value and excludes the `to` value for each range. + +Be sure to read {ref_current}/search-aggregations-bucket-daterange-aggregation.html[the elasticsearch documentation on Date Range Aggregation] + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(aggs => aggs + .DateRange("projects_date_ranges", date => date + .Field(p => p.StartedOn) + .Ranges( + r => r.From(DateMath.Anchored(FixedDate).Add("2d")).To(DateMath.Now), + r => r.To(DateMath.Now.Add(TimeSpan.FromDays(1)).Subtract("30m").RoundTo(TimeUnit.Hour)), + r => r.From(DateMath.Anchored("2012-05-05").Add(TimeSpan.FromDays(1)).Subtract("1m")) + ) + .Aggregations(childAggs => childAggs + .Terms("project_tags", avg => avg.Field(p => p.Tags)) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new DateRangeAggregation("projects_date_ranges") + { + Field = Field(p => p.StartedOn), + Ranges = new List + { + new DateRangeExpression { From = DateMath.Anchored(FixedDate).Add("2d"), To = DateMath.Now}, + new DateRangeExpression { To = DateMath.Now.Add(TimeSpan.FromDays(1)).Subtract("30m").RoundTo(TimeUnit.Hour) }, + new DateRangeExpression { From = DateMath.Anchored("2012-05-05").Add(TimeSpan.FromDays(1)).Subtract("1m") } + }, + Aggregations = + new TermsAggregation("project_tags") { Field = Field(p => p.Tags) } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "projects_date_ranges": { + "date_range": { + "field": "startedOn", + "ranges": [ + { + "to": "now", + "from": "2015-06-06T12:01:02.123||+2d" + }, + { + "to": "now+1d-30m/h" + }, + { + "from": "2012-05-05||+1d-1m" + } + ] + }, + 
"aggs": { + "project_tags": { + "terms": { + "field": "tags" + } + } + } + } + } +} +---- + +=== Handling Responses + +Using the `.Agg` aggregation helper we can fetch our aggregation results easily +in the correct type. <> + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); + +var dateHistogram = response.Aggs.DateRange("projects_date_ranges"); +dateHistogram.Should().NotBeNull(); +dateHistogram.Buckets.Should().NotBeNull(); +---- + +We specified three ranges so we expect to have three of them in the response + +=== Handling Responses + +[source,csharp] +---- +dateHistogram.Buckets.Count.Should().Be(3); + +item.DocCount.Should().BeGreaterThan(0); +---- + diff --git a/docs/asciidoc/aggregations/bucket/filter/filter-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/bucket/filter/filter-aggregation-usage.asciidoc new file mode 100644 index 00000000000..39c76950f61 --- /dev/null +++ b/docs/asciidoc/aggregations/bucket/filter/filter-aggregation-usage.asciidoc @@ -0,0 +1,207 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[filter-aggregation-usage]] +== Filter Aggregation Usage + +Defines a single bucket of all the documents in the current document set context that match a specified filter. +Often this will be used to narrow down the current aggregation context to a specific set of documents. 
+ +Be sure to read the Elasticsearch documentation on {ref_current}/search-aggregations-bucket-filter-aggregation.html[Filter Aggregation] + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(aggs => aggs + .Filter("bethels_projects", date => date + .Filter(q => q.Term(p => p.LeadDeveloper.FirstName, FirstNameToFind)) + .Aggregations(childAggs => childAggs + .Terms("project_tags", avg => avg.Field(p => p.CuratedTags.First().Name)) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new FilterAggregation("bethels_projects") + { + Filter = new TermQuery {Field = Field(p => p.LeadDeveloper.FirstName), Value = FirstNameToFind}, + Aggregations = + new TermsAggregation("project_tags") {Field = Field(p => p.CuratedTags.First().Name)} + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "bethels_projects": { + "filter": { + "term": { + "leadDeveloper.firstName": { + "value": "pierce" + } + } + }, + "aggs": { + "project_tags": { + "terms": { + "field": "curatedTags.name" + } + } + } + } + } +} +---- + +=== Handling Responses + +Using the `.Aggs` aggregation helper we can fetch our aggregation results easily +in the correct type. <> + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); + +var filterAgg = response.Aggs.Filter("bethels_projects"); +filterAgg.Should().NotBeNull(); +filterAgg.DocCount.Should().BeGreaterThan(0); +var tags = filterAgg.Terms("project_tags"); +tags.Should().NotBeNull(); +tags.Buckets.Should().NotBeEmpty(); +---- + +[[empty-filter]] +[float] +== Empty Filter + +When the collection of filters is empty or all are conditionless, NEST will serialize them +to an empty object. 
+ +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(aggs => aggs + .Filter("empty_filter", date => date + .Filter(f => f + .Bool(b => b + .Filter(new QueryContainer[0]) + ) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new FilterAggregation("empty_filter") + { + Filter = new BoolQuery + { + Filter = new List() + } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "empty_filter": { + "filter": {} + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +response.Aggs.Filter("empty_filter").DocCount.Should().BeGreaterThan(0); +---- + +[[inline-script-filter]] +[float] +== Inline Script Filter + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(aggs => aggs + .Filter(_aggName, date => date + .Filter(f => f + .Script(b => b + .Inline(_ctxNumberofCommits) + ) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new FilterAggregation(_aggName) + { + Filter = new ScriptQuery + { + Inline = _ctxNumberofCommits + } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "script_filter": { + "filter": { + "script": { + "script": { + "inline": "_source.numberOfCommits > 0" + } + } + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +response.Aggs.Filter(_aggName).DocCount.Should().BeGreaterThan(0); +---- + diff --git a/docs/asciidoc/aggregations/bucket/filters/filters-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/bucket/filters/filters-aggregation-usage.asciidoc new file mode 100644 index 00000000000..c533f4de65e --- /dev/null +++ b/docs/asciidoc/aggregations/bucket/filters/filters-aggregation-usage.asciidoc @@ -0,0 +1,349 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: 
https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[filters-aggregation-usage]] +== Filters Aggregation Usage + +Defines a multi bucket aggregations where each bucket is associated with a filter. +Each bucket will collect all documents that match its associated filter. For documents +that do not match any filter, these will be collected in the _other bucket_. + +Be sure to read the Elasticsearch documentation on {ref_current}/search-aggregations-bucket-filters-aggregation.html[Filters Aggregation]. + +[[named-filters]] +[float] +== Named filters + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(aggs => aggs + .Filters("projects_by_state", agg => agg + .OtherBucket() + .OtherBucketKey("other_states_of_being") + .NamedFilters(filters => filters + .Filter("belly_up", f => f.Term(p => p.State, StateOfBeing.BellyUp)) + .Filter("stable", f => f.Term(p => p.State, StateOfBeing.Stable)) + .Filter("very_active", f => f.Term(p => p.State, StateOfBeing.VeryActive)) + ) + .Aggregations(childAggs => childAggs + .Terms("project_tags", avg => avg.Field(p => p.CuratedTags.First().Name)) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new FiltersAggregation("projects_by_state") + { + OtherBucket = true, + OtherBucketKey = "other_states_of_being", + Filters = new NamedFiltersContainer + { + { "belly_up", Query.Term(p=>p.State, StateOfBeing.BellyUp) }, + { "stable", Query.Term(p=>p.State, StateOfBeing.Stable) }, + { "very_active", Query.Term(p=>p.State, StateOfBeing.VeryActive) } + }, + Aggregations = + new TermsAggregation("project_tags") { Field = Field(p => p.CuratedTags.First().Name) } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "projects_by_state": { + "filters": { + "other_bucket": true, + "other_bucket_key": "other_states_of_being", + "filters": { + "belly_up": { + "term": { + "state": { + "value": 
"BellyUp" + } + } + }, + "stable": { + "term": { + "state": { + "value": "Stable" + } + } + }, + "very_active": { + "term": { + "state": { + "value": "VeryActive" + } + } + } + } + }, + "aggs": { + "project_tags": { + "terms": { + "field": "curatedTags.name" + } + } + } + } + } +} +---- + +=== Handling Responses + +Using the `.Agg` aggregation helper we can fetch our aggregation results easily +in the correct type. <> + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); + +var filterAgg = response.Aggs.Filters("projects_by_state"); +filterAgg.Should().NotBeNull(); +var namedResult = filterAgg.NamedBucket("belly_up"); +namedResult.Should().NotBeNull(); +namedResult.DocCount.Should().BeGreaterThan(0); +namedResult = filterAgg.NamedBucket("stable"); +namedResult.Should().NotBeNull(); +namedResult.DocCount.Should().BeGreaterThan(0); +namedResult = filterAgg.NamedBucket("very_active"); +namedResult.Should().NotBeNull(); +namedResult.DocCount.Should().BeGreaterThan(0); +namedResult = filterAgg.NamedBucket("other_states_of_being"); +namedResult.Should().NotBeNull(); +namedResult.DocCount.Should().Be(0); +---- + +[[anonymous-filters]] +[float] +== Anonymous filters + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(aggs => aggs + .Filters("projects_by_state", agg => agg + .OtherBucket() + .AnonymousFilters( + f => f.Term(p => p.State, StateOfBeing.BellyUp), + f => f.Term(p => p.State, StateOfBeing.Stable), + f => f.Term(p => p.State, StateOfBeing.VeryActive) + ) + .Aggregations(childAggs => childAggs + .Terms("project_tags", avg => avg.Field(p => p.CuratedTags.First().Name)) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new FiltersAggregation("projects_by_state") + { + OtherBucket = true, + Filters = new List + { + Query.Term(p=>p.State, StateOfBeing.BellyUp) , + Query.Term(p=>p.State, StateOfBeing.Stable) , + Query.Term(p=>p.State, StateOfBeing.VeryActive) + }, + 
Aggregations = + new TermsAggregation("project_tags") + { + Field = Field(p => p.CuratedTags.First().Name) + } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "projects_by_state": { + "filters": { + "other_bucket": true, + "filters": [ + { + "term": { + "state": { + "value": "BellyUp" + } + } + }, + { + "term": { + "state": { + "value": "Stable" + } + } + }, + { + "term": { + "state": { + "value": "VeryActive" + } + } + } + ] + }, + "aggs": { + "project_tags": { + "terms": { + "field": "curatedTags.name" + } + } + } + } + } +} +---- + +=== Handling Responses + +Using the `.Agg` aggregation helper we can fetch our aggregation results easily +in the correct type. <> + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); + +var filterAgg = response.Aggs.Filters("projects_by_state"); +filterAgg.Should().NotBeNull(); +var results = filterAgg.AnonymousBuckets(); +results.Count.Should().Be(4); +singleBucket.DocCount.Should().BeGreaterThan(0); +results.Last().DocCount.Should().Be(0); <1> +---- +<1> The last bucket is the _other bucket_ + +[[empty-filters]] +[float] +== Empty Filters + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(aggs => aggs + .Filters("empty_filters", agg => agg + .AnonymousFilters() + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new FiltersAggregation("empty_filters") + { + Filters = new List() + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "empty_filters": { + "filters": { + "filters": [] + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +response.Aggs.Filters("empty_filters").Buckets.Should().BeEmpty(); +---- + +[[conditionless-filters]] +[float] +== Conditionless Filters + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(aggs => aggs + .Filters("conditionless_filters", agg => agg + .AnonymousFilters( + 
q => new QueryContainer() + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new FiltersAggregation("conditionless_filters") + { + Filters = new List + { + new QueryContainer() + } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "conditionless_filters": { + "filters": { + "filters": [] + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +response.Aggs.Filters("conditionless_filters").Buckets.Should().BeEmpty(); +---- + diff --git a/docs/asciidoc/aggregations/bucket/geo-distance/geo-distance-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/bucket/geo-distance/geo-distance-aggregation-usage.asciidoc new file mode 100644 index 00000000000..c02be78bf43 --- /dev/null +++ b/docs/asciidoc/aggregations/bucket/geo-distance/geo-distance-aggregation-usage.asciidoc @@ -0,0 +1,89 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[geo-distance-aggregation-usage]] +== Geo Distance Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(a => a + .GeoDistance("rings_around_amsterdam", g => g + .Field(p => p.Location) + .Origin(52.376, 4.894) + .Ranges( + r => r.To(100), + r => r.From(100).To(300), + r => r.From(300) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new GeoDistanceAggregation("rings_around_amsterdam") + { + Field = Field((Project p) => p.Location), + Origin = "52.376, 4.894", + Ranges = new List + { + new Nest.Range { To = 100 }, + new Nest.Range { From = 100, To = 300 }, + new Nest.Range { From = 300 } + } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "rings_around_amsterdam": { + "geo_distance": { + "field": "location", + 
"origin": { + "lat": 52.376, + "lon": 4.894 + }, + "ranges": [ + { + "to": 100.0 + }, + { + "from": 100.0, + "to": 300.0 + }, + { + "from": 300.0 + } + ] + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var ringsAroundAmsterdam = response.Aggs.GeoDistance("rings_around_amsterdam"); +ringsAroundAmsterdam.Should().NotBeNull(); +ringsAroundAmsterdam.Buckets.Where(r => r.Key == "*-100.0").FirstOrDefault().Should().NotBeNull(); +ringsAroundAmsterdam.Buckets.Where(r => r.Key == "100.0-300.0").FirstOrDefault().Should().NotBeNull(); +ringsAroundAmsterdam.Buckets.Where(r => r.Key == "300.0-*").FirstOrDefault().Should().NotBeNull(); +---- + diff --git a/docs/asciidoc/aggregations/bucket/geo-hash-grid/geo-hash-grid-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/bucket/geo-hash-grid/geo-hash-grid-aggregation-usage.asciidoc new file mode 100644 index 00000000000..9eb3bdafbf6 --- /dev/null +++ b/docs/asciidoc/aggregations/bucket/geo-hash-grid/geo-hash-grid-aggregation-usage.asciidoc @@ -0,0 +1,66 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[geo-hash-grid-aggregation-usage]] +== Geo Hash Grid Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(a => a + .GeoHash("my_geohash_grid", g => g + .Field(p => p.Location) + .GeoHashPrecision(GeoHashPrecision.Precision3) + .Size(1000) + .ShardSize(100) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new GeoHashGridAggregation("my_geohash_grid") + { + Field = Field(p => p.Location), + Precision = GeoHashPrecision.Precision3, + Size = 1000, + ShardSize = 100 + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "my_geohash_grid": { + "geohash_grid": { + "field": "location", + "precision": 3, + "size": 
1000, + "shard_size": 100 + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var myGeoHashGrid = response.Aggs.GeoHash("my_geohash_grid"); +myGeoHashGrid.Should().NotBeNull(); +---- + diff --git a/docs/asciidoc/aggregations/bucket/global/global-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/bucket/global/global-aggregation-usage.asciidoc new file mode 100644 index 00000000000..6bfd96b6823 --- /dev/null +++ b/docs/asciidoc/aggregations/bucket/global/global-aggregation-usage.asciidoc @@ -0,0 +1,71 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[global-aggregation-usage]] +== Global Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(a => a + .Global("all_projects", g => g + .Aggregations(aa => aa + .Terms("names", t => t + .Field(p => p.Name) + ) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new GlobalAggregation("all_projects") + { + Aggregations = new TermsAggregation("names") + { + Field = Field(p => p.Name) + } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "all_projects": { + "global": {}, + "aggs": { + "names": { + "terms": { + "field": "name" + } + } + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var allProjects = response.Aggs.Global("all_projects"); +allProjects.Should().NotBeNull(); +var names = allProjects.Terms("names"); +names.Should().NotBeNull(); +---- + diff --git a/docs/asciidoc/aggregations/bucket/histogram/histogram-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/bucket/histogram/histogram-aggregation-usage.asciidoc new file mode 100644 index 00000000000..eecbd9d2b7d --- /dev/null +++ 
b/docs/asciidoc/aggregations/bucket/histogram/histogram-aggregation-usage.asciidoc @@ -0,0 +1,69 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[histogram-aggregation-usage]] +== Histogram Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(a => a + .Histogram("commits", h => h + .Field(p => p.NumberOfCommits) + .Interval(100) + .Missing(0) + .Order(HistogramOrder.KeyDescending) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new HistogramAggregation("commits") + { + Field = Field(p => p.NumberOfCommits), + Interval = 100, + Missing = 0, + Order = HistogramOrder.KeyDescending + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "commits": { + "histogram": { + "field": "numberOfCommits", + "interval": 100.0, + "missing": 0.0, + "order": { + "_key": "desc" + } + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var commits = response.Aggs.Histogram("commits"); +commits.Should().NotBeNull(); +item.DocCount.Should().BeGreaterThan(0); +---- + diff --git a/docs/asciidoc/aggregations/bucket/ip-range/ip-range-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/bucket/ip-range/ip-range-aggregation-usage.asciidoc new file mode 100644 index 00000000000..56904224efd --- /dev/null +++ b/docs/asciidoc/aggregations/bucket/ip-range/ip-range-aggregation-usage.asciidoc @@ -0,0 +1,77 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[ip-range-aggregation-usage]] +== Ip Range Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(a => a + .IpRange("ip_ranges", ip => ip + .Field(p => 
p.LeadDeveloper.IPAddress) + .Ranges( + r => r.To("10.0.0.5"), + r => r.From("10.0.0.5") + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new IpRangeAggregation("ip_ranges") + { + Field = Field((Project p) => p.LeadDeveloper.IPAddress), + Ranges = new List + { + new Nest.IpRange { To = "10.0.0.5" }, + new Nest.IpRange { From = "10.0.0.5" } + } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "ip_ranges": { + "ip_range": { + "field": "leadDeveloper.iPAddress", + "ranges": [ + { + "to": "10.0.0.5" + }, + { + "from": "10.0.0.5" + } + ] + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var ipRanges = response.Aggs.IpRange("ip_ranges"); +ipRanges.Should().NotBeNull(); +ipRanges.Buckets.Should().NotBeNull(); +ipRanges.Buckets.Count.Should().BeGreaterThan(0); +range.DocCount.Should().BeGreaterThan(0); +---- + diff --git a/docs/asciidoc/aggregations/bucket/missing/missing-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/bucket/missing/missing-aggregation-usage.asciidoc new file mode 100644 index 00000000000..e4f735ab277 --- /dev/null +++ b/docs/asciidoc/aggregations/bucket/missing/missing-aggregation-usage.asciidoc @@ -0,0 +1,57 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[missing-aggregation-usage]] +== Missing Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(a => a + .Missing("projects_without_a_description", m => m + .Field(p => p.Description) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new MissingAggregation("projects_without_a_description") + { + Field = Field(p => p.Description) + } +} +---- + +[source,javascript] +.Example json output +---- +{ 
+ "aggs": { + "projects_without_a_description": { + "missing": { + "field": "description" + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var projectsWithoutDesc = response.Aggs.Missing("projects_without_a_description"); +projectsWithoutDesc.Should().NotBeNull(); +---- + diff --git a/docs/asciidoc/aggregations/bucket/nested/nested-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/bucket/nested/nested-aggregation-usage.asciidoc new file mode 100644 index 00000000000..d9d2836935d --- /dev/null +++ b/docs/asciidoc/aggregations/bucket/nested/nested-aggregation-usage.asciidoc @@ -0,0 +1,77 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[nested-aggregation-usage]] +== Nested Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(a => a + .Nested("tags", n => n + .Path(p => p.Tags) + .Aggregations(aa => aa + .Terms("tag_names", t => t + .Field(p => p.Tags.Suffix("name")) + ) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new NestedAggregation("tags") + { + Path = "tags", + Aggregations = new TermsAggregation("tag_names") + { + Field = "tags.name" + } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "tags": { + "nested": { + "path": "tags" + }, + "aggs": { + "tag_names": { + "terms": { + "field": "tags.name" + } + } + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var tags = response.Aggs.Nested("tags"); +tags.Should().NotBeNull(); +var tagNames = tags.Terms("tag_names"); +tagNames.Should().NotBeNull(); +item.Key.Should().NotBeNullOrEmpty(); +item.DocCount.Should().BeGreaterThan(0); +---- + diff --git 
a/docs/asciidoc/aggregations/bucket/range/range-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/bucket/range/range-aggregation-usage.asciidoc new file mode 100644 index 00000000000..54bbe77c892 --- /dev/null +++ b/docs/asciidoc/aggregations/bucket/range/range-aggregation-usage.asciidoc @@ -0,0 +1,84 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[range-aggregation-usage]] +== Range Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(a => a + .Range("commit_ranges", ra => ra + .Field(p => p.NumberOfCommits) + .Ranges( + r => r.To(100), + r => r.From(100).To(500), + r => r.From(500) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new RangeAggregation("commit_ranges") + { + Field = Field(p => p.NumberOfCommits), + Ranges = new List + { + { new Nest.Range { To = 100 } }, + { new Nest.Range { From = 100, To = 500 } }, + { new Nest.Range { From = 500 } } + } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "commit_ranges": { + "range": { + "field": "numberOfCommits", + "ranges": [ + { + "to": 100.0 + }, + { + "from": 100.0, + "to": 500.0 + }, + { + "from": 500.0 + } + ] + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var commitRanges = response.Aggs.Range("commit_ranges"); +commitRanges.Should().NotBeNull(); +commitRanges.Buckets.Count.Should().Be(3); +commitRanges.Buckets.Where(r => r.Key == "*-100.0").FirstOrDefault().Should().NotBeNull(); +commitRanges.Buckets.Where(r => r.Key == "100.0-500.0").FirstOrDefault().Should().NotBeNull(); +commitRanges.Buckets.Where(r => r.Key == "500.0-*").FirstOrDefault().Should().NotBeNull(); +---- + diff --git 
a/docs/asciidoc/aggregations/bucket/reverse-nested/reverse-nested-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/bucket/reverse-nested/reverse-nested-aggregation-usage.asciidoc new file mode 100644 index 00000000000..f51fd140d5a --- /dev/null +++ b/docs/asciidoc/aggregations/bucket/reverse-nested/reverse-nested-aggregation-usage.asciidoc @@ -0,0 +1,111 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[reverse-nested-aggregation-usage]] +== Reverse Nested Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(a => a + .Nested("tags", n => n + .Path(p => p.Tags) + .Aggregations(aa => aa + .Terms("tag_names", t => t + .Field(p => p.Tags.Suffix("name")) + .Aggregations(aaa => aaa + .ReverseNested("tags_to_project", r => r + .Aggregations(aaaa => aaaa + .Terms("top_projects_per_tag", tt => tt + .Field(p => p.Name) + ) + ) + ) + ) + ) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new NestedAggregation("tags") + { + Path = "tags", + Aggregations = new TermsAggregation("tag_names") + { + Field = "tags.name", + Aggregations = new ReverseNestedAggregation("tags_to_project") + { + Aggregations = new TermsAggregation("top_projects_per_tag") + { + Field = Field(p => p.Name) + } + } + } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "tags": { + "nested": { + "path": "tags" + }, + "aggs": { + "tag_names": { + "terms": { + "field": "tags.name" + }, + "aggs": { + "tags_to_project": { + "reverse_nested": {}, + "aggs": { + "top_projects_per_tag": { + "terms": { + "field": "name" + } + } + } + } + } + } + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var tags = response.Aggs.Nested("tags"); +tags.Should().NotBeNull(); +var tagNames = 
tags.Terms("tag_names"); +tagNames.Should().NotBeNull(); +tagName.Key.Should().NotBeNullOrEmpty(); +tagName.DocCount.Should().BeGreaterThan(0); +var tagsToProjects = tagName.ReverseNested("tags_to_project"); +tagsToProjects.Should().NotBeNull(); +var topProjectsPerTag = tagsToProjects.Terms("top_projects_per_tag"); +topProjectsPerTag.Should().NotBeNull(); +topProject.Key.Should().NotBeNullOrEmpty(); +topProject.DocCount.Should().BeGreaterThan(0); +---- + diff --git a/docs/asciidoc/aggregations/bucket/sampler/sampler-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/bucket/sampler/sampler-aggregation-usage.asciidoc new file mode 100644 index 00000000000..685d639ab54 --- /dev/null +++ b/docs/asciidoc/aggregations/bucket/sampler/sampler-aggregation-usage.asciidoc @@ -0,0 +1,78 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[sampler-aggregation-usage]] +== Sampler Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(aggs => aggs + .Sampler("sample", sm => sm + .ShardSize(200) + .Field(p => p.Name) + .Aggregations(aa => aa + .SignificantTerms("significant_names", st => st + .Field(p => p.Name) + ) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new SamplerAggregation("sample") + { + ShardSize = 200, + Field = "name", + Aggregations = new SignificantTermsAggregation("significant_names") + { + Field = "name" + } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "sample": { + "sampler": { + "shard_size": 200, + "field": "name" + }, + "aggs": { + "significant_names": { + "significant_terms": { + "field": "name" + } + } + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var sample = response.Aggs.Sampler("sample"); 
+sample.Should().NotBeNull(); +var sigTags = sample.SignificantTerms("significant_names"); +sigTags.Should().NotBeNull(); +---- + diff --git a/docs/asciidoc/aggregations/bucket/significant-terms/significant-terms-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/bucket/significant-terms/significant-terms-aggregation-usage.asciidoc new file mode 100644 index 00000000000..33b6cc3225a --- /dev/null +++ b/docs/asciidoc/aggregations/bucket/significant-terms/significant-terms-aggregation-usage.asciidoc @@ -0,0 +1,74 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[significant-terms-aggregation-usage]] +== Significant Terms Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(a => a + .SignificantTerms("significant_names", st => st + .Field(p => p.Name) + .MinimumDocumentCount(10) + .MutualInformation(mi => mi + .BackgroundIsSuperSet() + .IncludeNegatives() + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new SignificantTermsAggregation("significant_names") + { + Field = Field(p => p.Name), + MinimumDocumentCount = 10, + MutualInformation = new MutualInformationHeuristic + { + BackgroundIsSuperSet = true, + IncludeNegatives = true + } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "significant_names": { + "significant_terms": { + "field": "name", + "min_doc_count": 10, + "mutual_information": { + "background_is_superset": true, + "include_negatives": true + } + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var sigNames = response.Aggs.SignificantTerms("significant_names"); +sigNames.Should().NotBeNull(); +sigNames.DocCount.Should().BeGreaterThan(0); +---- + diff --git 
a/docs/asciidoc/aggregations/bucket/terms/terms-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/bucket/terms/terms-aggregation-usage.asciidoc new file mode 100644 index 00000000000..d0ad32dd1af --- /dev/null +++ b/docs/asciidoc/aggregations/bucket/terms/terms-aggregation-usage.asciidoc @@ -0,0 +1,111 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[terms-aggregation-usage]] +== Terms Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(a => a + .Terms("states", st => st + .Field(p => p.State) + .MinimumDocumentCount(2) + .Size(5) + .ShardSize(100) + .ShowTermDocumentCountError() + .ExecutionHint(TermsAggregationExecutionHint.Map) + .Missing("n/a") + .Script("'State of Being: '+_value") + .Order(TermsOrder.TermAscending) + .Order(TermsOrder.CountDescending) + .Meta(m => m + .Add("foo", "bar") + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new TermsAggregation("states") + { + Field = Field(p => p.State), + MinimumDocumentCount = 2, + Size = 5, + ShardSize = 100, + ShowTermDocumentCountError = true, + ExecutionHint = TermsAggregationExecutionHint.Map, + Missing = "n/a", + Script = new InlineScript("'State of Being: '+_value"), + Order = new List + { + TermsOrder.TermAscending, + TermsOrder.CountDescending + }, + Meta = new Dictionary + { + { "foo", "bar" } + } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "states": { + "meta": { + "foo": "bar" + }, + "terms": { + "field": "state", + "min_doc_count": 2, + "size": 5, + "shard_size": 100, + "show_term_doc_error_count": true, + "execution_hint": "map", + "missing": "n/a", + "script": { + "inline": "'State of Being: '+_value" + }, + "order": [ + { + "_term": "asc" + }, + { + "_count": "desc" + } + ] + } + } + } +} +---- + +=== 
Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var states = response.Aggs.Terms("states"); +states.Should().NotBeNull(); +states.DocCountErrorUpperBound.Should().HaveValue(); +states.SumOtherDocCount.Should().HaveValue(); +item.Key.Should().NotBeNullOrEmpty(); +item.DocCount.Should().BeGreaterOrEqualTo(1); +states.Meta.Should().NotBeNull().And.HaveCount(1); +states.Meta["foo"].Should().Be("bar"); +---- + diff --git a/docs/asciidoc/aggregations/metric/average/average-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/metric/average/average-aggregation-usage.asciidoc new file mode 100644 index 00000000000..734b5e8cba7 --- /dev/null +++ b/docs/asciidoc/aggregations/metric/average/average-aggregation-usage.asciidoc @@ -0,0 +1,77 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[average-aggregation-usage]] +== Average Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(a => a + .Average("average_commits", avg => avg + .Meta(m => m + .Add("foo", "bar") + ) + .Field(p => p.NumberOfCommits) + .Missing(10) + .Script("_value * 1.2") + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new AverageAggregation("average_commits", Field(p => p.NumberOfCommits)) + { + Meta = new Dictionary + { + { "foo", "bar" } + }, + Missing = 10, + Script = new InlineScript("_value * 1.2") + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "average_commits": { + "meta": { + "foo": "bar" + }, + "avg": { + "field": "numberOfCommits", + "missing": 10.0, + "script": { + "inline": "_value * 1.2" + } + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var commitsAvg = response.Aggs.Average("average_commits"); 
+commitsAvg.Should().NotBeNull(); +commitsAvg.Value.Should().BeGreaterThan(0); +commitsAvg.Meta.Should().NotBeNull().And.HaveCount(1); +commitsAvg.Meta["foo"].Should().Be("bar"); +---- + diff --git a/docs/asciidoc/aggregations/metric/cardinality/cardinality-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/metric/cardinality/cardinality-aggregation-usage.asciidoc new file mode 100644 index 00000000000..fb0810351cc --- /dev/null +++ b/docs/asciidoc/aggregations/metric/cardinality/cardinality-aggregation-usage.asciidoc @@ -0,0 +1,60 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[cardinality-aggregation-usage]] +== Cardinality Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(a => a + .Cardinality("state_count", c => c + .Field(p => p.State) + .PrecisionThreshold(100) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new CardinalityAggregation("state_count", Field(p => p.State)) + { + PrecisionThreshold = 100 + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "state_count": { + "cardinality": { + "field": "state", + "precision_threshold": 100 + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var projectCount = response.Aggs.Cardinality("state_count"); +projectCount.Should().NotBeNull(); +projectCount.Value.Should().Be(3); +---- + diff --git a/docs/asciidoc/aggregations/metric/extended-stats/extended-stats-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/metric/extended-stats/extended-stats-aggregation-usage.asciidoc new file mode 100644 index 00000000000..fa6c851ad10 --- /dev/null +++ b/docs/asciidoc/aggregations/metric/extended-stats/extended-stats-aggregation-usage.asciidoc @@ -0,0 +1,64 @@ +:ref_current: 
https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[extended-stats-aggregation-usage]] +== Extended Stats Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(a => a + .ExtendedStats("commit_stats", es => es + .Field(p => p.NumberOfCommits) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new ExtendedStatsAggregation("commit_stats", Field(p => p.NumberOfCommits)) +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "commit_stats": { + "extended_stats": { + "field": "numberOfCommits" + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var commitStats = response.Aggs.ExtendedStats("commit_stats"); +commitStats.Should().NotBeNull(); +commitStats.Average.Should().BeGreaterThan(0); +commitStats.Max.Should().BeGreaterThan(0); +commitStats.Min.Should().BeGreaterThan(0); +commitStats.Count.Should().BeGreaterThan(0); +commitStats.Sum.Should().BeGreaterThan(0); +commitStats.SumOfSquares.Should().BeGreaterThan(0); +commitStats.StdDeviation.Should().BeGreaterThan(0); +commitStats.StdDeviationBounds.Should().NotBeNull(); +commitStats.StdDeviationBounds.Upper.Should().BeGreaterThan(0); +commitStats.StdDeviationBounds.Lower.Should().NotBe(0); +---- + diff --git a/docs/asciidoc/aggregations/metric/geo-bounds/geo-bounds-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/metric/geo-bounds/geo-bounds-aggregation-usage.asciidoc new file mode 100644 index 00000000000..dcc46cf5830 --- /dev/null +++ b/docs/asciidoc/aggregations/metric/geo-bounds/geo-bounds-aggregation-usage.asciidoc @@ -0,0 +1,72 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + 
+[[geo-bounds-aggregation-usage]] +== Geo Bounds Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(a => a + .GeoBounds("viewport", gb => gb + .Field(p => p.Location) + .WrapLongitude(true) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new GeoBoundsAggregation("viewport", Field(p => p.Location)) + { + WrapLongitude = true + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "viewport": { + "geo_bounds": { + "field": "location", + "wrap_longitude": true + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var viewport = response.Aggs.GeoBounds("viewport"); +viewport.Should().NotBeNull(); +viewport.Bounds.Should().NotBeNull(); +var bottomRight = viewport.Bounds.BottomRight; +bottomRight.Should().NotBeNull(); +bottomRight.Lat.Should().HaveValue(); +GeoLocation.IsValidLatitude(bottomRight.Lat.Value).Should().BeTrue(); +bottomRight.Lon.Should().HaveValue(); +GeoLocation.IsValidLongitude(bottomRight.Lon.Value).Should().BeTrue(); +var topLeft = viewport.Bounds.TopLeft; +topLeft.Should().NotBeNull(); +topLeft.Lat.Should().HaveValue(); +GeoLocation.IsValidLatitude(topLeft.Lat.Value).Should().BeTrue(); +topLeft.Lon.Should().HaveValue(); +GeoLocation.IsValidLongitude(topLeft.Lon.Value).Should().BeTrue(); +---- + diff --git a/docs/asciidoc/aggregations/metric/max/max-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/metric/max/max-aggregation-usage.asciidoc new file mode 100644 index 00000000000..d015404711c --- /dev/null +++ b/docs/asciidoc/aggregations/metric/max/max-aggregation-usage.asciidoc @@ -0,0 +1,55 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[max-aggregation-usage]] +== Max Aggregation Usage + +=== Fluent DSL Example + 
+[source,csharp] +---- +s => s +.Aggregations(a => a + .Max("max_commits", m => m + .Field(p => p.NumberOfCommits) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new MaxAggregation("max_commits", Field(p => p.NumberOfCommits)) +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "max_commits": { + "max": { + "field": "numberOfCommits" + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var max = response.Aggs.Max("max_commits"); +max.Should().NotBeNull(); +max.Value.Should().BeGreaterThan(0); +---- + diff --git a/docs/asciidoc/aggregations/metric/min/min-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/metric/min/min-aggregation-usage.asciidoc new file mode 100644 index 00000000000..4212daf58f2 --- /dev/null +++ b/docs/asciidoc/aggregations/metric/min/min-aggregation-usage.asciidoc @@ -0,0 +1,55 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[min-aggregation-usage]] +== Min Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(a => a + .Min("min_commits", m => m + .Field(p => p.NumberOfCommits) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new MinAggregation("min_commits", Field(p => p.NumberOfCommits)) +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "min_commits": { + "min": { + "field": "numberOfCommits" + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var min = response.Aggs.Min("min_commits"); +min.Should().NotBeNull(); +min.Value.Should().BeGreaterThan(0); +---- + diff --git a/docs/asciidoc/aggregations/metric/percentile-ranks/percentile-ranks-aggregation-usage.asciidoc 
b/docs/asciidoc/aggregations/metric/percentile-ranks/percentile-ranks-aggregation-usage.asciidoc new file mode 100644 index 00000000000..537a99e3ec5 --- /dev/null +++ b/docs/asciidoc/aggregations/metric/percentile-ranks/percentile-ranks-aggregation-usage.asciidoc @@ -0,0 +1,84 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[percentile-ranks-aggregation-usage]] +== Percentile Ranks Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(a => a + .PercentileRanks("commits_outlier", pr => pr + .Field(p => p.NumberOfCommits) + .Values(15, 30) + .Method(m => m + .TDigest(td => td + .Compression(200) + ) + ) + .Script("doc['numberOfCommits'].value * 1.2") + .Missing(0) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new PercentileRanksAggregation("commits_outlier", Field(p => p.NumberOfCommits)) + { + Values = new List { 15, 30 }, + Method = new TDigestMethod + { + Compression = 200 + }, + Script = (InlineScript)"doc['numberOfCommits'].value * 1.2", + Missing = 0 + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "commits_outlier": { + "percentile_ranks": { + "field": "numberOfCommits", + "values": [ + 15.0, + 30.0 + ], + "tdigest": { + "compression": 200.0 + }, + "script": { + "inline": "doc['numberOfCommits'].value * 1.2" + }, + "missing": 0.0 + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var commitsOutlier = response.Aggs.PercentileRanks("commits_outlier"); +commitsOutlier.Should().NotBeNull(); +commitsOutlier.Items.Should().NotBeNullOrEmpty(); +item.Should().NotBeNull(); +---- + diff --git a/docs/asciidoc/aggregations/metric/percentiles/percentiles-aggregation-usage.asciidoc 
b/docs/asciidoc/aggregations/metric/percentiles/percentiles-aggregation-usage.asciidoc new file mode 100644 index 00000000000..34d361d36f5 --- /dev/null +++ b/docs/asciidoc/aggregations/metric/percentiles/percentiles-aggregation-usage.asciidoc @@ -0,0 +1,85 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[percentiles-aggregation-usage]] +== Percentiles Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(a => a + .Percentiles("commits_outlier", pr => pr + .Field(p => p.NumberOfCommits) + .Percents(95, 99, 99.9) + .Method(m => m + .HDRHistogram(hdr => hdr + .NumberOfSignificantValueDigits(3) + ) + ) + .Script("doc['numberOfCommits'].value * 1.2") + .Missing(0) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new PercentilesAggregation("commits_outlier", Field(p => p.NumberOfCommits)) + { + Percents = new[] { 95, 99, 99.9 }, + Method = new HDRHistogramMethod + { + NumberOfSignificantValueDigits = 3 + }, + Script = new InlineScript("doc['numberOfCommits'].value * 1.2"), + Missing = 0 + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "commits_outlier": { + "percentiles": { + "field": "numberOfCommits", + "percents": [ + 95.0, + 99.0, + 99.9 + ], + "hdr": { + "number_of_significant_value_digits": 3 + }, + "script": { + "inline": "doc['numberOfCommits'].value * 1.2" + }, + "missing": 0.0 + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var commitsOutlier = response.Aggs.Percentiles("commits_outlier"); +commitsOutlier.Should().NotBeNull(); +commitsOutlier.Items.Should().NotBeNullOrEmpty(); +item.Value.Should().BeGreaterThan(0); +---- + diff --git a/docs/asciidoc/aggregations/metric/scripted-metric/scripted-metric-aggregation-usage.asciidoc 
b/docs/asciidoc/aggregations/metric/scripted-metric/scripted-metric-aggregation-usage.asciidoc new file mode 100644 index 00000000000..b84ed678d92 --- /dev/null +++ b/docs/asciidoc/aggregations/metric/scripted-metric/scripted-metric-aggregation-usage.asciidoc @@ -0,0 +1,75 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[scripted-metric-aggregation-usage]] +== Scripted Metric Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(a => a + .ScriptedMetric("sum_the_hard_way", sm => sm + .InitScript("_agg['commits'] = []") + .MapScript("if (doc['state'].value == \"Stable\") { _agg.commits.add(doc['numberOfCommits']) }") + .CombineScript("sum = 0; for (c in _agg.commits) { sum += c }; return sum") + .ReduceScript("sum = 0; for (a in _aggs) { sum += a }; return sum") + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new ScriptedMetricAggregation("sum_the_hard_way") + { + InitScript = new InlineScript("_agg['commits'] = []"), + MapScript = new InlineScript("if (doc['state'].value == \"Stable\") { _agg.commits.add(doc['numberOfCommits']) }"), + CombineScript = new InlineScript("sum = 0; for (c in _agg.commits) { sum += c }; return sum"), + ReduceScript = new InlineScript("sum = 0; for (a in _aggs) { sum += a }; return sum") + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "sum_the_hard_way": { + "scripted_metric": { + "init_script": { + "inline": "_agg['commits'] = []" + }, + "map_script": { + "inline": "if (doc['state'].value == \"Stable\") { _agg.commits.add(doc['numberOfCommits']) }" + }, + "combine_script": { + "inline": "sum = 0; for (c in _agg.commits) { sum += c }; return sum" + }, + "reduce_script": { + "inline": "sum = 0; for (a in _aggs) { sum += a }; return sum" + } + } + } + } +} +---- + 
+=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var sumTheHardWay = response.Aggs.ScriptedMetric("sum_the_hard_way"); +sumTheHardWay.Should().NotBeNull(); +sumTheHardWay.Value().Should().BeGreaterThan(0); +---- + diff --git a/docs/asciidoc/aggregations/metric/stats/stats-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/metric/stats/stats-aggregation-usage.asciidoc new file mode 100644 index 00000000000..a74cb818864 --- /dev/null +++ b/docs/asciidoc/aggregations/metric/stats/stats-aggregation-usage.asciidoc @@ -0,0 +1,59 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[stats-aggregation-usage]] +== Stats Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(a => a + .Stats("commit_stats", st => st + .Field(p => p.NumberOfCommits) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new StatsAggregation("commit_stats", Field(p => p.NumberOfCommits)) +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "commit_stats": { + "stats": { + "field": "numberOfCommits" + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var commitStats = response.Aggs.Stats("commit_stats"); +commitStats.Should().NotBeNull(); +commitStats.Average.Should().BeGreaterThan(0); +commitStats.Max.Should().BeGreaterThan(0); +commitStats.Min.Should().BeGreaterThan(0); +commitStats.Count.Should().BeGreaterThan(0); +commitStats.Sum.Should().BeGreaterThan(0); +---- + diff --git a/docs/asciidoc/aggregations/metric/sum/sum-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/metric/sum/sum-aggregation-usage.asciidoc new file mode 100644 index 00000000000..2fbce89b3bb --- /dev/null +++ 
b/docs/asciidoc/aggregations/metric/sum/sum-aggregation-usage.asciidoc @@ -0,0 +1,55 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[sum-aggregation-usage]] +== Sum Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(a => a + .Sum("commits_sum", sm => sm + .Field(p => p.NumberOfCommits) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new SumAggregation("commits_sum", Field(p => p.NumberOfCommits)) +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "commits_sum": { + "sum": { + "field": "numberOfCommits" + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var commitsSum = response.Aggs.Sum("commits_sum"); +commitsSum.Should().NotBeNull(); +commitsSum.Value.Should().BeGreaterThan(0); +---- + diff --git a/docs/asciidoc/aggregations/metric/top-hits/top-hits-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/metric/top-hits/top-hits-aggregation-usage.asciidoc new file mode 100644 index 00000000000..df0b796d11c --- /dev/null +++ b/docs/asciidoc/aggregations/metric/top-hits/top-hits-aggregation-usage.asciidoc @@ -0,0 +1,171 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[top-hits-aggregation-usage]] +== Top Hits Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(a => a + .Terms("states", t => t + .Field(p => p.State) + .Aggregations(aa => aa + .TopHits("top_state_hits", th => th + .Sort(srt => srt + .Field(p => p.StartedOn) + .Order(SortOrder.Descending) + ) + .Source(src => src + .Include(fs => fs + .Field(p => p.Name) + .Field(p => p.StartedOn) + ) + ) + .Size(1) 
+ .Version() + .Explain() + .FielddataFields(fd => fd + .Field(p => p.State) + .Field(p => p.NumberOfCommits) + ) + .Highlight(h => h + .Fields( + hf => hf.Field(p => p.Tags), + hf => hf.Field(p => p.Description) + ) + ) + .ScriptFields(sfs => sfs + .ScriptField("commit_factor", sf => sf + .Inline("doc['numberOfCommits'].value * 2") + ) + ) + ) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new TermsAggregation("states") + { + Field = Field(p => p.State), + Aggregations = new TopHitsAggregation("top_state_hits") + { + Sort = new List + { + { + new SortField { Field = Field(p => p.StartedOn), Order = SortOrder.Descending } + } + }, + Source = new SourceFilter + { + Include = new [] { "name", "startedOn" } + }, + Size = 1, + Version = true, + Explain = true, + FielddataFields = new [] { "state", "numberOfCommits" }, + Highlight = new Highlight + { + Fields = new Dictionary + { + { Field(p => p.Tags), new HighlightField() }, + { Field(p => p.Description), new HighlightField() } + } + }, + ScriptFields = new ScriptFields + { + { "commit_factor", new ScriptField { Script = new InlineScript("doc['numberOfCommits'].value * 2") } } + } + } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "states": { + "terms": { + "field": "state" + }, + "aggs": { + "top_state_hits": { + "top_hits": { + "sort": [ + { + "startedOn": { + "order": "desc" + } + } + ], + "_source": { + "include": [ + "name", + "startedOn" + ] + }, + "size": 1, + "version": true, + "explain": true, + "fielddata_fields": [ + "state", + "numberOfCommits" + ], + "highlight": { + "fields": { + "tags": {}, + "description": {} + } + }, + "script_fields": { + "commit_factor": { + "script": { + "inline": "doc['numberOfCommits'].value * 2" + } + } + } + } + } + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var states = response.Aggs.Terms("states"); 
+states.Should().NotBeNull(); +states.Buckets.Should().NotBeNullOrEmpty(); +state.Key.Should().NotBeNullOrEmpty(); +state.DocCount.Should().BeGreaterThan(0); +var topStateHits = state.TopHits("top_state_hits"); +topStateHits.Should().NotBeNull(); +topStateHits.Total.Should().BeGreaterThan(0); +var hits = topStateHits.Hits(); +hits.Should().NotBeNullOrEmpty(); +hits.All(h => h.Explanation != null).Should().BeTrue(); +hits.All(h => h.Version.HasValue).Should().BeTrue(); +hits.All(h => h.Fields.ValuesOf("state").Any()).Should().BeTrue(); +hits.All(h => h.Fields.ValuesOf("numberOfCommits").Any()).Should().BeTrue(); +hits.All(h => h.Fields.ValuesOf("commit_factor").Any()).Should().BeTrue(); +topStateHits.Documents().Should().NotBeEmpty(); +---- + diff --git a/docs/asciidoc/aggregations/metric/value-count/value-count-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/metric/value-count/value-count-aggregation-usage.asciidoc new file mode 100644 index 00000000000..543960f3202 --- /dev/null +++ b/docs/asciidoc/aggregations/metric/value-count/value-count-aggregation-usage.asciidoc @@ -0,0 +1,55 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[value-count-aggregation-usage]] +== Value Count Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Aggregations(a => a + .ValueCount("commit_count", c => c + .Field(p => p.NumberOfCommits) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Aggregations = new ValueCountAggregation("commit_count", Field(p => p.NumberOfCommits)) +} +---- + +[source,javascript] +.Example json output +---- +{ + "aggs": { + "commit_count": { + "value_count": { + "field": "numberOfCommits" + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var commitCount = 
response.Aggs.ValueCount("commit_count"); +commitCount.Should().NotBeNull(); +commitCount.Value.Should().BeGreaterThan(0); +---- + diff --git a/docs/asciidoc/aggregations/pipeline/average-bucket/average-bucket-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/pipeline/average-bucket/average-bucket-aggregation-usage.asciidoc new file mode 100644 index 00000000000..5627246d18e --- /dev/null +++ b/docs/asciidoc/aggregations/pipeline/average-bucket/average-bucket-aggregation-usage.asciidoc @@ -0,0 +1,95 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[average-bucket-aggregation-usage]] +== Average Bucket Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Size(0) +.Aggregations(a => a + .DateHistogram("projects_started_per_month", dh => dh + .Field(p => p.StartedOn) + .Interval(DateInterval.Month) + .Aggregations(aa => aa + .Sum("commits", sm => sm + .Field(p => p.NumberOfCommits) + ) + ) + ) + .AverageBucket("average_commits_per_month", aaa => aaa + .BucketsPath("projects_started_per_month>commits") + .GapPolicy(GapPolicy.InsertZeros) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest() +{ + Size = 0, + Aggregations = new DateHistogramAggregation("projects_started_per_month") + { + Field = "startedOn", + Interval = DateInterval.Month, + Aggregations = new SumAggregation("commits", "numberOfCommits") + } + && new AverageBucketAggregation("average_commits_per_month", "projects_started_per_month>commits") + { + GapPolicy = GapPolicy.InsertZeros + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "size": 0, + "aggs": { + "projects_started_per_month": { + "date_histogram": { + "field": "startedOn", + "interval": "month" + }, + "aggs": { + "commits": { + "sum": { + "field": "numberOfCommits" + } + } + } + }, + "average_commits_per_month": { + 
"avg_bucket": { + "buckets_path": "projects_started_per_month>commits", + "gap_policy": "insert_zeros" + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var projectsPerMonth = response.Aggs.DateHistogram("projects_started_per_month"); +projectsPerMonth.Should().NotBeNull(); +projectsPerMonth.Buckets.Should().NotBeNull(); +projectsPerMonth.Buckets.Count.Should().BeGreaterThan(0); +var averageCommits = response.Aggs.AverageBucket("average_commits_per_month"); +averageCommits.Should().NotBeNull(); +averageCommits.Value.Should().BeGreaterThan(0); +---- + diff --git a/docs/asciidoc/aggregations/pipeline/bucket-script/bucket-script-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/pipeline/bucket-script/bucket-script-aggregation-usage.asciidoc new file mode 100644 index 00000000000..5667d998671 --- /dev/null +++ b/docs/asciidoc/aggregations/pipeline/bucket-script/bucket-script-aggregation-usage.asciidoc @@ -0,0 +1,143 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[bucket-script-aggregation-usage]] +== Bucket Script Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Size(0) +.Aggregations(a => a + .DateHistogram("projects_started_per_month", dh => dh + .Field(p => p.StartedOn) + .Interval(DateInterval.Month) + .Aggregations(aa => aa + .Sum("commits", sm => sm + .Field(p => p.NumberOfCommits) + ) + .Filter("stable_state", f => f + .Filter(ff => ff + .Term(p => p.State, "Stable") + ) + .Aggregations(aaa => aaa + .Sum("commits", sm => sm + .Field(p => p.NumberOfCommits) + ) + ) + ) + .BucketScript("stable_percentage", bs => bs + .BucketsPath(bp => bp + .Add("totalCommits", "commits") + .Add("stableCommits", "stable_state>commits") + ) + .Script("stableCommits / totalCommits * 100") + ) + ) + ) +) +---- + +=== Object Initializer Syntax Example + 
+[source,csharp] +---- +new SearchRequest() +{ + Size = 0, + Aggregations = new DateHistogramAggregation("projects_started_per_month") + { + Field = "startedOn", + Interval = DateInterval.Month, + Aggregations = + new SumAggregation("commits", "numberOfCommits") && + new FilterAggregation("stable_state") + { + Filter = new TermQuery + { + Field = "state", + Value = "Stable" + }, + Aggregations = new SumAggregation("commits", "numberOfCommits") + } && + new BucketScriptAggregation("stable_percentage", new MultiBucketsPath + { + { "totalCommits", "commits" }, + { "stableCommits", "stable_state>commits" } + }) + { + Script = (InlineScript)"stableCommits / totalCommits * 100" + } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "size": 0, + "aggs": { + "projects_started_per_month": { + "date_histogram": { + "field": "startedOn", + "interval": "month" + }, + "aggs": { + "commits": { + "sum": { + "field": "numberOfCommits" + } + }, + "stable_state": { + "filter": { + "term": { + "state": { + "value": "Stable" + } + } + }, + "aggs": { + "commits": { + "sum": { + "field": "numberOfCommits" + } + } + } + }, + "stable_percentage": { + "bucket_script": { + "buckets_path": { + "totalCommits": "commits", + "stableCommits": "stable_state>commits" + }, + "script": { + "inline": "stableCommits / totalCommits * 100" + } + } + } + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var projectsPerMonth = response.Aggs.DateHistogram("projects_started_per_month"); +projectsPerMonth.Should().NotBeNull(); +projectsPerMonth.Buckets.Should().NotBeNull(); +projectsPerMonth.Buckets.Count.Should().BeGreaterThan(0); +var stablePercentage = item.BucketScript("stable_percentage"); +stablePercentage.Should().NotBeNull(); +stablePercentage.Value.Should().HaveValue(); +---- + diff --git a/docs/asciidoc/aggregations/pipeline/bucket-selector/bucket-selector-aggregation-usage.asciidoc 
b/docs/asciidoc/aggregations/pipeline/bucket-selector/bucket-selector-aggregation-usage.asciidoc new file mode 100644 index 00000000000..bf611810360 --- /dev/null +++ b/docs/asciidoc/aggregations/pipeline/bucket-selector/bucket-selector-aggregation-usage.asciidoc @@ -0,0 +1,105 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[bucket-selector-aggregation-usage]] +== Bucket Selector Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Size(0) +.Aggregations(a => a + .DateHistogram("projects_started_per_month", dh => dh + .Field(p => p.StartedOn) + .Interval(DateInterval.Month) + .Aggregations(aa => aa + .Sum("commits", sm => sm + .Field(p => p.NumberOfCommits) + ) + .BucketSelector("commits_bucket_filter", bs => bs + .BucketsPath(bp => bp + .Add("totalCommits", "commits") + ) + .Script("totalCommits >= 500") + ) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest() +{ + Size = 0, + Aggregations = new DateHistogramAggregation("projects_started_per_month") + { + Field = "startedOn", + Interval = DateInterval.Month, + Aggregations = + new SumAggregation("commits", "numberOfCommits") && + new BucketSelectorAggregation("commits_bucket_filter", new MultiBucketsPath + { + { "totalCommits", "commits" }, + }) + { + Script = (InlineScript)"totalCommits >= 500" + } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "size": 0, + "aggs": { + "projects_started_per_month": { + "date_histogram": { + "field": "startedOn", + "interval": "month" + }, + "aggs": { + "commits": { + "sum": { + "field": "numberOfCommits" + } + }, + "commits_bucket_filter": { + "bucket_selector": { + "buckets_path": { + "totalCommits": "commits" + }, + "script": { + "inline": "totalCommits >= 500" + } + } + } + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- 
+response.IsValid.Should().BeTrue(); +var projectsPerMonth = response.Aggs.DateHistogram("projects_started_per_month"); +projectsPerMonth.Should().NotBeNull(); +projectsPerMonth.Buckets.Should().NotBeNull(); +projectsPerMonth.Buckets.Count.Should().BeGreaterThan(0); +var commits = item.Sum("commits"); +commits.Should().NotBeNull(); +commits.Value.Should().BeGreaterOrEqualTo(500); +---- + diff --git a/docs/asciidoc/aggregations/pipeline/cumulative-sum/cumulative-sum-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/pipeline/cumulative-sum/cumulative-sum-aggregation-usage.asciidoc new file mode 100644 index 00000000000..c3e67635a82 --- /dev/null +++ b/docs/asciidoc/aggregations/pipeline/cumulative-sum/cumulative-sum-aggregation-usage.asciidoc @@ -0,0 +1,91 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[cumulative-sum-aggregation-usage]] +== Cumulative Sum Aggregation Usage + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var projectsPerMonth = response.Aggs.DateHistogram("projects_started_per_month"); +projectsPerMonth.Should().NotBeNull(); +projectsPerMonth.Buckets.Should().NotBeNull(); +projectsPerMonth.Buckets.Count.Should().BeGreaterThan(0); +var commitsDerivative = item.Derivative("cumulative_commits"); +commitsDerivative.Should().NotBeNull(); +commitsDerivative.Value.Should().NotBe(null); +---- + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Size(0) +.Aggregations(a => a + .DateHistogram("projects_started_per_month", dh => dh + .Field(p => p.StartedOn) + .Interval(DateInterval.Month) + .Aggregations(aa => aa + .Sum("commits", sm => sm + .Field(p => p.NumberOfCommits) + ) + .CumulativeSum("cumulative_commits", d => d + .BucketsPath("commits") + ) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Size = 0, + 
Aggregations = new DateHistogramAggregation("projects_started_per_month") + { + Field = "startedOn", + Interval = DateInterval.Month, + Aggregations = + new SumAggregation("commits", "numberOfCommits") && + new CumulativeSumAggregation("cumulative_commits", "commits") + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "size": 0, + "aggs": { + "projects_started_per_month": { + "date_histogram": { + "field": "startedOn", + "interval": "month" + }, + "aggs": { + "commits": { + "sum": { + "field": "numberOfCommits" + } + }, + "cumulative_commits": { + "cumulative_sum": { + "buckets_path": "commits" + } + } + } + } + } +} +---- + diff --git a/docs/asciidoc/aggregations/pipeline/derivative/derivative-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/pipeline/derivative/derivative-aggregation-usage.asciidoc new file mode 100644 index 00000000000..4731cc39da5 --- /dev/null +++ b/docs/asciidoc/aggregations/pipeline/derivative/derivative-aggregation-usage.asciidoc @@ -0,0 +1,91 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[derivative-aggregation-usage]] +== Derivative Aggregation Usage + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var projectsPerMonth = response.Aggs.DateHistogram("projects_started_per_month"); +projectsPerMonth.Should().NotBeNull(); +projectsPerMonth.Buckets.Should().NotBeNull(); +projectsPerMonth.Buckets.Count.Should().BeGreaterThan(0); +var commitsDerivative = item.Derivative("commits_derivative"); +commitsDerivative.Should().NotBeNull(); +commitsDerivative.Value.Should().NotBe(null); +---- + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Size(0) +.Aggregations(a => a + .DateHistogram("projects_started_per_month", dh => dh + .Field(p => p.StartedOn) + .Interval(DateInterval.Month) + .Aggregations(aa => aa + .Sum("commits", sm => sm + 
.Field(p => p.NumberOfCommits) + ) + .Derivative("commits_derivative", d => d + .BucketsPath("commits") + ) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Size = 0, + Aggregations = new DateHistogramAggregation("projects_started_per_month") + { + Field = "startedOn", + Interval = DateInterval.Month, + Aggregations = + new SumAggregation("commits", "numberOfCommits") && + new DerivativeAggregation("commits_derivative", "commits") + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "size": 0, + "aggs": { + "projects_started_per_month": { + "date_histogram": { + "field": "startedOn", + "interval": "month" + }, + "aggs": { + "commits": { + "sum": { + "field": "numberOfCommits" + } + }, + "commits_derivative": { + "derivative": { + "buckets_path": "commits" + } + } + } + } + } +} +---- + diff --git a/docs/asciidoc/aggregations/pipeline/max-bucket/max-bucket-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/pipeline/max-bucket/max-bucket-aggregation-usage.asciidoc new file mode 100644 index 00000000000..fcfa9117d4d --- /dev/null +++ b/docs/asciidoc/aggregations/pipeline/max-bucket/max-bucket-aggregation-usage.asciidoc @@ -0,0 +1,93 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[max-bucket-aggregation-usage]] +== Max Bucket Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Size(0) +.Aggregations(a => a + .DateHistogram("projects_started_per_month", dh => dh + .Field(p => p.StartedOn) + .Interval(DateInterval.Month) + .Aggregations(aa => aa + .Sum("commits", sm => sm + .Field(p => p.NumberOfCommits) + ) + ) + ) + .MaxBucket("max_commits_per_month", aaa => aaa + .BucketsPath("projects_started_per_month>commits") + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest() +{ + Size = 0, + 
Aggregations = new DateHistogramAggregation("projects_started_per_month") + { + Field = "startedOn", + Interval = DateInterval.Month, + Aggregations = new SumAggregation("commits", "numberOfCommits") + } + && new MaxBucketAggregation("max_commits_per_month", "projects_started_per_month>commits") +} +---- + +[source,javascript] +.Example json output +---- +{ + "size": 0, + "aggs": { + "projects_started_per_month": { + "date_histogram": { + "field": "startedOn", + "interval": "month" + }, + "aggs": { + "commits": { + "sum": { + "field": "numberOfCommits" + } + } + } + }, + "max_commits_per_month": { + "max_bucket": { + "buckets_path": "projects_started_per_month>commits" + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var projectsPerMonth = response.Aggs.DateHistogram("projects_started_per_month"); +projectsPerMonth.Should().NotBeNull(); +projectsPerMonth.Buckets.Should().NotBeNull(); +projectsPerMonth.Buckets.Count.Should().BeGreaterThan(0); +var maxCommits = response.Aggs.MaxBucket("max_commits_per_month"); +maxCommits.Should().NotBeNull(); +maxCommits.Value.Should().BeGreaterThan(0); +maxCommits.Keys.Should().NotBeNull(); +maxCommits.Keys.Count.Should().BeGreaterOrEqualTo(1); +key.Should().NotBeNull(); +---- + diff --git a/docs/asciidoc/aggregations/pipeline/min-bucket/min-bucket-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/pipeline/min-bucket/min-bucket-aggregation-usage.asciidoc new file mode 100644 index 00000000000..ac8d91496e8 --- /dev/null +++ b/docs/asciidoc/aggregations/pipeline/min-bucket/min-bucket-aggregation-usage.asciidoc @@ -0,0 +1,93 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[min-bucket-aggregation-usage]] +== Min Bucket Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Size(0) +.Aggregations(a => a + 
.DateHistogram("projects_started_per_month", dh => dh + .Field(p => p.StartedOn) + .Interval(DateInterval.Month) + .Aggregations(aa => aa + .Sum("commits", sm => sm + .Field(p => p.NumberOfCommits) + ) + ) + ) + .MinBucket("min_commits_per_month", aaa => aaa + .BucketsPath("projects_started_per_month>commits") + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest() +{ + Size = 0, + Aggregations = new DateHistogramAggregation("projects_started_per_month") + { + Field = "startedOn", + Interval = DateInterval.Month, + Aggregations = new SumAggregation("commits", "numberOfCommits") + } + && new MinBucketAggregation("min_commits_per_month", "projects_started_per_month>commits") +} +---- + +[source,javascript] +.Example json output +---- +{ + "size": 0, + "aggs": { + "projects_started_per_month": { + "date_histogram": { + "field": "startedOn", + "interval": "month" + }, + "aggs": { + "commits": { + "sum": { + "field": "numberOfCommits" + } + } + } + }, + "min_commits_per_month": { + "min_bucket": { + "buckets_path": "projects_started_per_month>commits" + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var projectsPerMonth = response.Aggs.DateHistogram("projects_started_per_month"); +projectsPerMonth.Should().NotBeNull(); +projectsPerMonth.Buckets.Should().NotBeNull(); +projectsPerMonth.Buckets.Count.Should().BeGreaterThan(0); +var minCommits = response.Aggs.MinBucket("min_commits_per_month"); +minCommits.Should().NotBeNull(); +minCommits.Value.Should().BeGreaterThan(0); +minCommits.Keys.Should().NotBeNull(); +minCommits.Keys.Count.Should().BeGreaterOrEqualTo(1); +key.Should().NotBeNullOrEmpty(); +---- + diff --git a/docs/asciidoc/aggregations/pipeline/moving-average/moving-average-ewma-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/pipeline/moving-average/moving-average-ewma-aggregation-usage.asciidoc new file mode 100644 index 00000000000..74a76fe555a --- 
/dev/null +++ b/docs/asciidoc/aggregations/pipeline/moving-average/moving-average-ewma-aggregation-usage.asciidoc @@ -0,0 +1,106 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[moving-average-ewma-aggregation-usage]] +== Moving Average Ewma Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Size(0) +.Aggregations(a => a + .DateHistogram("projects_started_per_month", dh => dh + .Field(p => p.StartedOn) + .Interval(DateInterval.Month) + .Aggregations(aa => aa + .Sum("commits", sm => sm + .Field(p => p.NumberOfCommits) + ) + .MovingAverage("commits_moving_avg", mv => mv + .BucketsPath("commits") + .Model(m => m + .Ewma(e => e + .Alpha(0.3f) + ) + ) + ) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest() +{ + Size = 0, + Aggregations = new DateHistogramAggregation("projects_started_per_month") + { + Field = "startedOn", + Interval = DateInterval.Month, + Aggregations = + new SumAggregation("commits", "numberOfCommits") && + new MovingAverageAggregation("commits_moving_avg", "commits") + { + Model = new EwmaModel + { + Alpha = 0.3f, + } + } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "size": 0, + "aggs": { + "projects_started_per_month": { + "date_histogram": { + "field": "startedOn", + "interval": "month" + }, + "aggs": { + "commits": { + "sum": { + "field": "numberOfCommits" + } + }, + "commits_moving_avg": { + "moving_avg": { + "buckets_path": "commits", + "model": "ewma", + "settings": { + "alpha": 0.3 + } + } + } + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var projectsPerMonth = response.Aggs.DateHistogram("projects_started_per_month"); +projectsPerMonth.Should().NotBeNull(); +projectsPerMonth.Buckets.Should().NotBeNull(); 
+projectsPerMonth.Buckets.Count.Should().BeGreaterThan(0); +var movingAvg = item.MovingAverage("commits_moving_avg"); +movingAvg.Should().NotBeNull(); +movingAvg.Value.Should().BeGreaterThan(0); +---- + diff --git a/docs/asciidoc/aggregations/pipeline/moving-average/moving-average-holt-linear-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/pipeline/moving-average/moving-average-holt-linear-aggregation-usage.asciidoc new file mode 100644 index 00000000000..0d93a7c0880 --- /dev/null +++ b/docs/asciidoc/aggregations/pipeline/moving-average/moving-average-holt-linear-aggregation-usage.asciidoc @@ -0,0 +1,109 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[moving-average-holt-linear-aggregation-usage]] +== Moving Average Holt Linear Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Size(0) +.Aggregations(a => a + .DateHistogram("projects_started_per_month", dh => dh + .Field(p => p.StartedOn) + .Interval(DateInterval.Month) + .Aggregations(aa => aa + .Sum("commits", sm => sm + .Field(p => p.NumberOfCommits) + ) + .MovingAverage("commits_moving_avg", mv => mv + .BucketsPath("commits") + .Model(m => m + .HoltLinear(hl => hl + .Alpha(0.5f) + .Beta(0.5f) + ) + ) + ) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest() +{ + Size = 0, + Aggregations = new DateHistogramAggregation("projects_started_per_month") + { + Field = "startedOn", + Interval = DateInterval.Month, + Aggregations = + new SumAggregation("commits", "numberOfCommits") && + new MovingAverageAggregation("commits_moving_avg", "commits") + { + Model = new HoltLinearModel + { + Alpha = 0.5f, + Beta = 0.5f, + } + } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "size": 0, + "aggs": { + "projects_started_per_month": { + "date_histogram": { + "field": "startedOn", + 
"interval": "month" + }, + "aggs": { + "commits": { + "sum": { + "field": "numberOfCommits" + } + }, + "commits_moving_avg": { + "moving_avg": { + "buckets_path": "commits", + "model": "holt", + "settings": { + "alpha": 0.5, + "beta": 0.5 + } + } + } + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var projectsPerMonth = response.Aggs.DateHistogram("projects_started_per_month"); +projectsPerMonth.Should().NotBeNull(); +projectsPerMonth.Buckets.Should().NotBeNull(); +projectsPerMonth.Buckets.Count.Should().BeGreaterThan(0); +var movingAvg = item.MovingAverage("commits_moving_avg"); +movingAvg.Should().NotBeNull(); +movingAvg.Value.Should().BeGreaterThan(0); +---- + diff --git a/docs/asciidoc/aggregations/pipeline/moving-average/moving-average-holt-winters-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/pipeline/moving-average/moving-average-holt-winters-aggregation-usage.asciidoc new file mode 100644 index 00000000000..21eeddf84d0 --- /dev/null +++ b/docs/asciidoc/aggregations/pipeline/moving-average/moving-average-holt-winters-aggregation-usage.asciidoc @@ -0,0 +1,117 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[moving-average-holt-winters-aggregation-usage]] +== Moving Average Holt Winters Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Size(0) +.Aggregations(a => a + .DateHistogram("projects_started_per_month", dh => dh + .Field(p => p.StartedOn) + .Interval(DateInterval.Month) + .Aggregations(aa => aa + .Sum("commits", sm => sm + .Field(p => p.NumberOfCommits) + ) + .MovingAverage("commits_moving_avg", mv => mv + .BucketsPath("commits") + .Window(60) + .Model(m => m + .HoltWinters(hw => hw + .Type(HoltWintersType.Multiplicative) + .Alpha(0.5f) + .Beta(0.5f) + .Gamma(0.5f) + .Period(30) + .Pad(false) + ) + ) + ) + ) + ) +) 
+---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest() +{ + Size = 0, + Aggregations = new DateHistogramAggregation("projects_started_per_month") + { + Field = "startedOn", + Interval = DateInterval.Month, + Aggregations = + new SumAggregation("commits", "numberOfCommits") && + new MovingAverageAggregation("commits_moving_avg", "commits") + { + Window = 60, + Model = new HoltWintersModel + { + Type = HoltWintersType.Multiplicative, + Alpha = 0.5f, + Beta = 0.5f, + Gamma = 0.5f, + Period = 30, + Pad = false + } + } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "size": 0, + "aggs": { + "projects_started_per_month": { + "date_histogram": { + "field": "startedOn", + "interval": "month" + }, + "aggs": { + "commits": { + "sum": { + "field": "numberOfCommits" + } + }, + "commits_moving_avg": { + "moving_avg": { + "buckets_path": "commits", + "window": 60, + "model": "holt_winters", + "settings": { + "type": "mult", + "alpha": 0.5, + "beta": 0.5, + "gamma": 0.5, + "period": 30, + "pad": false + } + } + } + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +---- + diff --git a/docs/asciidoc/aggregations/pipeline/moving-average/moving-average-linear-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/pipeline/moving-average/moving-average-linear-aggregation-usage.asciidoc new file mode 100644 index 00000000000..0e0a76591ff --- /dev/null +++ b/docs/asciidoc/aggregations/pipeline/moving-average/moving-average-linear-aggregation-usage.asciidoc @@ -0,0 +1,102 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[moving-average-linear-aggregation-usage]] +== Moving Average Linear Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Size(0) +.Aggregations(a => a + 
.DateHistogram("projects_started_per_month", dh => dh + .Field(p => p.StartedOn) + .Interval(DateInterval.Month) + .Aggregations(aa => aa + .Sum("commits", sm => sm + .Field(p => p.NumberOfCommits) + ) + .MovingAverage("commits_moving_avg", mv => mv + .BucketsPath("commits") + .GapPolicy(GapPolicy.InsertZeros) + .Model(m => m + .Linear() + ) + ) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest() +{ + Size = 0, + Aggregations = new DateHistogramAggregation("projects_started_per_month") + { + Field = "startedOn", + Interval = DateInterval.Month, + Aggregations = + new SumAggregation("commits", "numberOfCommits") && + new MovingAverageAggregation("commits_moving_avg", "commits") + { + GapPolicy = GapPolicy.InsertZeros, + Model = new LinearModel() + } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "size": 0, + "aggs": { + "projects_started_per_month": { + "date_histogram": { + "field": "startedOn", + "interval": "month" + }, + "aggs": { + "commits": { + "sum": { + "field": "numberOfCommits" + } + }, + "commits_moving_avg": { + "moving_avg": { + "buckets_path": "commits", + "gap_policy": "insert_zeros", + "model": "linear", + "settings": {} + } + } + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var projectsPerMonth = response.Aggs.DateHistogram("projects_started_per_month"); +projectsPerMonth.Should().NotBeNull(); +projectsPerMonth.Buckets.Should().NotBeNull(); +projectsPerMonth.Buckets.Count.Should().BeGreaterThan(0); +var movingAvg = item.MovingAverage("commits_moving_avg"); +movingAvg.Should().NotBeNull(); +movingAvg.Value.Should().BeGreaterThan(0); +---- + diff --git a/docs/asciidoc/aggregations/pipeline/moving-average/moving-average-simple-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/pipeline/moving-average/moving-average-simple-aggregation-usage.asciidoc new file mode 100644 index 00000000000..0081d2d43a7 --- 
/dev/null +++ b/docs/asciidoc/aggregations/pipeline/moving-average/moving-average-simple-aggregation-usage.asciidoc @@ -0,0 +1,105 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[moving-average-simple-aggregation-usage]] +== Moving Average Simple Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Size(0) +.Aggregations(a => a + .DateHistogram("projects_started_per_month", dh => dh + .Field(p => p.StartedOn) + .Interval(DateInterval.Month) + .Aggregations(aa => aa + .Sum("commits", sm => sm + .Field(p => p.NumberOfCommits) + ) + .MovingAverage("commits_moving_avg", mv => mv + .BucketsPath("commits") + .Window(30) + .Predict(10) + .Model(m => m + .Simple() + ) + ) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest() +{ + Size = 0, + Aggregations = new DateHistogramAggregation("projects_started_per_month") + { + Field = "startedOn", + Interval = DateInterval.Month, + Aggregations = + new SumAggregation("commits", "numberOfCommits") && + new MovingAverageAggregation("commits_moving_avg", "commits") + { + Window = 30, + Predict = 10, + Model = new SimpleModel() + } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "size": 0, + "aggs": { + "projects_started_per_month": { + "date_histogram": { + "field": "startedOn", + "interval": "month" + }, + "aggs": { + "commits": { + "sum": { + "field": "numberOfCommits" + } + }, + "commits_moving_avg": { + "moving_avg": { + "buckets_path": "commits", + "model": "simple", + "window": 30, + "predict": 10, + "settings": {} + } + } + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var projectsPerMonth = response.Aggs.DateHistogram("projects_started_per_month"); +projectsPerMonth.Should().NotBeNull(); 
+projectsPerMonth.Buckets.Should().NotBeNull(); +projectsPerMonth.Buckets.Count.Should().BeGreaterThan(0); +var movingAvg = item.Sum("commits_moving_avg"); +movingAvg.Should().NotBeNull(); +movingAvg.Value.Should().BeGreaterThan(0); +---- + diff --git a/docs/asciidoc/aggregations/pipeline/serial-differencing/serial-differencing-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/pipeline/serial-differencing/serial-differencing-aggregation-usage.asciidoc new file mode 100644 index 00000000000..9970836ed9f --- /dev/null +++ b/docs/asciidoc/aggregations/pipeline/serial-differencing/serial-differencing-aggregation-usage.asciidoc @@ -0,0 +1,96 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[serial-differencing-aggregation-usage]] +== Serial Differencing Aggregation Usage + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var projectsPerMonth = response.Aggs.DateHistogram("projects_started_per_month"); +projectsPerMonth.Should().NotBeNull(); +projectsPerMonth.Buckets.Should().NotBeNull(); +projectsPerMonth.Buckets.Count.Should().BeGreaterThan(0); +var commits = item.Sum("commits"); +commits.Should().NotBeNull(); +commits.Value.Should().NotBe(null); +---- + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Size(0) +.Aggregations(a => a + .DateHistogram("projects_started_per_month", dh => dh + .Field(p => p.StartedOn) + .Interval(DateInterval.Month) + .Aggregations(aa => aa + .Sum("commits", sm => sm + .Field(p => p.NumberOfCommits) + ) + .SerialDifferencing("thirtieth_difference", d => d + .BucketsPath("commits") + .Lag(30) + ) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Size = 0, + Aggregations = new DateHistogramAggregation("projects_started_per_month") + { + Field = "startedOn", + Interval = DateInterval.Month, + 
Aggregations = + new SumAggregation("commits", "numberOfCommits") && + new SerialDifferencingAggregation("thirtieth_difference", "commits") + { + Lag = 30 + } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "size": 0, + "aggs": { + "projects_started_per_month": { + "date_histogram": { + "field": "startedOn", + "interval": "month" + }, + "aggs": { + "commits": { + "sum": { + "field": "numberOfCommits" + } + }, + "thirtieth_difference": { + "serial_diff": { + "buckets_path": "commits", + "lag": 30 + } + } + } + } + } +} +---- + diff --git a/docs/asciidoc/aggregations/pipeline/sum-bucket/sum-bucket-aggregation-usage.asciidoc b/docs/asciidoc/aggregations/pipeline/sum-bucket/sum-bucket-aggregation-usage.asciidoc new file mode 100644 index 00000000000..be50897a8cf --- /dev/null +++ b/docs/asciidoc/aggregations/pipeline/sum-bucket/sum-bucket-aggregation-usage.asciidoc @@ -0,0 +1,90 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[sum-bucket-aggregation-usage]] +== Sum Bucket Aggregation Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Size(0) +.Aggregations(a => a + .DateHistogram("projects_started_per_month", dh => dh + .Field(p => p.StartedOn) + .Interval(DateInterval.Month) + .Aggregations(aa => aa + .Sum("commits", sm => sm + .Field(p => p.NumberOfCommits) + ) + ) + ) + .SumBucket("sum_of_commits", aaa => aaa + .BucketsPath("projects_started_per_month>commits") + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest() +{ + Size = 0, + Aggregations = new DateHistogramAggregation("projects_started_per_month") + { + Field = "startedOn", + Interval = DateInterval.Month, + Aggregations = new SumAggregation("commits", "numberOfCommits") + } + && new SumBucketAggregation("sum_of_commits", "projects_started_per_month>commits") +} +---- + +[source,javascript] 
+.Example json output +---- +{ + "size": 0, + "aggs": { + "projects_started_per_month": { + "date_histogram": { + "field": "startedOn", + "interval": "month" + }, + "aggs": { + "commits": { + "sum": { + "field": "numberOfCommits" + } + } + } + }, + "sum_of_commits": { + "sum_bucket": { + "buckets_path": "projects_started_per_month>commits" + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +var projectsPerMonth = response.Aggs.DateHistogram("projects_started_per_month"); +projectsPerMonth.Should().NotBeNull(); +projectsPerMonth.Buckets.Should().NotBeNull(); +projectsPerMonth.Buckets.Count.Should().BeGreaterThan(0); +var commitsSum = response.Aggs.SumBucket("sum_of_commits"); +commitsSum.Should().NotBeNull(); +commitsSum.Value.Should().BeGreaterThan(0); +---- + diff --git a/docs/asciidoc/aggregations/writing-aggregations.asciidoc b/docs/asciidoc/aggregations/writing-aggregations.asciidoc new file mode 100644 index 00000000000..489f5ae7f32 --- /dev/null +++ b/docs/asciidoc/aggregations/writing-aggregations.asciidoc @@ -0,0 +1,156 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[writing-aggregations]] +== Writing Aggregations + +NEST allows you to write your aggregations using + +* a strict fluent DSL + +* a verbatim object initializer syntax that maps verbatim to the elasticsearch API + +* a more terse object initializer aggregation DSL + +Three different ways, yikes that's a lot to take in! Lets go over them one by one and explain when you might +want to use each. + +This is the json output for each example + +=== Fluent DSL + +The fluent lambda syntax is the most terse way to write aggregations. 
+It benefits from types that are carried over to sub aggregations
+
+=== Fluent DSL Example
+
+[source,csharp]
+----
+s => s
+.Aggregations(aggs => aggs
+    .Children("name_of_child_agg", child => child
+        .Aggregations(childAggs => childAggs
+            .Average("average_per_child", avg => avg.Field(p => p.ConfidenceFactor))
+            .Max("max_per_child", avg => avg.Field(p => p.ConfidenceFactor))
+        )
+    )
+)
+----
+
+=== Object Initializer syntax
+
+The object initializer syntax (OIS) is a one-to-one mapping with how aggregations
+have to be represented in the Elasticsearch API. While it has the benefit of being a one-to-one
+mapping, being dictionary based in C# means it can grow exponentially in complexity rather quickly.
+
+[source,csharp]
+----
+new SearchRequest
+{
+    Aggregations = new ChildrenAggregation("name_of_child_agg", typeof(CommitActivity))
+    {
+        Aggregations =
+            new AverageAggregation("average_per_child", "confidenceFactor")
+            && new MaxAggregation("max_per_child", "confidenceFactor")
+    }
+}
+----
+
+=== Terse Object Initializer DSL
+
+For this reason the OIS syntax can be shortened dramatically by using the `*Agg` related family.
+These allow you to forego introducing intermediary Dictionaries to represent the aggregation DSL.
+It also allows you to combine multiple aggregations using the bitwise AND (`&&`) operator.
+
+Compare the following example with the previous vanilla OIS syntax
+
+[source,csharp]
+----
+new SearchRequest
+{
+    Aggregations = new ChildrenAggregation("name_of_child_agg", typeof(CommitActivity))
+    {
+        Aggregations =
+            new AverageAggregation("average_per_child", Field(p => p.ConfidenceFactor))
+            && new MaxAggregation("max_per_child", Field(p => p.ConfidenceFactor))
+    }
+}
+----
+
+=== Aggregating over a collection of aggregations
+
+An advanced scenario may involve an existing collection of aggregation functions that should be set as aggregations
+on the request.
Using LINQ's `.Aggregate()` method, each function can be applied to the aggregation descriptor
+(`childAggs` below) in turn, returning the descriptor after each function application.
+
+[source,csharp]
+----
+var aggregations = new List<Func<AggregationContainerDescriptor<CommitActivity>, IAggregationContainer>> <1>
+{
+    a => a.Average("average_per_child", avg => avg.Field(p => p.ConfidenceFactor)),
+    a => a.Max("max_per_child", avg => avg.Field(p => p.ConfidenceFactor))
+};
+return s => s
+    .Aggregations(aggs => aggs
+        .Children("name_of_child_agg", child => child
+            .Aggregations(childAggs =>
+                aggregations.Aggregate(childAggs, (acc, agg) => { agg(acc); return acc; }) <2>
+            )
+        )
+    );
+----
+<1> a list of aggregation functions to apply
+
+<2> Using LINQ's `Aggregate()` function to accumulate/apply all of the aggregation functions
+
+[[aggs-vs-aggregations]]
+=== Aggs vs. Aggregations
+
+The response exposes both `.Aggregations` and `.Aggs` properties for handling aggregations. Why two properties you ask?
+Well, the former is a dictionary of aggregation names to `IAggregate` types, a common interface for
+aggregation responses (termed __Aggregates__ in NEST), and the latter is a convenience helper to get the right type
+of aggregation response out of the dictionary based on a key name.
+
+This is better illustrated with an example
+
+Let's imagine we make the following request.
+
+[source,csharp]
+----
+s => s
+    .Aggregations(aggs => aggs
+        .Children("name_of_child_agg", child => child
+            .Aggregations(childAggs => childAggs
+                .Average("average_per_child", avg => avg.Field(p => p.ConfidenceFactor))
+                .Max("max_per_child", avg => avg.Field(p => p.ConfidenceFactor))
+            )
+        )
+    )
+----
+
+=== Aggs usage
+
+Now, using `.Aggs`, we can easily get the `Children` aggregation response out and from that,
+the `Average` and `Max` sub aggregations.
+ +[source,csharp] +---- +response.IsValid.Should().BeTrue(); + +var childAggregation = response.Aggs.Children("name_of_child_agg"); + +var averagePerChild = childAggregation.Average("average_per_child"); + +averagePerChild.Should().NotBeNull(); <1> + +var maxPerChild = childAggregation.Max("max_per_child"); + +maxPerChild.Should().NotBeNull(); <2> +---- +<1> Do something with the average per child. Here we just assert it's not null + +<2> Do something with the max per child. Here we just assert it's not null + diff --git a/docs/asciidoc/analysis/analyzers/analyzer-usage.asciidoc b/docs/asciidoc/analysis/analyzers/analyzer-usage.asciidoc new file mode 100644 index 00000000000..223b62e0370 --- /dev/null +++ b/docs/asciidoc/analysis/analyzers/analyzer-usage.asciidoc @@ -0,0 +1,77 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[analyzer-usage]] +== Analyzer Usage + +=== Fluent DSL Example + +[source,csharp] +---- +FluentExample +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +InitializerExample +---- + +[source,javascript] +.Example json output +---- +{ + "analysis": { + "analyzer": { + "myCustom": { + "type": "custom", + "tokenizer": "ng", + "filter": [ + "myAscii", + "kstem" + ], + "char_filter": [ + "stripMe", + "patterned" + ] + }, + "myKeyword": { + "type": "keyword" + }, + "myPattern": { + "type": "pattern", + "pattern": "\\w" + }, + "mySimple": { + "type": "simple" + }, + "myLanguage": { + "type": "dutch" + }, + "mySnow": { + "type": "snowball", + "language": "Dutch" + }, + "myStandard": { + "type": "standard", + "max_token_length": 2 + }, + "myStop": { + "type": "stop", + "stopwords_path": "analysis/stopwords.txt" + }, + "myWhiteSpace": { + "type": "whitespace" + }, + "myWhiteSpace2": { + "type": "whitespace" + } + } + } +} +---- + diff --git 
a/docs/asciidoc/analysis/char-filters/char-filter-usage.asciidoc b/docs/asciidoc/analysis/char-filters/char-filter-usage.asciidoc new file mode 100644 index 00000000000..430437db32b --- /dev/null +++ b/docs/asciidoc/analysis/char-filters/char-filter-usage.asciidoc @@ -0,0 +1,48 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[char-filter-usage]] +== Char Filter Usage + +=== Fluent DSL Example + +[source,csharp] +---- +FluentExample +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +InitializerExample +---- + +[source,javascript] +.Example json output +---- +{ + "analysis": { + "char_filter": { + "stripMe": { + "type": "html_strip" + }, + "patterned": { + "pattern": "x", + "replacement": "y", + "type": "pattern_replace" + }, + "mapped": { + "mappings": [ + "a=>b" + ], + "type": "mapping" + } + } + } +} +---- + diff --git a/docs/asciidoc/analysis/token-filters/token-filter-usage.asciidoc b/docs/asciidoc/analysis/token-filters/token-filter-usage.asciidoc new file mode 100644 index 00000000000..9b8870f4a3b --- /dev/null +++ b/docs/asciidoc/analysis/token-filters/token-filter-usage.asciidoc @@ -0,0 +1,240 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[token-filter-usage]] +== Token Filter Usage + +=== Fluent DSL Example + +[source,csharp] +---- +FluentExample +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +InitializerExample +---- + +[source,javascript] +.Example json output +---- +{ + "analysis": { + "filter": { + "myAscii": { + "type": "asciifolding", + "preserveOriginal": true + }, + "myCommonGrams": { + "type": "common_grams", + "common_words": [ + "x", + "y", + "z" + ], + "ignore_case": true, + "query_mode": true + }, + "mydp": { + "type": 
"delimited_payload_filter", + "delimiter": "-", + "encoding": "identity" + }, + "dcc": { + "type": "dictionary_decompounder", + "word_list": [ + "x", + "y", + "z" + ], + "min_word_size": 2, + "min_subword_size": 2, + "max_subword_size": 2, + "only_longest_match": true + }, + "etf": { + "type": "edge_ngram", + "min_gram": 1, + "max_gram": 2 + }, + "elision": { + "type": "elision", + "articles": [ + "a", + "b", + "c" + ] + }, + "hunspell": { + "type": "hunspell", + "ignore_case": true, + "locale": "en_US", + "dictionary": "path_to_dict", + "dedup": true, + "longest_only": true + }, + "hypdecomp": { + "type": "hyphenation_decompounder", + "word_list": [ + "x", + "y", + "z" + ], + "min_word_size": 2, + "min_subword_size": 2, + "max_subword_size": 2, + "only_longest_match": true, + "hyphenation_patterns_path": "analysis/fop.xml" + }, + "keeptypes": { + "type": "keep_types", + "types": [ + "", + "" + ] + }, + "keepwords": { + "type": "keep", + "keep_words": [ + "a", + "b", + "c" + ], + "keep_words_case": true + }, + "marker": { + "type": "keyword_marker", + "keywords": [ + "a", + "b" + ], + "ignore_case": true + }, + "kstem": { + "type": "kstem" + }, + "length": { + "type": "length", + "min": 10, + "max": 200 + }, + "limit": { + "type": "limit", + "max_token_count": 12, + "consume_all_tokens": true + }, + "lc": { + "type": "lowercase" + }, + "ngram": { + "type": "ngram", + "min_gram": 3, + "max_gram": 30 + }, + "pc": { + "type": "pattern_capture", + "patterns": [ + "\\d", + "\\w" + ], + "preserve_original": true + }, + "pr": { + "type": "pattern_replace", + "pattern": "(\\d|\\w)", + "replacement": "replacement" + }, + "porter": { + "type": "porter_stem" + }, + "rev": { + "type": "reverse" + }, + "shing": { + "type": "shingle", + "min_shingle_size": 8, + "max_shingle_size": 12, + "output_unigrams": true, + "output_unigrams_if_no_shingles": true, + "token_separator": "|", + "filler_token": "x" + }, + "snow": { + "type": "snowball", + "language": "Dutch" + }, + "standard": 
{ + "type": "standard" + }, + "stem": { + "type": "stemmer", + "language": "arabic" + }, + "stemo": { + "type": "stemmer_override", + "rules_path": "analysis/custom_stems.txt" + }, + "stop": { + "type": "stop", + "stopwords": [ + "x", + "y", + "z" + ], + "ignore_case": true, + "remove_trailing": true + }, + "syn": { + "type": "synonym", + "synonyms_path": "analysis/stopwords.txt", + "format": "wordnet", + "synonyms": [ + "x=>y", + "z=>s" + ], + "ignore_case": true, + "expand": true, + "tokenizer": "whitespace" + }, + "trimmer": { + "type": "trim" + }, + "truncer": { + "type": "truncate", + "length": 100 + }, + "uq": { + "type": "unique", + "only_on_same_position": true + }, + "upper": { + "type": "uppercase" + }, + "wd": { + "type": "word_delimiter", + "generate_word_parts": true, + "generate_number_parts": true, + "catenate_words": true, + "catenate_numbers": true, + "catenate_all": true, + "split_on_case_change": true, + "preserve_original": true, + "split_on_numerics": true, + "stem_english_possessive": true, + "protected_words": [ + "x", + "y", + "z" + ] + } + } + } +} +---- + diff --git a/docs/asciidoc/analysis/tokenizers/tokenizer-usage.asciidoc b/docs/asciidoc/analysis/tokenizers/tokenizer-usage.asciidoc new file mode 100644 index 00000000000..61048a3a42e --- /dev/null +++ b/docs/asciidoc/analysis/tokenizers/tokenizer-usage.asciidoc @@ -0,0 +1,76 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[tokenizer-usage]] +== Tokenizer Usage + +=== Fluent DSL Example + +[source,csharp] +---- +FluentExample +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +InitializerExample +---- + +[source,javascript] +.Example json output +---- +{ + "analysis": { + "tokenizer": { + "endgen": { + "min_gram": 1, + "max_gram": 2, + "token_chars": [ + "digit", + "letter" + ], + "type": "edge_ngram" + }, + "ng": { + "min_gram": 1, 
+ "max_gram": 2, + "token_chars": [ + "digit", + "letter" + ], + "type": "ngram" + }, + "path": { + "delimiter": "|", + "replacement": "-", + "buffer_size": 2048, + "reverse": true, + "skip": 1, + "type": "path_hierarchy" + }, + "pattern": { + "pattern": "\\W+", + "flags": "CASE_INSENSITIVE", + "group": 1, + "type": "pattern" + }, + "standard": { + "type": "standard" + }, + "uax": { + "max_token_length": 12, + "type": "uax_url_email" + }, + "whitespace": { + "type": "whitespace" + } + } + } +} +---- + diff --git a/docs/asciidoc/ClientConcepts/LowLevel/class.png b/docs/asciidoc/class.png similarity index 100% rename from docs/asciidoc/ClientConcepts/LowLevel/class.png rename to docs/asciidoc/class.png diff --git a/docs/asciidoc/client-concepts.asciidoc b/docs/asciidoc/client-concepts.asciidoc new file mode 100644 index 00000000000..9c20575e2e1 --- /dev/null +++ b/docs/asciidoc/client-concepts.asciidoc @@ -0,0 +1,4 @@ +include::low-level.asciidoc[] + +include::high-level.asciidoc[] + diff --git a/docs/asciidoc/ClientConcepts/ConnectionPooling/BuildingBlocks/ConnectionPooling.Doc.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/building-blocks/connection-pooling.asciidoc similarity index 66% rename from docs/asciidoc/ClientConcepts/ConnectionPooling/BuildingBlocks/ConnectionPooling.Doc.asciidoc rename to docs/asciidoc/client-concepts/connection-pooling/building-blocks/connection-pooling.asciidoc index 8d4ec9d1de7..20dd5810a89 100644 --- a/docs/asciidoc/ClientConcepts/ConnectionPooling/BuildingBlocks/ConnectionPooling.Doc.asciidoc +++ b/docs/asciidoc/client-concepts/connection-pooling/building-blocks/connection-pooling.asciidoc @@ -1,15 +1,29 @@ -= Connection Pooling +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[connection-pooling]] +== Connection Pooling + Connection pooling is the internal mechanism that takes care of 
registering what nodes there are in the cluster and which -we can use to issue client calls on. +NEST can use to issue client calls on. There are 3 types of connection pool + +* <> + +* <> + +* <> +[[single-node-connection-pool]] +=== SingleNodeConnectionPool -== SingleNodeConnectionPool The simplest of all connection pools, this takes a single `Uri` and uses that to connect to elasticsearch for all the calls It doesn't opt in to sniffing and pinging behavior, and will never mark nodes dead or alive. The one `Uri` it holds is always -ready to go. +ready to go. - -[source, csharp] +[source,csharp] ---- var uri = new Uri("http://localhost:9201"); var pool = new SingleNodeConnectionPool(uri); @@ -17,145 +31,153 @@ pool.Nodes.Should().HaveCount(1); var node = pool.Nodes.First(); node.Uri.Port.Should().Be(9201); ---- + This type of pool is hardwired to opt out of sniffing -[source, csharp] +[source,csharp] ---- pool.SupportsReseeding.Should().BeFalse(); ---- + and pinging -[source, csharp] +[source,csharp] ---- pool.SupportsPinging.Should().BeFalse(); ---- -When you use the low ceremony ElasticClient constructor that takes a single Uri, -We default to this SingleNodeConnectionPool -[source, csharp] +When you use the low ceremony `ElasticClient` constructor that takes a single `Uri`, +We default to using `SingleNodeConnectionPool` + +[source,csharp] ---- var client = new ElasticClient(uri); ----- -[source, csharp] ----- + client.ConnectionSettings.ConnectionPool.Should().BeOfType(); ---- + However we urge that you always pass your connection settings explicitly -[source, csharp] +[source,csharp] ---- client = new ElasticClient(new ConnectionSettings(uri)); ----- -[source, csharp] ----- + client.ConnectionSettings.ConnectionPool.Should().BeOfType(); ---- + or even better pass the connection pool explicitly -[source, csharp] +[source,csharp] ---- client = new ElasticClient(new ConnectionSettings(pool)); ----- -[source, csharp] ----- + 
client.ConnectionSettings.ConnectionPool.Should().BeOfType(); ---- -== StaticConnectionPool -The static connection pool is great if you have a known small sized cluster and do no want to enable -sniffing to find out the cluster topology. +[[static-connection-pool]] + +=== StaticConnectionPool -[source, csharp] +The static connection pool is great if you have a known small sized cluster and do no want to enable +sniffing to find out the cluster topology. + +[source,csharp] ---- var uris = Enumerable.Range(9200, 5).Select(p => new Uri("http://localhost:" + p)); ---- + a connection pool can be seeded using an enumerable of `Uri`s -[source, csharp] +[source,csharp] ---- var pool = new StaticConnectionPool(uris); ---- -Or using an enumerable of `Node` -[source, csharp] ----- -var nodes = uris.Select(u=>new Node(u)); ----- -[source, csharp] +Or using an enumerable of `Node`s + +[source,csharp] ---- +var nodes = uris.Select(u => new Node(u)); + pool = new StaticConnectionPool(nodes); ---- + This type of pool is hardwired to opt out of sniffing -[source, csharp] +[source,csharp] ---- pool.SupportsReseeding.Should().BeFalse(); ---- + but supports pinging when enabled -[source, csharp] +[source,csharp] ---- pool.SupportsPinging.Should().BeTrue(); ---- -To create a client using this static connection pool pass -the connection pool to the connectionsettings you pass to ElasticClient -[source, csharp] +To create a client using this static connection pool, pass +the connection pool to the `ConnectionSettings` you pass to `ElasticClient` + +[source,csharp] ---- var client = new ElasticClient(new ConnectionSettings(pool)); ----- -[source, csharp] ----- + client.ConnectionSettings.ConnectionPool.Should().BeOfType(); ---- -== SniffingConnectionPool -A subclass of StaticConnectionPool that allows itself to be reseeded at run time. -It comes with a very minor overhead of a `ReaderWriterLockSlim` to ensure thread safety. 
+[[sniffing-connection-pool]] + +=== SniffingConnectionPool -[source, csharp] +A subclass of `StaticConnectionPool` that allows itself to be reseeded at run time. +It comes with a very minor overhead of a `ReaderWriterLockSlim` to ensure thread safety. + +[source,csharp] ---- var uris = Enumerable.Range(9200, 5).Select(p => new Uri("http://localhost:" + p)); ---- + a connection pool can be seeded using an enumerable of `Uri` -[source, csharp] +[source,csharp] ---- var pool = new SniffingConnectionPool(uris); ---- -Or using an enumerable of `Node` -A major benefit here is you can include known node roles when seeding + +Or using an enumerable of `Node`s. +A major benefit here is you can include known node roles when seeding and NEST can use this information to favour sniffing on master eligible nodes first and take master only nodes out of rotation for issuing client calls on. -[source, csharp] +[source,csharp] ---- var nodes = uris.Select(u=>new Node(u)); ----- -[source, csharp] ----- + pool = new SniffingConnectionPool(nodes); ---- + This type of pool is hardwired to opt in to sniffing -[source, csharp] +[source,csharp] ---- pool.SupportsReseeding.Should().BeTrue(); ---- + and pinging -[source, csharp] +[source,csharp] ---- pool.SupportsPinging.Should().BeTrue(); ---- + To create a client using the sniffing connection pool pass -the connection pool to the connectionsettings you pass to ElasticClient +the connection pool to the `ConnectionSettings` you pass to `ElasticClient` -[source, csharp] +[source,csharp] ---- var client = new ElasticClient(new ConnectionSettings(pool)); ----- -[source, csharp] ----- + client.ConnectionSettings.ConnectionPool.Should().BeOfType(); ---- + diff --git a/docs/asciidoc/ClientConcepts/ConnectionPooling/BuildingBlocks/DateTimeProviders.Doc.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/building-blocks/date-time-providers.asciidoc similarity index 53% rename from 
docs/asciidoc/ClientConcepts/ConnectionPooling/BuildingBlocks/DateTimeProviders.Doc.asciidoc rename to docs/asciidoc/client-concepts/connection-pooling/building-blocks/date-time-providers.asciidoc index 6865704b143..78a65cb7c73 100644 --- a/docs/asciidoc/ClientConcepts/ConnectionPooling/BuildingBlocks/DateTimeProviders.Doc.asciidoc +++ b/docs/asciidoc/client-concepts/connection-pooling/building-blocks/date-time-providers.asciidoc @@ -1,56 +1,63 @@ -= Date time providers - -Not typically something you'll have to pass to the client but all calls to `System.DateTime.UtcNow` -in the client have been abstracted by `IDateTimeProvider`. This allows us to unit test timeouts and clusterfailover -in run time not being bound to wall clock time. - - -[source, csharp] ----- -var dateTimeProvider = DateTimeProvider.Default; ----- -dates are always returned in UTC - -[source, csharp] ----- -dateTimeProvider.Now().Should().BeCloseTo(DateTime.UtcNow); ----- - -Another responsibility of this interface is to calculate the time a node has to be taken out of rotation -based on the number of attempts to revive it. For very advanced use cases, this might be something of interest -to provide a custom implementation for. - - -[source, csharp] ----- -var dateTimeProvider = DateTimeProvider.Default; ----- - -The default timeout calculation is: `min(timeout * 2 ^ (attempts * 0.5 -1), maxTimeout)` -The default values for `timeout` and `maxTimeout` are - -[source, csharp] ----- -var timeout = TimeSpan.FromMinutes(1); ----- -[source, csharp] ----- -var maxTimeout = TimeSpan.FromMinutes(30); ----- -Plotting these defaults looks as followed: -[[timeout]] -.Default formula, x-axis time in minutes, y-axis number of attempts to revive -image::timeoutplot.png[dead timeout] -The goal here is that whenever a node is resurrected and is found to still be offline, we send it -_back to the doghouse_ for an ever increasingly long period, until we hit a bounded maximum. 
- -[source, csharp] ----- -var timeouts = Enumerable.Range(0, 30) - .Select(attempt => dateTimeProvider.DeadTime(attempt, timeout, maxTimeout)) - .ToList(); ----- -[source, csharp] ----- -increasedTimeout.Should().BeWithin(maxTimeout); ----- +:section-number: 4.5 + +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[date-time-providers]] +== Date time providers + +Not typically something you'll have to pass to the client but all calls to `System.DateTime.UtcNow` +in the client have been abstracted by `IDateTimeProvider`. This allows us to unit test timeouts and cluster failover +without being bound to wall clock time as calculated by using `System.DateTime.UtcNow` directly. + +[source,csharp] +---- +var dateTimeProvider = DateTimeProvider.Default; +---- + +dates are always returned in UTC + +[source,csharp] +---- +dateTimeProvider.Now().Should().BeCloseTo(DateTime.UtcNow); +---- + +Another responsibility of this interface is to calculate the time a node has to be taken out of rotation +based on the number of attempts to revive it. For very advanced use cases, this might be something of interest +to provide a custom implementation for. 
+
+[source,csharp]
+----
+var dateTimeProvider = DateTimeProvider.Default;
+----
+
+The default timeout calculation is: `min(timeout * 2 ^ (attempts * 0.5 -1), maxTimeout)`, where the
+default values for `timeout` and `maxTimeout` are
+
+[source,csharp]
+----
+var timeout = TimeSpan.FromMinutes(1);
+
+var maxTimeout = TimeSpan.FromMinutes(30);
+----
+
+Plotting these defaults looks as follows:
+
+[[timeout]]
+.Default formula, x-axis time in minutes, y-axis number of attempts to revive
+image::timeoutplot.png[dead timeout]
+
+The goal here is that whenever a node is resurrected and is found to still be offline, we send it _back to the doghouse_ for an ever increasingly long period, until we hit a bounded maximum.
+
+[source,csharp]
+----
+var timeouts = Enumerable.Range(0, 30)
+    .Select(attempt => dateTimeProvider.DeadTime(attempt, timeout, maxTimeout))
+    .ToList();
+
+increasedTimeout.Should().BeWithin(maxTimeout);
+----
+
diff --git a/docs/asciidoc/ClientConcepts/ConnectionPooling/BuildingBlocks/KeepingTrackOfNodes.Doc.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/building-blocks/keeping-track-of-nodes.asciidoc
similarity index 69%
rename from docs/asciidoc/ClientConcepts/ConnectionPooling/BuildingBlocks/KeepingTrackOfNodes.Doc.asciidoc
rename to docs/asciidoc/client-concepts/connection-pooling/building-blocks/keeping-track-of-nodes.asciidoc
index ab9533441a7..2bbeb6c13a1 100644
--- a/docs/asciidoc/ClientConcepts/ConnectionPooling/BuildingBlocks/KeepingTrackOfNodes.Doc.asciidoc
+++ b/docs/asciidoc/client-concepts/connection-pooling/building-blocks/keeping-track-of-nodes.asciidoc
@@ -1,106 +1,139 @@
-= Keeping track of nodes
+:section-number: 4.4
+:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current
+:github: https://github.com/elastic/elasticsearch-net
-[source, csharp]
+:nuget: https://www.nuget.org/packages
+
+[[keeping-track-of-nodes]]
+== Keeping track of nodes
+
+=== Creating a Node
+
+A `Node` can be instantiated by
passing it a `Uri` + +[source,csharp] ---- var node = new Node(new Uri("http://localhost:9200")); + node.Uri.Should().NotBeNull(); node.Uri.Port.Should().Be(9200); ---- + By default master eligible and holds data is presumed to be true * -[source, csharp] +[source,csharp] ---- node.MasterEligible.Should().BeTrue(); ----- -[source, csharp] ----- + node.HoldsData.Should().BeTrue(); ---- + Is resurrected is true on first usage, hints to the transport that a ping might be useful -[source, csharp] +[source,csharp] ---- node.IsResurrected.Should().BeTrue(); ---- -When instantiating your connection pool you could switch these to false to initialize the client to -a known cluster topology. +When instantiating your connection pool you could switch these to false to initialize the client to +a known cluster topology. -passing a node with a path should be preserved. Sometimes an elasticsearch node lives behind a proxy +=== Building a Node path -[source, csharp] +passing a node with a path should be preserved. 
+Sometimes an elasticsearch node lives behind a proxy + +[source,csharp] ---- var node = new Node(new Uri("http://test.example/elasticsearch")); ----- -[source, csharp] ----- + node.Uri.Port.Should().Be(80); + node.Uri.AbsolutePath.Should().Be("/elasticsearch/"); ---- + We force paths to end with a forward slash so that they can later be safely combined -[source, csharp] +[source,csharp] ---- var combinedPath = new Uri(node.Uri, "index/type/_search"); ----- -[source, csharp] ----- + combinedPath.AbsolutePath.Should().Be("/elasticsearch/index/type/_search"); ---- + which is exactly what the `CreatePath` method does on `Node` -[source, csharp] +[source,csharp] ---- combinedPath = node.CreatePath("index/type/_search"); + +combinedPath.AbsolutePath.Should().Be("/elasticsearch/index/type/_search"); ---- -[source, csharp] + +=== Marking Nodes + +[source,csharp] ---- -combinedPath.AbsolutePath.Should().Be("/elasticsearch/index/type/_search"); var node = new Node(new Uri("http://localhost:9200")); + node.FailedAttempts.Should().Be(0); + node.IsAlive.Should().BeTrue(); ---- every time a node is marked dead the number of attempts should increase and the passed datetime should be exposed. 
- -[source, csharp] +[source,csharp] ---- var deadUntil = DateTime.Now.AddMinutes(1); + node.MarkDead(deadUntil); + node.FailedAttempts.Should().Be(i + 1); + node.IsAlive.Should().BeFalse(); + node.DeadUntil.Should().Be(deadUntil); ---- -however when marking a node alive deaduntil should be reset and attempts reset to 0 -[source, csharp] +however when marking a node alive, the `DeadUntil` property should be reset and `FailedAttempts` reset to 0 + +[source,csharp] ---- node.MarkAlive(); ----- -[source, csharp] ----- + node.FailedAttempts.Should().Be(0); + node.DeadUntil.Should().Be(default(DateTime)); + node.IsAlive.Should().BeTrue(); ---- -Nodes are considered equal if they have the same endpoint no matter what other metadata is associated -[source, csharp] +=== Node Equality + +Nodes are considered equal if they have the same endpoint, no matter what other metadata is associated + +[source,csharp] ---- var node = new Node(new Uri("http://localhost:9200")) { MasterEligible = false }; ----- -[source, csharp] ----- + var nodeAsMaster = new Node(new Uri("http://localhost:9200")) { MasterEligible = true }; + (node == nodeAsMaster).Should().BeTrue(); + (node != nodeAsMaster).Should().BeFalse(); + var uri = new Uri("http://localhost:9200"); + (node == uri).Should().BeTrue(); + var differentUri = new Uri("http://localhost:9201"); + (node != differentUri).Should().BeTrue(); + node.Should().Be(nodeAsMaster); ---- + diff --git a/docs/asciidoc/ClientConcepts/ConnectionPooling/BuildingBlocks/RequestPipelines.doc.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/building-blocks/request-pipelines.asciidoc similarity index 77% rename from docs/asciidoc/ClientConcepts/ConnectionPooling/BuildingBlocks/RequestPipelines.doc.asciidoc rename to docs/asciidoc/client-concepts/connection-pooling/building-blocks/request-pipelines.asciidoc index a8bcb9555d8..9f6a863b999 100644 --- a/docs/asciidoc/ClientConcepts/ConnectionPooling/BuildingBlocks/RequestPipelines.doc.asciidoc +++ 
b/docs/asciidoc/client-concepts/connection-pooling/building-blocks/request-pipelines.asciidoc @@ -1,169 +1,226 @@ -= Request pipeline -Every request is executed in the context of `RequestPipeline` when using the default `ITransport` implementation. +:section-number: 4.2 +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current +:github: https://github.com/elastic/elasticsearch-net -[source, csharp] +:nuget: https://www.nuget.org/packages + +[[request-pipeline]] +== Request Pipeline + +Every request is executed in the context of a `RequestPipeline` when using the +default `ITransport` implementation. + +[source,csharp] ---- var settings = TestClient.CreateSettings(); ---- -When calling Request(Async) on Transport the whole coordination of the request is deferred to a new instance in a `using` block. -[source, csharp] +When calling Request/RequestAsync on Transport the whole coordination of the request is deferred to a new instance in a `using` block. + +[source,csharp] ---- var pipeline = new RequestPipeline(settings, DateTimeProvider.Default, new MemoryStreamFactory(), new SearchRequestParameters()); ----- -[source, csharp] ----- + pipeline.GetType().Should().Implement(); ---- -However the transport does not instantiate RequestPipeline directly, it uses a pluggable `IRequestPipelineFactory` -[source, csharp] +However the transport does not instantiate `RequestPipeline` directly; it uses a pluggable `IRequestPipelineFactory` +to create it + +[source,csharp] ---- var requestPipelineFactory = new RequestPipelineFactory(); ----- -[source, csharp] ----- -var requestPipeline = requestPipelineFactory.Create(settings, DateTimeProvider.Default, new MemoryStreamFactory(), new SearchRequestParameters()); + +var requestPipeline = requestPipelineFactory.Create( + settings, + DateTimeProvider.Default, <1> + new MemoryStreamFactory(), + new SearchRequestParameters()); requestPipeline.Should().BeOfType(); requestPipeline.GetType().Should().Implement(); ---- 
-which can be passed to the transport when instantiating a client +<1> An <> + +you can pass your own `IRequestPipeline` implementation to the transport when instantiating a client +allowing you to have requests executed on your own custom request pipeline -[source, csharp] +[source,csharp] ---- var transport = new Transport(settings, requestPipelineFactory, DateTimeProvider.Default, new MemoryStreamFactory()); ---- -this allows you to have requests executed on your own custom request pipeline -[source, csharp] +[source,csharp] ---- var pool = setupPool(new[] { TestClient.CreateNode(), TestClient.CreateNode(9201) }); + var settings = new ConnectionSettings(pool, TestClient.CreateConnection()); + settings = settingsSelector?.Invoke(settings) ?? settings; +---- + +[source,csharp] +---- var singleNodePipeline = CreatePipeline(uris => new SingleNodeConnectionPool(uris.First())); + var staticPipeline = CreatePipeline(uris => new StaticConnectionPool(uris)); + var sniffingPipeline = CreatePipeline(uris => new SniffingConnectionPool(uris)); ---- + Here we have setup three pipelines using three different connection pools, lets see how they behave -[source, csharp] +[source,csharp] ---- singleNodePipeline.FirstPoolUsageNeedsSniffing.Should().BeFalse(); ----- -[source, csharp] ----- + staticPipeline.FirstPoolUsageNeedsSniffing.Should().BeFalse(); + sniffingPipeline.FirstPoolUsageNeedsSniffing.Should().BeTrue(); ---- + Only the cluster that supports reseeding will opt in to FirstPoolUsageNeedsSniffing() You can however disable this on ConnectionSettings -[source, csharp] +[source,csharp] ---- sniffingPipeline = CreatePipeline(uris => new SniffingConnectionPool(uris), s => s.SniffOnStartup(false)); + +sniffingPipeline.FirstPoolUsageNeedsSniffing.Should().BeFalse(); ---- -[source, csharp] + +[source,csharp] ---- -sniffingPipeline.FirstPoolUsageNeedsSniffing.Should().BeFalse(); var singleNodePipeline = CreatePipeline(uris => new SingleNodeConnectionPool(uris.First())); + var 
staticPipeline = CreatePipeline(uris => new StaticConnectionPool(uris)); + var sniffingPipeline = CreatePipeline(uris => new SniffingConnectionPool(uris)); + singleNodePipeline.SniffsOnConnectionFailure.Should().BeFalse(); + staticPipeline.SniffsOnConnectionFailure.Should().BeFalse(); + sniffingPipeline.SniffsOnConnectionFailure.Should().BeTrue(); ---- + Only the cluster that supports reseeding will opt in to SniffsOnConnectionFailure() You can however disable this on ConnectionSettings -[source, csharp] +[source,csharp] ---- sniffingPipeline = CreatePipeline(uris => new SniffingConnectionPool(uris), s => s.SniffOnConnectionFault(false)); + +sniffingPipeline.SniffsOnConnectionFailure.Should().BeFalse(); ---- -[source, csharp] + +[source,csharp] ---- -sniffingPipeline.SniffsOnConnectionFailure.Should().BeFalse(); var dateTime = new TestableDateTimeProvider(); + var singleNodePipeline = CreatePipeline(uris => new SingleNodeConnectionPool(uris.First(), dateTime), dateTimeProvider: dateTime); + var staticPipeline = CreatePipeline(uris => new StaticConnectionPool(uris, dateTimeProvider: dateTime), dateTimeProvider: dateTime); + var sniffingPipeline = CreatePipeline(uris => new SniffingConnectionPool(uris, dateTimeProvider: dateTime), dateTimeProvider: dateTime); + singleNodePipeline.SniffsOnStaleCluster.Should().BeFalse(); + staticPipeline.SniffsOnStaleCluster.Should().BeFalse(); + sniffingPipeline.SniffsOnStaleCluster.Should().BeTrue(); + singleNodePipeline.StaleClusterState.Should().BeFalse(); + staticPipeline.StaleClusterState.Should().BeFalse(); + sniffingPipeline.StaleClusterState.Should().BeFalse(); ---- + go one hour into the future -[source, csharp] +[source,csharp] ---- dateTime.ChangeTime(d => d.Add(TimeSpan.FromHours(2))); ---- + connection pools that do not support reseeding never go stale -[source, csharp] +[source,csharp] ---- singleNodePipeline.StaleClusterState.Should().BeFalse(); ----- -[source, csharp] ----- + 
staticPipeline.StaleClusterState.Should().BeFalse(); ---- + the sniffing connection pool supports reseeding so the pipeline will signal the state is out of date -[source, csharp] +[source,csharp] ---- sniffingPipeline.StaleClusterState.Should().BeTrue(); ---- -A request pipeline also checks whether the overall time across multiple retries exceeds the request timeout -See the maxretry documentation for more details, here we assert that our request pipeline exposes this propertly +A request pipeline also checks whether the overall time across multiple retries exceeds the request timeout. +See the <> for more details, here we assert that our request pipeline exposes this properly -[source, csharp] +[source,csharp] ---- var dateTime = new TestableDateTimeProvider(); + var singleNodePipeline = CreatePipeline(uris => new SingleNodeConnectionPool(uris.First(), dateTime), dateTimeProvider: dateTime); + var staticPipeline = CreatePipeline(uris => new StaticConnectionPool(uris, dateTimeProvider: dateTime), dateTimeProvider: dateTime); + var sniffingPipeline = CreatePipeline(uris => new SniffingConnectionPool(uris, dateTimeProvider: dateTime), dateTimeProvider: dateTime); + singleNodePipeline.IsTakingTooLong.Should().BeFalse(); + staticPipeline.IsTakingTooLong.Should().BeFalse(); + sniffingPipeline.IsTakingTooLong.Should().BeFalse(); ---- + go one hour into the future -[source, csharp] +[source,csharp] ---- dateTime.ChangeTime(d => d.Add(TimeSpan.FromHours(2))); ---- + connection pools that do not support reseeding never go stale -[source, csharp] +[source,csharp] ---- singleNodePipeline.IsTakingTooLong.Should().BeTrue(); ----- -[source, csharp] ----- + staticPipeline.IsTakingTooLong.Should().BeTrue(); ---- + the sniffing connection pool supports reseeding so the pipeline will signal the state is out of date -[source, csharp] +[source,csharp] ---- sniffingPipeline.IsTakingTooLong.Should().BeTrue(); ---- + request pipeline exposes the DateTime it started, here we assert it 
started 2 hours in the past -[source, csharp] +[source,csharp] ---- (dateTime.Now() - singleNodePipeline.StartedOn).Should().BePositive().And.BeCloseTo(TimeSpan.FromHours(2)); ----- -[source, csharp] ----- + (dateTime.Now() - staticPipeline.StartedOn).Should().BePositive().And.BeCloseTo(TimeSpan.FromHours(2)); + (dateTime.Now() - sniffingPipeline.StartedOn).Should().BePositive().And.BeCloseTo(TimeSpan.FromHours(2)); +---- + +[source,csharp] +---- var dateTime = new TestableDateTimeProvider(); + var sniffingPipeline = CreatePipeline(uris => new SniffingConnectionPool(uris, dateTimeProvider: dateTime), dateTimeProvider: dateTime) as RequestPipeline; + sniffingPipeline.SniffPath.Should().Be("_nodes/_all/settings?flat_settings&timeout=2s"); ---- + diff --git a/docs/asciidoc/ClientConcepts/ConnectionPooling/BuildingBlocks/timeoutplot.png b/docs/asciidoc/client-concepts/connection-pooling/building-blocks/timeoutplot.png similarity index 100% rename from docs/asciidoc/ClientConcepts/ConnectionPooling/BuildingBlocks/timeoutplot.png rename to docs/asciidoc/client-concepts/connection-pooling/building-blocks/timeoutplot.png diff --git a/docs/asciidoc/client-concepts/connection-pooling/building-blocks/transports.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/building-blocks/transports.asciidoc new file mode 100644 index 00000000000..97e299baa27 --- /dev/null +++ b/docs/asciidoc/client-concepts/connection-pooling/building-blocks/transports.asciidoc @@ -0,0 +1,51 @@ +:section-number: 4.3 + +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[transports]] +== Transports + +The `ITransport` interface can be seen as the motor block of the client. It's interface is deceitfully simple and +it's ultimately responsible from translating a client call to a response. 
+ +If for some reason you do not agree with the way we wrote the internals of the client, +by implementing a custom `ITransport`, you can circumvent all of it and introduce your own. + +Transport is generically typed to a type that implements `IConnectionConfigurationValues`. +This is the minimum `ITransport` needs to report back for the client to function. + +e.g. in the low level client, Elasticsearch.Net, transport is instantiated like this: + +[source,csharp] +---- +var lowLevelTransport = new Transport(new ConnectionConfiguration()); +---- + +and in the high level client, NEST, like this: + +[source,csharp] +---- +var highlevelTransport = new Transport(new ConnectionSettings()); + +var connectionPool = new SingleNodeConnectionPool(new Uri("http://localhost:9200")); +var inMemoryTransport = new Transport(new ConnectionSettings(connectionPool, new InMemoryConnection())); +---- + +The only two methods on `ITransport` are `Request()` and `RequestAsync()`; the default `ITransport` implementation is responsible for introducing +many of the building blocks in the client. If these do not work for you, you can swap them out for your own custom `ITransport` implementation. +If you feel this need, {github}/issues[please let us know] as we'd love to learn why you've gone down this route! 
+ +[source,csharp] +---- +var response = inMemoryTransport.Request>(HttpMethod.GET, "/_search", new { query = new { match_all = new { } } }); + +response = await inMemoryTransport.RequestAsync>( + HttpMethod.GET, + "/_search", + new { query = new { match_all = new { } } }); +---- + diff --git a/docs/asciidoc/client-concepts/connection-pooling/exceptions/unexpected-exceptions.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/exceptions/unexpected-exceptions.asciidoc new file mode 100644 index 00000000000..a49b874df69 --- /dev/null +++ b/docs/asciidoc/client-concepts/connection-pooling/exceptions/unexpected-exceptions.asciidoc @@ -0,0 +1,125 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[unexpected-exceptions]] +== Unexpected exceptions + +When a client call throws an exception that the IConnection can not handle, this exception will bubble +out of the client as an UnexpectedElasticsearchClientException, regardless of whether the client is configured to throw or not. +An IConnection is in charge of knowing what exceptions it can recover from or not. The default IConnection that is based on WebRequest can and +will recover from WebExceptions but others will be grounds for immediately exiting the pipeline. 
+ +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .ClientCalls(r => r.SucceedAlways()) + .ClientCalls(r => r.OnPort(9201).FailAlways(new Exception("boom!"))) + .StaticConnectionPool() + .Settings(s => s.DisablePing()) +); +audit = await audit.TraceCall( + new ClientCall { + { AuditEvent.HealthyResponse, 9200 }, + } +); +audit = await audit.TraceUnexpectedException( + new ClientCall { + { AuditEvent.BadResponse, 9201 }, + }, + (e) => + { + e.FailureReason.Should().Be(PipelineFailure.Unexpected); + e.InnerException.Should().NotBeNull(); + e.InnerException.Message.Should().Be("boom!"); + } +); +e.FailureReason.Should().Be(PipelineFailure.Unexpected); +e.InnerException.Should().NotBeNull(); +e.InnerException.Message.Should().Be("boom!"); +---- + +Sometimes an unexpected exception happens further down in the pipeline, this is why we +wrap them inside an UnexpectedElasticsearchClientException so that information about where +in the pipeline the unexpected exception is not lost, here a call to 9200 fails using a webexception. +It then falls over to 9201 which throws an hard exception from within IConnection. We assert that we +can still see the audit trail for the whole coordinated request. 
+ +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) +#if DOTNETCORE + .ClientCalls(r => r.OnPort(9200).FailAlways(new System.Net.Http.HttpRequestException("recover"))) +#else + .ClientCalls(r => r.OnPort(9200).FailAlways(new WebException("recover"))) +#endif + .ClientCalls(r => r.OnPort(9201).FailAlways(new Exception("boom!"))) + .StaticConnectionPool() + .Settings(s => s.DisablePing()) +); + +audit = await audit.TraceUnexpectedException( + new ClientCall { + { AuditEvent.BadResponse, 9200 }, + { AuditEvent.BadResponse, 9201 }, + }, + (e) => + { + e.FailureReason.Should().Be(PipelineFailure.Unexpected); + e.InnerException.Should().NotBeNull(); + e.InnerException.Message.Should().Be("boom!"); + } +); + +e.FailureReason.Should().Be(PipelineFailure.Unexpected); + +e.InnerException.Should().NotBeNull(); + +e.InnerException.Message.Should().Be("boom!"); +---- + +An unexpected hard exception on ping and sniff is something we *do* try to recover from and fail over. +Here pinging nodes on first use is enabled and 9200 throws on ping; we still fail over to 9201, whose ping succeeds. 
+However the client call on 9201 throws a hard exception we can not recover from + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .Ping(r => r.OnPort(9200).FailAlways(new Exception("ping exception"))) + .Ping(r => r.OnPort(9201).SucceedAlways()) + .ClientCalls(r => r.OnPort(9201).FailAlways(new Exception("boom!"))) + .StaticConnectionPool() + .AllDefaults() +); +---- + +[source,csharp] +---- +audit = await audit.TraceUnexpectedException( + new ClientCall { + { AuditEvent.PingFailure, 9200 }, + { AuditEvent.PingSuccess, 9201 }, + { AuditEvent.BadResponse, 9201 }, + }, + (e) => + { + e.FailureReason.Should().Be(PipelineFailure.Unexpected); +e.InnerException.Should().NotBeNull(); + e.InnerException.Message.Should().Be("boom!"); +e.SeenExceptions.Should().NotBeEmpty(); + var pipelineException = e.SeenExceptions.First(); + pipelineException.FailureReason.Should().Be(PipelineFailure.PingFailure); + pipelineException.InnerException.Message.Should().Be("ping exception"); +var pingException = e.AuditTrail.First(a => a.Event == AuditEvent.PingFailure).Exception; + pingException.Should().NotBeNull(); + pingException.Message.Should().Be("ping exception"); + + } +); +---- + diff --git a/docs/asciidoc/client-concepts/connection-pooling/exceptions/unrecoverable-exceptions.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/exceptions/unrecoverable-exceptions.asciidoc new file mode 100644 index 00000000000..3a1f5c3a497 --- /dev/null +++ b/docs/asciidoc/client-concepts/connection-pooling/exceptions/unrecoverable-exceptions.asciidoc @@ -0,0 +1,188 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[unrecoverable-exceptions]] +== Unrecoverable exceptions + +Unrecoverable exceptions are _excepted_ exceptions that are grounds to exit the client pipeline immediately. 
+By default, the client won't throw on any `ElasticsearchClientException` but instead return an invalid response which +can be detected by checking `.IsValid` on the response +You can configure the client to throw using `ThrowExceptions()` on `ConnectionSettings`. The following test +both a client that throws and one that returns an invalid response with an `.OriginalException` exposed + +The following are recoverable exceptions + +[source,csharp] +---- +var recoverablExceptions = new[] +{ + new PipelineException(PipelineFailure.BadResponse), + new PipelineException(PipelineFailure.PingFailure), +}; + +recoverablExceptions.Should().OnlyContain(e => e.Recoverable); +---- + +and the unrecoverable exceptions + +[source,csharp] +---- +var unrecoverableExceptions = new[] +{ + new PipelineException(PipelineFailure.CouldNotStartSniffOnStartup), + new PipelineException(PipelineFailure.SniffFailure), + new PipelineException(PipelineFailure.Unexpected), + new PipelineException(PipelineFailure.BadAuthentication), + new PipelineException(PipelineFailure.MaxRetriesReached), + new PipelineException(PipelineFailure.MaxTimeoutReached) +}; + +unrecoverableExceptions.Should().OnlyContain(e => !e.Recoverable); +---- + +As an example, let's set up a 10 node cluster that will always succeed when pinged but + will fail with a 401 response when making client calls + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .Ping(r => r.SucceedAlways()) + .ClientCalls(r => r.FailAlways(401)) + .StaticConnectionPool() + .AllDefaults() +); +---- + +Here we make a client call and determine that the first audit event was a successful ping, +followed by a bad response as a result of a bad authentication response + +[source,csharp] +---- +audit = await audit.TraceElasticsearchException( + new ClientCall { + { AuditEvent.PingSuccess, 9200 }, + { AuditEvent.BadResponse, 9200 }, + }, + (e) => + { + e.FailureReason.Should().Be(PipelineFailure.BadAuthentication); + } +); 
+---- + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .Ping(r => r.SucceedAlways()) + .ClientCalls(r => r.FailAlways(401).ReturnResponse(ResponseHtml)) + .StaticConnectionPool() + .AllDefaults() +); + +audit = await audit.TraceElasticsearchException( + new ClientCall { + { AuditEvent.PingSuccess, 9200 }, + { AuditEvent.BadResponse, 9200 }, + }, + (e) => + { + e.FailureReason.Should().Be(PipelineFailure.BadAuthentication); + e.Response.HttpStatusCode.Should().Be(401); + e.Response.ResponseBodyInBytes.Should().BeNull(); + } +); + +e.FailureReason.Should().Be(PipelineFailure.BadAuthentication); + +e.Response.HttpStatusCode.Should().Be(401); + +e.Response.ResponseBodyInBytes.Should().BeNull(); +---- + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .Ping(r => r.SucceedAlways()) + .ClientCalls(r => r.FailAlways(401).ReturnResponse(ResponseHtml)) + .StaticConnectionPool() + .Settings(s=>s.DisableDirectStreaming()) +); + +audit = await audit.TraceElasticsearchException( + new ClientCall { + { AuditEvent.PingSuccess, 9200 }, + { AuditEvent.BadResponse, 9200 }, + }, + (e) => + { + e.FailureReason.Should().Be(PipelineFailure.BadAuthentication); + e.Response.HttpStatusCode.Should().Be(401); + e.Response.ResponseBodyInBytes.Should().NotBeNull(); + var responseString = Encoding.UTF8.GetString(e.Response.ResponseBodyInBytes); + responseString.Should().Contain("nginx/"); + e.DebugInformation.Should().Contain("nginx/"); + } +); + +e.FailureReason.Should().Be(PipelineFailure.BadAuthentication); + +e.Response.HttpStatusCode.Should().Be(401); + +e.Response.ResponseBodyInBytes.Should().NotBeNull(); + +var responseString = Encoding.UTF8.GetString(e.Response.ResponseBodyInBytes); + +responseString.Should().Contain("nginx/"); + +e.DebugInformation.Should().Contain("nginx/"); +---- + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .Ping(r => r.SucceedAlways()) + 
.ClientCalls(r => r.FailAlways(401).ReturnResponse(ResponseHtml)) + .StaticConnectionPool() + .Settings(s=>s.DisableDirectStreaming().DefaultIndex("default-index")) + .ClientProxiesTo( + (c, r) => c.Get("1", s=>s.RequestConfiguration(r)), + async (c, r) => await c.GetAsync("1", s=>s.RequestConfiguration(r)) as IResponse + ) +); + +audit = await audit.TraceElasticsearchException( + new ClientCall { + { AuditEvent.PingSuccess, 9200 }, + { AuditEvent.BadResponse, 9200 }, + }, + (e) => + { + e.FailureReason.Should().Be(PipelineFailure.BadAuthentication); + e.Response.HttpStatusCode.Should().Be(401); + e.Response.ResponseBodyInBytes.Should().NotBeNull(); + var responseString = Encoding.UTF8.GetString(e.Response.ResponseBodyInBytes); + responseString.Should().Contain("nginx/"); + e.DebugInformation.Should().Contain("nginx/"); + } +); + +e.FailureReason.Should().Be(PipelineFailure.BadAuthentication); + +e.Response.HttpStatusCode.Should().Be(401); + +e.Response.ResponseBodyInBytes.Should().NotBeNull(); + +var responseString = Encoding.UTF8.GetString(e.Response.ResponseBodyInBytes); + +responseString.Should().Contain("nginx/"); + +e.DebugInformation.Should().Contain("nginx/"); +---- + diff --git a/docs/asciidoc/client-concepts/connection-pooling/failover/falling-over.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/failover/falling-over.asciidoc new file mode 100644 index 00000000000..981bec1cebc --- /dev/null +++ b/docs/asciidoc/client-concepts/connection-pooling/failover/falling-over.asciidoc @@ -0,0 +1,92 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[falling-over]] +== Fail over + +When using connection pooling and the pool has sufficient nodes a request will be retried if +the call to a node throws an exception or returns a 502 or 503 + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + 
+ .ClientCalls(r => r.FailAlways()) + .ClientCalls(r => r.OnPort(9201).SucceedAlways()) + .StaticConnectionPool() + .Settings(s => s.DisablePing()) +); +audit = await audit.TraceCall( + new ClientCall { + { BadResponse, 9200 }, + { HealthyResponse, 9201 }, + } +); +---- + +=== 502 Bad Gateway + +Will be treated as an error that requires retrying + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .ClientCalls(r => r.FailAlways(502)) + .ClientCalls(r => r.OnPort(9201).SucceedAlways()) + .StaticConnectionPool() + .Settings(s => s.DisablePing()) +); + +audit = await audit.TraceCall( + new ClientCall { + { BadResponse, 9200 }, + { HealthyResponse, 9201 }, + } +); +---- + +=== 503 Service Unavailable + +Will be treated as an error that requires retrying + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .ClientCalls(r => r.FailAlways(503)) + .ClientCalls(r => r.OnPort(9201).SucceedAlways()) + .StaticConnectionPool() + .Settings(s => s.DisablePing()) +); + +audit = await audit.TraceCall( + new ClientCall { + { BadResponse, 9200 }, + { HealthyResponse, 9201 }, + } +); +---- + +If a call returns a valid (_for the request_) HTTP status code other than 502/503, the request won't be retried. 
+ +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .ClientCalls(r => r.FailAlways(418)) + .ClientCalls(r => r.OnPort(9201).SucceedAlways()) + .StaticConnectionPool() + .Settings(s => s.DisablePing()) +); + +audit = await audit.TraceCall( + new ClientCall { + { BadResponse, 9200 }, + } +); +---- + diff --git a/docs/asciidoc/client-concepts/connection-pooling/max-retries/respects-max-retry.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/max-retries/respects-max-retry.asciidoc new file mode 100644 index 00000000000..a932cc42591 --- /dev/null +++ b/docs/asciidoc/client-concepts/connection-pooling/max-retries/respects-max-retry.asciidoc @@ -0,0 +1,158 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[max-retries]] +== Max Retries + +By default, NEST will retry as many times as there are nodes in the cluster that the client knows about. +Retries still respect the request timeout however, +meaning if you have a 100 node cluster and a request timeout of 20 seconds, +the client will retry as many times as it can before giving up at the request timeout of 20 seconds. + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .ClientCalls(r => r.FailAlways()) + .ClientCalls(r => r.OnPort(9209).SucceedAlways()) + .StaticConnectionPool() + .Settings(s => s.DisablePing()) +); +audit = await audit.TraceCall( + new ClientCall { + { BadResponse, 9200 }, + { BadResponse, 9201 }, + { BadResponse, 9202 }, + { BadResponse, 9203 }, + { BadResponse, 9204 }, + { BadResponse, 9205 }, + { BadResponse, 9206 }, + { BadResponse, 9207 }, + { BadResponse, 9208 }, + { HealthyResponse, 9209 } + } +); +---- + +When you have a 100 node cluster, you might want to ensure a fixed number of retries. 
+ +IMPORTANT: the actual number of requests is **initial attempt + set number of retries** + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .ClientCalls(r => r.FailAlways()) + .ClientCalls(r => r.OnPort(9209).SucceedAlways()) + .StaticConnectionPool() + .Settings(s => s.DisablePing().MaximumRetries(3)) +); + +audit = await audit.TraceCall( + new ClientCall { + { BadResponse, 9200 }, + { BadResponse, 9201 }, + { BadResponse, 9202 }, + { BadResponse, 9203 }, + { MaxRetriesReached } + } +); +---- + +In our previous test we simulated very fast failures, but in the real world a call might take upwards of a second. +In this next example, we simulate a particular heavy search that takes 10 seconds to fail, and set a request timeout of 20 seconds. +We see that the request is tried twice and gives up before a third call is attempted, since the call takes 10 seconds and thus can be +tried twice (initial call and one retry) before the request timeout. + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .ClientCalls(r => r.FailAlways().Takes(TimeSpan.FromSeconds(10))) + .ClientCalls(r => r.OnPort(9209).SucceedAlways()) + .StaticConnectionPool() + .Settings(s => s.DisablePing().RequestTimeout(TimeSpan.FromSeconds(20))) +); + +audit = await audit.TraceCall( + new ClientCall { + { BadResponse, 9200 }, + { BadResponse, 9201 }, + { MaxTimeoutReached } + } +); +---- + +If you set a smaller request timeout you might not want it to also affect the retry timeout. +In cases like this, you can configure the `MaxRetryTimeout` separately. +Here we simulate calls taking 3 seconds, a request timeout of 2 seconds and a max retry timeout of 10 seconds. 
+We should see 5 attempts to perform this query, testing that our request timeout cuts the query off short and that +our max retry timeout of 10 seconds wins over the configured request timeout + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .ClientCalls(r => r.FailAlways().Takes(TimeSpan.FromSeconds(3))) + .ClientCalls(r => r.OnPort(9209).FailAlways()) + .StaticConnectionPool() + .Settings(s => s.DisablePing().RequestTimeout(TimeSpan.FromSeconds(2)).MaxRetryTimeout(TimeSpan.FromSeconds(10))) +); + +audit = await audit.TraceCall( + new ClientCall { + { BadResponse, 9200 }, + { BadResponse, 9201 }, + { BadResponse, 9202 }, + { BadResponse, 9203 }, + { BadResponse, 9204 }, + { MaxTimeoutReached } + } +); +---- + +If your retry policy expands beyond the number of available nodes, the client **won't** retry the same node twice + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(2) + .ClientCalls(r => r.FailAlways().Takes(TimeSpan.FromSeconds(3))) + .ClientCalls(r => r.OnPort(9209).SucceedAlways()) + .StaticConnectionPool() + .Settings(s => s.DisablePing().RequestTimeout(TimeSpan.FromSeconds(2)).MaxRetryTimeout(TimeSpan.FromSeconds(10))) +); + +audit = await audit.TraceCall( + new ClientCall { + { BadResponse, 9200 }, + { BadResponse, 9201 }, + { MaxRetriesReached } + } +); +---- + +This makes setting any retry setting on a single node connection pool a no-op by design! 
+Connection pooling and failover is all about trying to fail sanely whilst still utilizing the available resources and +not giving up on the fail fast principle; **It is NOT a mechanism for forcing requests to succeed.** + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .ClientCalls(r => r.FailAlways().Takes(TimeSpan.FromSeconds(3))) + .ClientCalls(r => r.OnPort(9209).SucceedAlways()) + .SingleNodeConnection() + .Settings(s => s.DisablePing().MaximumRetries(10)) +); + +audit = await audit.TraceCall( + new ClientCall { + { BadResponse, 9200 } + } +); +---- + diff --git a/docs/asciidoc/client-concepts/connection-pooling/pinging/first-usage.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/pinging/first-usage.asciidoc new file mode 100644 index 00000000000..c7e6c2b5185 --- /dev/null +++ b/docs/asciidoc/client-concepts/connection-pooling/pinging/first-usage.asciidoc @@ -0,0 +1,121 @@ +:section-number: 5.1 + +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[pinging---first-usage]] +== Pinging - First Usage + +Pinging is enabled by default for the <> and <> connection pools. +This means that the first time a node is used or resurrected, a ping is issued with a small (configurable) timeout, +allowing the client to fail and fail over to a healthy node much faster than attempting a request that may be heavier than a ping. + +Here's an example with a cluster with 2 nodes where the second node fails on ping + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(2) + .Ping(p => p.Succeeds(Always)) + .Ping(p => p.OnPort(9201).FailAlways()) + .StaticConnectionPool() + .AllDefaults() +); +---- + +When making the calls, the first call goes to 9200 which succeeds, +and the 2nd call does a ping on 9201 because it's used for the first time. 
+The ping fails so we wrap over to node 9200 which we've already pinged. + +Finally we assert that the connection pool has one node that is marked as dead + +[source,csharp] +---- +await audit.TraceCalls( + + new ClientCall { + { PingSuccess, 9200}, + { HealthyResponse, 9200}, + { pool => + { + pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(0); + } } + }, + new ClientCall { + { PingFailure, 9201}, + { HealthyResponse, 9200}, + { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(1) } + } +); +---- + +A cluster with 4 nodes where the second and third pings fail + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(4) + .Ping(p => p.SucceedAlways()) + .Ping(p => p.OnPort(9201).FailAlways()) + .Ping(p => p.OnPort(9202).FailAlways()) + .StaticConnectionPool() + .AllDefaults() +); +---- + +The first call goes to 9200 which succeeds + +The 2nd call does a ping on 9201 because it's used for the first time. +It fails and so we ping 9202 which also fails. We then ping 9203 because +we haven't used it before and it succeeds + +Finally we assert that the connection pool has two nodes that are marked as dead + +[source,csharp] +---- +await audit.TraceCalls( +new ClientCall { + { PingSuccess, 9200}, + { HealthyResponse, 9200}, + { pool => + { + pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(0); + } } + }, +new ClientCall { + { PingFailure, 9201}, + { PingFailure, 9202}, + { PingSuccess, 9203}, + { HealthyResponse, 9203}, +{ pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(2) } + } +); +---- + +A healthy cluster of 4 (min master nodes of 3 of course!) 
+ +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(4) + .Ping(p => p.SucceedAlways()) + .StaticConnectionPool() + .AllDefaults() +); + +await audit.TraceCalls( + new ClientCall { { PingSuccess, 9200}, { HealthyResponse, 9200} }, + new ClientCall { { PingSuccess, 9201}, { HealthyResponse, 9201} }, + new ClientCall { { PingSuccess, 9202}, { HealthyResponse, 9202} }, + new ClientCall { { PingSuccess, 9203}, { HealthyResponse, 9203} }, + new ClientCall { { HealthyResponse, 9200} }, + new ClientCall { { HealthyResponse, 9201} }, + new ClientCall { { HealthyResponse, 9202} }, + new ClientCall { { HealthyResponse, 9203} }, + new ClientCall { { HealthyResponse, 9200} } +); +---- + diff --git a/docs/asciidoc/client-concepts/connection-pooling/pinging/revival.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/pinging/revival.asciidoc new file mode 100644 index 00000000000..719b0640b90 --- /dev/null +++ b/docs/asciidoc/client-concepts/connection-pooling/pinging/revival.asciidoc @@ -0,0 +1,57 @@ +:section-number: 5.2 + +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[pinging---revival]] +== Pinging - Revival + +When a node is marked dead it will only be put in the dog house for a certain amount of time. Once it comes out of the dog house, or is revived, we schedule a ping +before the actual call to make sure it's up and running. If it's still down we put it back in the dog house a little longer. 
For an explanation on these timeouts see: TODO LINK + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(3) + .ClientCalls(r => r.SucceedAlways()) + .ClientCalls(r => r.OnPort(9202).Fails(Once)) + .Ping(p => p.SucceedAlways()) + .StaticConnectionPool() + .AllDefaults() +); +audit = await audit.TraceCalls( + new ClientCall { { PingSuccess, 9200 }, { HealthyResponse, 9200 } }, + new ClientCall { { PingSuccess, 9201 }, { HealthyResponse, 9201 } }, + new ClientCall { + { PingSuccess, 9202}, + { BadResponse, 9202}, + { HealthyResponse, 9200}, + { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(1) } + }, + new ClientCall { { HealthyResponse, 9201 } }, + new ClientCall { { HealthyResponse, 9200 } }, + new ClientCall { { HealthyResponse, 9201 } }, + new ClientCall { + { HealthyResponse, 9200 }, + { pool => pool.Nodes.First(n=>!n.IsAlive).DeadUntil.Should().BeAfter(DateTime.UtcNow) } + } +); +audit = await audit.TraceCalls( + new ClientCall { { HealthyResponse, 9201 } }, + new ClientCall { { HealthyResponse, 9200 } }, + new ClientCall { { HealthyResponse, 9201 } } +); +audit.ChangeTime(d => d.AddMinutes(20)); +audit = await audit.TraceCalls( + new ClientCall { { HealthyResponse, 9201 } }, + new ClientCall { + { Resurrection, 9202 }, + { PingSuccess, 9202 }, + { HealthyResponse, 9202 } + } +); +---- + diff --git a/docs/asciidoc/client-concepts/connection-pooling/request-overrides/disable-sniff-ping-per-request.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/request-overrides/disable-sniff-ping-per-request.asciidoc new file mode 100644 index 00000000000..1acbda22755 --- /dev/null +++ b/docs/asciidoc/client-concepts/connection-pooling/request-overrides/disable-sniff-ping-per-request.asciidoc @@ -0,0 +1,110 @@ +:section-number: 6.1 + +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + 
+[[disabling-sniffing-and-pinging-on-a-request-basis]] +== Disabling sniffing and pinging on a request basis + +Even if you are using a sniffing connection pool thats set up to sniff on start/failure +and pinging enabled, you can opt out of this behaviour on a _per request_ basis. + +In our first test we set up a cluster that pings and sniffs on startup +but we disable the sniffing on our first request so we only see the ping and the response + +Let's set up the cluster and configure clients to **always** sniff on startup + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .ClientCalls(r => r.SucceedAlways()) + .SniffingConnectionPool() + .Settings(s => s.SniffOnStartup()) <1> +); +---- +<1> sniff on startup + +Now We disable sniffing on the request so even though it's our first call, we do not want to sniff on startup + +Instead, the sniff on startup is deferred to the second call into the cluster that +does not disable sniffing on a per request basis + +And after that no sniff on startup will happen again + +[source,csharp] +---- +audit = await audit.TraceCalls( +new ClientCall(r => r.DisableSniffing()) <1> + { + { PingSuccess, 9200 }, <2> + { HealthyResponse, 9200 } + }, +new ClientCall() + { + { SniffOnStartup }, <3> + { SniffSuccess, 9200 }, + { PingSuccess, 9200 }, + { HealthyResponse, 9200 } + }, +new ClientCall() + { + { PingSuccess, 9201 }, + { HealthyResponse, 9201 } + } +); +---- +<1> disable sniffing + +<2> first call is a successful ping + +<3> sniff on startup call happens here, on the second call + +Now, let's disable pinging on the request + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .ClientCalls(r => r.SucceedAlways()) + .SniffingConnectionPool() + .Settings(s => s.SniffOnStartup()) +); +audit = await audit.TraceCall( + new ClientCall(r => r.DisablePing()) <1> + { + { SniffOnStartup }, + { SniffSuccess, 9200 }, <2> + { HealthyResponse, 9200 } + } +); +---- +<1> disable 
+ping + +<2> No ping after sniffing + +Finally, let's demonstrate disabling both sniff and ping on the request + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .ClientCalls(r => r.SucceedAlways()) + .SniffingConnectionPool() + .Settings(s => s.SniffOnStartup()) +); + +audit = await audit.TraceCall( + new ClientCall(r=>r.DisableSniffing().DisablePing()) <1> + { + { HealthyResponse, 9200 } <2> + } +); +---- +<1> disable ping and sniff + +<2> no ping or sniff before the call + diff --git a/docs/asciidoc/client-concepts/connection-pooling/request-overrides/request-timeouts-overrides.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/request-overrides/request-timeouts-overrides.asciidoc new file mode 100644 index 00000000000..4b3bc518683 --- /dev/null +++ b/docs/asciidoc/client-concepts/connection-pooling/request-overrides/request-timeouts-overrides.asciidoc @@ -0,0 +1,98 @@ +:section-number: 6.2 + +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[request-timeouts]] +== Request Timeouts + +While you can specify a request timeout globally you can override this per request too + +We set up a 10 node cluster with a global timeout of 20 seconds. +Each call on a node takes 10 seconds. So we can only try this call on 2 nodes +before the max request timeout kills the client call. + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .ClientCalls(r => r.FailAlways().Takes(TimeSpan.FromSeconds(10))) + .ClientCalls(r => r.OnPort(9209).SucceedAlways()) + .StaticConnectionPool() + .Settings(s => s.DisablePing().RequestTimeout(TimeSpan.FromSeconds(20))) +); +---- + +On the second request we specify a request timeout override of 80 seconds +We should now see more nodes being tried. 
+ +[source,csharp] +---- +audit = await audit.TraceCalls( + new ClientCall { + { BadResponse, 9200 }, + { BadResponse, 9201 }, + { MaxTimeoutReached } + }, +new ClientCall(r => r.RequestTimeout(TimeSpan.FromSeconds(80))) + { + { BadResponse, 9203 }, + { BadResponse, 9204 }, + { BadResponse, 9205 }, + { BadResponse, 9206 }, + { BadResponse, 9207 }, + { BadResponse, 9208 }, + { HealthyResponse, 9209 }, + } +); +---- + +[[connect-timeouts]] +== Connect Timeouts + +Connect timeouts can be overridden, webrequest/httpclient can not distinguish connect and retry timeouts however +we use this separate configuration value for ping requests. + +we set up a 10 node cluster with a global time out of 20 seconds. +Each call on a node takes 10 seconds. So we can only try this call on 2 nodes +before the max request time out kills the client call. + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .Ping(p => p.SucceedAlways().Takes(TimeSpan.FromSeconds(20))) + .ClientCalls(r => r.SucceedAlways()) + .StaticConnectionPool() + .Settings(s => s.RequestTimeout(TimeSpan.FromSeconds(10)).PingTimeout(TimeSpan.FromSeconds(10))) +); +---- + +The first call uses the configured global settings, request times out after 10 seconds and ping +calls always take 20, so we should see a single ping failure + +On the second request we set a request ping timeout override of 2seconds +We should now see more nodes being tried before the request timeout is hit. 
+ +[source,csharp] +---- +audit = await audit.TraceCalls( +new ClientCall { + { PingFailure, 9200 }, + { MaxTimeoutReached } + }, +new ClientCall(r => r.PingTimeout(TimeSpan.FromSeconds(2))) + { + { PingFailure, 9202 }, + { PingFailure, 9203 }, + { PingFailure, 9204 }, + { PingFailure, 9205 }, + { PingFailure, 9206 }, + { MaxTimeoutReached } + } +); +---- + diff --git a/docs/asciidoc/client-concepts/connection-pooling/request-overrides/respects-allowed-status-code.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/request-overrides/respects-allowed-status-code.asciidoc new file mode 100644 index 00000000000..7be5c5fa646 --- /dev/null +++ b/docs/asciidoc/client-concepts/connection-pooling/request-overrides/respects-allowed-status-code.asciidoc @@ -0,0 +1,29 @@ +:section-number: 6.3 + +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[allowed-status-codes]] +== Allowed status codes + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .ClientCalls(r => r.FailAlways(400)) + .StaticConnectionPool() + .Settings(s => s.DisablePing().MaximumRetries(0)) +); +audit = await audit.TraceCalls( + new ClientCall() { + { BadResponse, 9200 } + }, + new ClientCall(r => r.AllowedStatusCodes(400)) { + { HealthyResponse, 9201 } + } +); +---- + diff --git a/docs/asciidoc/client-concepts/connection-pooling/request-overrides/respects-force-node.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/request-overrides/respects-force-node.asciidoc new file mode 100644 index 00000000000..8c1de860882 --- /dev/null +++ b/docs/asciidoc/client-concepts/connection-pooling/request-overrides/respects-force-node.asciidoc @@ -0,0 +1,30 @@ +:section-number: 6.4 + +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: 
https://www.nuget.org/packages + +[[forcing-nodes]] +== Forcing nodes + +Sometimes you might want to fire a single request to a specific node. You can do so using the `ForceNode` +request configuration. This will ignore the pool and not retry. + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .ClientCalls(r => r.SucceedAlways()) + .ClientCalls(r => r.OnPort(9208).FailAlways()) + .StaticConnectionPool() + .Settings(s => s.DisablePing()) +); +audit = await audit.TraceCall( + new ClientCall(r => r.ForceNode(new Uri("http://localhost:9208"))) { + { BadResponse, 9208 } + } +); +---- + diff --git a/docs/asciidoc/client-concepts/connection-pooling/request-overrides/respects-max-retry-overrides.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/request-overrides/respects-max-retry-overrides.asciidoc new file mode 100644 index 00000000000..2e555807e06 --- /dev/null +++ b/docs/asciidoc/client-concepts/connection-pooling/request-overrides/respects-max-retry-overrides.asciidoc @@ -0,0 +1,78 @@ +:section-number: 6.5 + +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[maximum-retries]] +== Maximum Retries + +By default retry as many times as we have nodes. However retries still respect the request timeout. 
+Meaning if you have a 100 node cluster and a request timeout of 20 seconds we will retry as many times as we can +but give up after 20 seconds + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .ClientCalls(r => r.FailAlways()) + .ClientCalls(r => r.OnPort(9209).SucceedAlways()) + .StaticConnectionPool() + .Settings(s => s.DisablePing()) +); +audit = await audit.TraceCall( + new ClientCall(r => r.MaxRetries(2)) { + { BadResponse, 9200 }, + { BadResponse, 9201 }, + { BadResponse, 9202 }, + { MaxRetriesReached } + } +); +---- + +When you have a 100 node cluster you might want to ensure a fixed number of retries. +Remember that the actual number of requests is initial attempt + set number of retries + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .ClientCalls(r => r.FailAlways()) + .ClientCalls(r => r.OnPort(9209).SucceedAlways()) + .StaticConnectionPool() + .Settings(s => s.DisablePing().MaximumRetries(5)) +); + +audit = await audit.TraceCall( + new ClientCall(r => r.MaxRetries(2)) { + { BadResponse, 9200 }, + { BadResponse, 9201 }, + { BadResponse, 9202 }, + { MaxRetriesReached } + } +); +---- + +This makes setting any retry setting on a single node connection pool a NOOP, this is by design! +Connection pooling and connection failover is about trying to fail sanely whilst still utilizing available resources and +not giving up on the fail fast principle. It's *NOT* a mechanism for forcing requests to succeed. 
+ +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .ClientCalls(r => r.FailAlways().Takes(TimeSpan.FromSeconds(3))) + .ClientCalls(r => r.OnPort(9209).SucceedAlways()) + .SingleNodeConnection() + .Settings(s => s.DisablePing().MaximumRetries(10)) +); + +audit = await audit.TraceCall( + new ClientCall(r => r.MaxRetries(10)) { + { BadResponse, 9200 } + } +); +---- + diff --git a/docs/asciidoc/ClientConcepts/ConnectionPooling/RoundRobin/RoundRobin.doc.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/round-robin/round-robin.asciidoc similarity index 73% rename from docs/asciidoc/ClientConcepts/ConnectionPooling/RoundRobin/RoundRobin.doc.asciidoc rename to docs/asciidoc/client-concepts/connection-pooling/round-robin/round-robin.asciidoc index 04cbc4ae7a0..2d799c95193 100644 --- a/docs/asciidoc/ClientConcepts/ConnectionPooling/RoundRobin/RoundRobin.doc.asciidoc +++ b/docs/asciidoc/client-concepts/connection-pooling/round-robin/round-robin.asciidoc @@ -1,14 +1,21 @@ -Round Robin +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[round-robin]] +== Round Robin + Each connection pool round robins over the `live` nodes, to evenly distribute the load over all known nodes. +=== GetNext -== GetNext -GetNext is implemented in a lock free thread safe fashion, meaning each callee gets returned its own cursor to advance +`GetNext` is implemented in a lock free thread safe fashion, meaning each callee gets returned its own cursor to advance over the internal list of nodes. This to guarantee each request that needs to fall over tries all the nodes without suffering from noisy neighboors advancing a global cursor. 
- -[source, csharp] +[source,csharp] ---- var uris = Enumerable.Range(9200, NumberOfNodes).Select(p => new Uri("http://localhost:" + p)); var staticPool = new StaticConnectionPool(uris, randomize: false); @@ -16,56 +23,60 @@ var sniffingPool = new SniffingConnectionPool(uris, randomize: false); this.AssertCreateView(staticPool); this.AssertCreateView(sniffingPool); ---- -Here we have setup a static connection pool seeded with 10 nodes. We force randomizationOnStartup to false + +Here we have setup a static connection pool seeded with 10 nodes. We force randomization OnStartup to false so that we can test the nodes being returned are int the order we expect them to. So what order we expect? Imagine the following: + Thread A calls GetNext first without a local cursor and takes the current from the internal global cursor which is 0. Thread B calls GetNext() second without a local cursor and therefor starts at 1. After this each thread should walk the nodes in successive order using their local cursor e.g Thread A might get 0,1,2,3,5 and thread B will get 1,2,3,4,0. -[source, csharp] +[source,csharp] ---- var startingPositions = Enumerable.Range(0, NumberOfNodes) - .Select(i => pool.CreateView().First()) - .Select(n => n.Uri.Port) - .ToList(); ----- -[source, csharp] ----- + .Select(i => pool.CreateView().First()) + .Select(n => n.Uri.Port) + .ToList(); + var expectedOrder = Enumerable.Range(9200, NumberOfNodes); + startingPositions.Should().ContainInOrder(expectedOrder); ---- What the above code just proved is that each call to GetNext(null) gets assigned the next available node. Lets up the ante: -- call get next over `NumberOfNodes * 2` threads -- on each thread call getnext `NumberOfNodes * 10` times using a local cursor. + +* call get next over `NumberOfNodes * 2` threads + +* on each thread call getnext `NumberOfNodes * 10` times using a local cursor. 
We'll validate that each thread sees all the nodes and they they wrap over e.g after node 9209 comes 9200 again -[source, csharp] +[source,csharp] ---- var threadedStartPositions = new ConcurrentBag(); ----- -[source, csharp] ----- + var threads = Enumerable.Range(0, 20) - .Select(i => CreateThreadCallingGetNext(pool, threadedStartPositions)) - .ToList(); + .Select(i => CreateThreadCallingGetNext(pool, threadedStartPositions)) + .ToList(); + t.Start(); + t.Join(); ---- + Each thread reported the first node it started off lets make sure we see each node twice as the first node because we started `NumberOfNodes * 2` threads -[source, csharp] +[source,csharp] ---- var grouped = threadedStartPositions.GroupBy(p => p).ToList(); ----- -[source, csharp] ----- + grouped.Count().Should().Be(NumberOfNodes); + grouped.Select(p => p.Count()).Should().OnlyContain(p => p == 2); ---- + diff --git a/docs/asciidoc/ClientConcepts/ConnectionPooling/RoundRobin/SkipDeadNodes.doc.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/round-robin/skip-dead-nodes.asciidoc similarity index 52% rename from docs/asciidoc/ClientConcepts/ConnectionPooling/RoundRobin/SkipDeadNodes.doc.asciidoc rename to docs/asciidoc/client-concepts/connection-pooling/round-robin/skip-dead-nodes.asciidoc index 54688bd9f36..8f043369d3b 100644 --- a/docs/asciidoc/ClientConcepts/ConnectionPooling/RoundRobin/SkipDeadNodes.doc.asciidoc +++ b/docs/asciidoc/client-concepts/connection-pooling/round-robin/skip-dead-nodes.asciidoc @@ -1,196 +1,220 @@ -Round Robin - Skipping Dead Nodes +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[round-robin---skipping-dead-nodes]] +== Round Robin - Skipping Dead Nodes + When selecting nodes the connection pool will try and skip all the nodes that are marked dead. 
+=== GetNext -== GetNext GetNext is implemented in a lock free thread safe fashion, meaning each callee gets returned its own cursor to advance over the internal list of nodes. This to guarantee each request that needs to fall over tries all the nodes without suffering from noisy neighboors advancing a global cursor. - -[source, csharp] +[source,csharp] ---- var seeds = Enumerable.Range(9200, NumberOfNodes).Select(p => new Node(new Uri("http://localhost:" + p))).ToList(); + +seeds.First().MarkDead(DateTime.Now.AddDays(1)); + var pool = new StaticConnectionPool(seeds, randomize: false); + var node = pool.CreateView().First(); -node.Uri.Port.Should().Be(9200); -node = pool.CreateView().First(); + node.Uri.Port.Should().Be(9201); + node = pool.CreateView().First(); + node.Uri.Port.Should().Be(9202); +---- + +[source,csharp] +---- var seeds = Enumerable.Range(9200, NumberOfNodes).Select(p => new Node(new Uri("http://localhost:" + p))).ToList(); -seeds.First().MarkDead(DateTime.Now.AddDays(1)); var pool = new StaticConnectionPool(seeds, randomize: false); var node = pool.CreateView().First(); +node.Uri.Port.Should().Be(9200); +node = pool.CreateView().First(); node.Uri.Port.Should().Be(9201); node = pool.CreateView().First(); node.Uri.Port.Should().Be(9202); ---- -After we marke the first node alive again we expect it to be hit again -[source, csharp] +After we marked the first node alive again, we expect it to be hit again + +[source,csharp] ---- seeds.First().MarkAlive(); ----- -[source, csharp] ----- + var node = pool.CreateView().First(); + node.Uri.Port.Should().Be(9201); + node = pool.CreateView().First(); + node.Uri.Port.Should().Be(9202); + node = pool.CreateView().First(); + node.Uri.Port.Should().Be(9200); +---- + +[source,csharp] +---- var dateTimeProvider = new TestableDateTimeProvider(); + var seeds = Enumerable.Range(9200, NumberOfNodes).Select(p => new Node(new Uri("http://localhost:" + p))).ToList(); + 
seeds.First().MarkDead(dateTimeProvider.Now().AddDays(1)); + var pool = new StaticConnectionPool(seeds, randomize: false, dateTimeProvider: dateTimeProvider); + var node = pool.CreateView().First(); + node.Uri.Port.Should().Be(9201); + node = pool.CreateView().First(); + node.Uri.Port.Should().Be(9202); ---- -If we forward our clock 2 days the node that was marked dead until tomorrow (or yesterday!) should be resurrected -[source, csharp] +If we roll the clock forward two days, the node that was marked dead until tomorrow (or yesterday!) should be resurrected + +[source,csharp] ---- dateTimeProvider.ChangeTime(d => d.AddDays(2)); ----- -[source, csharp] ----- + var n = pool.CreateView().First(); + n.Uri.Port.Should().Be(9201); + n = pool.CreateView().First(); + n.Uri.Port.Should().Be(9202); + n = pool.CreateView().First(); + n.Uri.Port.Should().Be(9200); + n.IsResurrected.Should().BeTrue(); ---- + A cluster with 2 nodes where the second node fails on ping -[source, csharp] +[source,csharp] ---- var audit = new Auditor(() => Framework.Cluster - .Nodes(4) - .ClientCalls(p => p.Succeeds(Always)) - .ClientCalls(p => p.OnPort(9201).FailAlways()) - .ClientCalls(p => p.OnPort(9203).FailAlways()) - .StaticConnectionPool() - .Settings(p=>p.DisablePing()) + .Nodes(4) + .ClientCalls(p => p.Succeeds(Always)) + .ClientCalls(p => p.OnPort(9201).FailAlways()) + .ClientCalls(p => p.OnPort(9203).FailAlways()) + .StaticConnectionPool() + .Settings(p=>p.DisablePing()) ); ---- -[source, csharp] ----- -await audit.TraceCalls( ----- + The first call goes to 9200 which succeeds -[source, csharp] ----- -new ClientCall { - { HealthyResponse, 9200}, - { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(0) } - }, ----- The 2nd call does a ping on 9201 because its used for the first time. 
It fails so we wrap over to node 9202 -[source, csharp] ----- -new ClientCall { - { BadResponse, 9201}, - { HealthyResponse, 9202}, ----- Finally we assert that the connectionpool has one node that is marked as dead -[source, csharp] ----- -{ pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(1) } - }, ----- The next call goes to 9203 which fails so we should wrap over -[source, csharp] +[source,csharp] ---- +await audit.TraceCalls( new ClientCall { - { BadResponse, 9203}, - { HealthyResponse, 9200}, - { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(2) } - }, - new ClientCall { - { HealthyResponse, 9202}, - { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(2) } - }, - new ClientCall { - { HealthyResponse, 9200}, - { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(2) } - }, - new ClientCall { - { HealthyResponse, 9202}, - { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(2) } - }, - new ClientCall { - { HealthyResponse, 9200}, - { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(2) } - } + { HealthyResponse, 9200}, + { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(0) } + }, +new ClientCall { + { BadResponse, 9201}, + { HealthyResponse, 9202}, +{ pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(1) } + }, +new ClientCall { + { BadResponse, 9203}, + { HealthyResponse, 9200}, + { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(2) } + }, + new ClientCall { + { HealthyResponse, 9202}, + { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(2) } + }, + new ClientCall { + { HealthyResponse, 9200}, + { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(2) } + }, + new ClientCall { + { HealthyResponse, 9202}, + { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(2) } + }, + new ClientCall { + { HealthyResponse, 9200}, + { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(2) } + } ); ---- + A cluster with 2 nodes where the second node fails on ping 
-[source, csharp] +[source,csharp] ---- var audit = new Auditor(() => Framework.Cluster - .Nodes(4) - .ClientCalls(p => p.Fails(Always)) - .StaticConnectionPool() - .Settings(p=>p.DisablePing()) + .Nodes(4) + .ClientCalls(p => p.Fails(Always)) + .StaticConnectionPool() + .Settings(p=>p.DisablePing()) ); ---- -[source, csharp] ----- -await audit.TraceCalls( ----- + All the calls fail -[source, csharp] ----- -new ClientCall { - { BadResponse, 9200}, - { BadResponse, 9201}, - { BadResponse, 9202}, - { BadResponse, 9203}, - { MaxRetriesReached }, - { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(4) } - }, ----- After all our registered nodes are marked dead we want to sample a single dead node each time to quickly see if the cluster is back up. We do not want to retry all 4 nodes -[source, csharp] +[source,csharp] ---- +await audit.TraceCalls( new ClientCall { - { AllNodesDead }, - { Resurrection, 9201}, - { BadResponse, 9201}, - { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(4) } - }, - new ClientCall { - { AllNodesDead }, - { Resurrection, 9202}, - { BadResponse, 9202}, - { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(4) } - }, - new ClientCall { - { AllNodesDead }, - { Resurrection, 9203}, - { BadResponse, 9203}, - { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(4) } - }, - new ClientCall { - { AllNodesDead }, - { Resurrection, 9200}, - { BadResponse, 9200}, - { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(4) } - } + { BadResponse, 9200}, + { BadResponse, 9201}, + { BadResponse, 9202}, + { BadResponse, 9203}, + { MaxRetriesReached }, + { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(4) } + }, +new ClientCall { + { AllNodesDead }, + { Resurrection, 9201}, + { BadResponse, 9201}, + { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(4) } + }, + new ClientCall { + { AllNodesDead }, + { Resurrection, 9202}, + { BadResponse, 9202}, + { pool => 
pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(4) } + }, + new ClientCall { + { AllNodesDead }, + { Resurrection, 9203}, + { BadResponse, 9203}, + { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(4) } + }, + new ClientCall { + { AllNodesDead }, + { Resurrection, 9200}, + { BadResponse, 9200}, + { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(4) } + } ); ---- + diff --git a/docs/asciidoc/ClientConcepts/ConnectionPooling/RoundRobin/VolatileUpdates.doc.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/round-robin/volatile-updates.asciidoc similarity index 64% rename from docs/asciidoc/ClientConcepts/ConnectionPooling/RoundRobin/VolatileUpdates.doc.asciidoc rename to docs/asciidoc/client-concepts/connection-pooling/round-robin/volatile-updates.asciidoc index 33f1561476d..dd0712a75da 100644 --- a/docs/asciidoc/ClientConcepts/ConnectionPooling/RoundRobin/VolatileUpdates.doc.asciidoc +++ b/docs/asciidoc/client-concepts/connection-pooling/round-robin/volatile-updates.asciidoc @@ -1,28 +1,39 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current +:github: https://github.com/elastic/elasticsearch-net +:nuget: https://www.nuget.org/packages +[[volatile-updates]] +== Volatile Updates -[source, csharp] +[source,csharp] ---- var uris = Enumerable.Range(9200, NumberOfNodes).Select(p => new Uri("http://localhost:" + p)); -var sniffingPool = new SniffingConnectionPool(uris, randomize: false); -Action callSniffing = () => this.AssertCreateView(sniffingPool); -callSniffing.ShouldNotThrow(); -var uris = Enumerable.Range(9200, NumberOfNodes).Select(p => new Uri("http://localhost:" + p)); + var staticPool = new StaticConnectionPool(uris, randomize: false); + Action callStatic = () => this.AssertCreateView(staticPool); + callStatic.ShouldNotThrow(); ---- - -[source, csharp] +[source,csharp] ---- -var threads = Enumerable.Range(0, 50) - .Select(i => CreateReadAndUpdateThread(pool)) - .ToList(); +var uris = 
Enumerable.Range(9200, NumberOfNodes).Select(p => new Uri("http://localhost:" + p)); +var sniffingPool = new SniffingConnectionPool(uris, randomize: false); +Action callSniffing = () => this.AssertCreateView(sniffingPool); +callSniffing.ShouldNotThrow(); ---- + +[source,csharp] +---- +var threads = Enumerable.Range(0, 50) + .Select(i => CreateReadAndUpdateThread(pool)) + .ToList(); + t.Start(); + t.Join(); ---- + diff --git a/docs/asciidoc/client-concepts/connection-pooling/sniffing/on-connection-failure.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/sniffing/on-connection-failure.asciidoc new file mode 100644 index 00000000000..bed35b29570 --- /dev/null +++ b/docs/asciidoc/client-concepts/connection-pooling/sniffing/on-connection-failure.asciidoc @@ -0,0 +1,156 @@ +:section-number: 7.1 + +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[sniffing-on-connection-failure]] +== Sniffing on connection failure + +Sniffing on connection failure is enabled by default when using a connection pool that allows reseeding. +The only IConnectionPool we ship that allows this is the SniffingConnectionPool. + +This can be very handy to force a refresh of the pool's known healthy nodes by inspecting elasticsearch itself. +A sniff tries to get the nodes by asking each currently known node until one responds. + +Here we seed our connection with 5 known nodes 9200-9204 of which we think +9202, 9203, 9204 are master eligible nodes. Our virtualized cluster will throw once when doing +a search on 9201. This should cause a sniff to be kicked off. 
+ +When the call fails on 9201 the sniff succeeds and returns a new cluster of healthy nodes +this cluster only has 3 nodes and the known masters are 9200 and 9202 but a search on 9201 +still fails once + +After this second failure on 9201 another sniff will return a cluster that no +longer fails but looks completely different (9210-9212) we should be able to handle this + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(5) + .MasterEligible(9202, 9203, 9204) + .ClientCalls(r => r.SucceedAlways()) + .ClientCalls(r => r.OnPort(9201).Fails(Once)) +.Sniff(p => p.SucceedAlways(Framework.Cluster + .Nodes(3) + .MasterEligible(9200, 9202) + .ClientCalls(r => r.OnPort(9201).Fails(Once)) +.Sniff(s => s.SucceedAlways(Framework.Cluster + .Nodes(3, 9210) + .MasterEligible(9210, 9212) + .ClientCalls(r => r.SucceedAlways()) + .Sniff(r => r.SucceedAlways()) + )) + )) + .SniffingConnectionPool() + .Settings(s => s.DisablePing().SniffOnStartup(false)) +); +---- + +We assert we do a sniff on our first known master node 9202 + +Our pool should now have three nodes + +We assert we do a sniff on the first master node in our updated cluster + +[source,csharp] +---- +audit = await audit.TraceCalls( +new ClientCall { + { HealthyResponse, 9200 }, + { pool => pool.Nodes.Count.Should().Be(5) } + }, + new ClientCall { + { BadResponse, 9201}, +{ SniffOnFail }, + { SniffSuccess, 9202}, + { HealthyResponse, 9200}, +{ pool => pool.Nodes.Count.Should().Be(3) } + }, + new ClientCall { + { BadResponse, 9201}, +{ SniffOnFail }, + { SniffSuccess, 9200}, + { HealthyResponse, 9210}, + { pool => pool.Nodes.Count.Should().Be(3) } + }, + new ClientCall { { HealthyResponse, 9211 } }, + new ClientCall { { HealthyResponse, 9212 } }, + new ClientCall { { HealthyResponse, 9210 } }, + new ClientCall { { HealthyResponse, 9211 } }, + new ClientCall { { HealthyResponse, 9212 } }, + new ClientCall { { HealthyResponse, 9210 } }, + new ClientCall { { HealthyResponse, 9211 } }, + new 
ClientCall { { HealthyResponse, 9212 } }, + new ClientCall { { HealthyResponse, 9210 } } +); +---- + +Here we set up our cluster exactly the same as the previous setup +Only we enable pinging (default is true) and make the ping fail + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(5) + .MasterEligible(9202, 9203, 9204) + .Ping(r => r.OnPort(9201).Fails(Once)) + .Sniff(p => p.SucceedAlways(Framework.Cluster + .Nodes(3) + .MasterEligible(9200, 9202) + .Ping(r => r.OnPort(9201).Fails(Once)) + .Sniff(s => s.SucceedAlways(Framework.Cluster + .Nodes(3, 9210) + .MasterEligible(9210, 9211) + .Ping(r => r.SucceedAlways()) + .Sniff(r => r.SucceedAlways()) + )) + )) + .SniffingConnectionPool() + .Settings(s => s.SniffOnStartup(false)) +); +---- + +We assert we do a sniff on our first known master node 9202 + +Our pool should now have three nodes + +We assert we do a sniff on the first master node in our updated cluster + +9210 was already pinged after the sniff returned the new nodes + +[source,csharp] +---- +audit = await audit.TraceCalls( + new ClientCall { + { PingSuccess, 9200 }, + { HealthyResponse, 9200 }, + { pool => pool.Nodes.Count.Should().Be(5) } + }, + new ClientCall { + { PingFailure, 9201}, +{ SniffOnFail }, + { SniffSuccess, 9202}, + { PingSuccess, 9200}, + { HealthyResponse, 9200}, +{ pool => pool.Nodes.Count.Should().Be(3) } + }, + new ClientCall { + { PingFailure, 9201}, +{ SniffOnFail }, + { SniffSuccess, 9200}, + { PingSuccess, 9210}, + { HealthyResponse, 9210}, + { pool => pool.Nodes.Count.Should().Be(3) } + }, + new ClientCall { { PingSuccess, 9211 }, { HealthyResponse, 9211 } }, + new ClientCall { { PingSuccess, 9212 }, { HealthyResponse, 9212 } }, +new ClientCall { { HealthyResponse, 9210 } }, + new ClientCall { { HealthyResponse, 9211 } }, + new ClientCall { { HealthyResponse, 9212 } }, + new ClientCall { { HealthyResponse, 9210 } } +); +---- + diff --git 
a/docs/asciidoc/client-concepts/connection-pooling/sniffing/on-stale-cluster-state.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/sniffing/on-stale-cluster-state.asciidoc new file mode 100644 index 00000000000..c34a3eb86d0 --- /dev/null +++ b/docs/asciidoc/client-concepts/connection-pooling/sniffing/on-stale-cluster-state.asciidoc @@ -0,0 +1,102 @@ +:section-number: 7.2 + +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[sniffing-periodically]] +== Sniffing periodically + +Connection pools that return true for `SupportsReseeding` can be configured to sniff periodically. +In addition to sniffing on startup and sniffing on failures, sniffing periodically can benefit scenarios where +clusters are often scaled horizontally during peak hours. An application might have a healthy view of a subset of the nodes +but without sniffing periodically it will never find the nodes that have been added to help out with load + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .MasterEligible(9202, 9203, 9204) + .ClientCalls(r => r.SucceedAlways()) + .Sniff(s => s.SucceedAlways(Framework.Cluster + .Nodes(100) + .MasterEligible(9202, 9203, 9204) + .ClientCalls(r => r.SucceedAlways()) + .Sniff(ss => ss.SucceedAlways(Framework.Cluster + .Nodes(10) + .MasterEligible(9202, 9203, 9204) + .ClientCalls(r => r.SucceedAlways()) + )) + )) + .SniffingConnectionPool() + .Settings(s => s + .DisablePing() + .SniffOnConnectionFault(false) + .SniffOnStartup(false) + .SniffLifeSpan(TimeSpan.FromMinutes(30)) + ) +); +---- + +A healthy cluster; all nodes return healthy responses + +[source,csharp] +---- +audit = await audit.TraceCalls( + new ClientCall { { HealthyResponse, 9200 } }, + new ClientCall { { HealthyResponse, 9201 } }, + new ClientCall { { HealthyResponse, 9202 } }, + new ClientCall { { HealthyResponse, 9203 } }, + 
new ClientCall { { HealthyResponse, 9204 } }, + new ClientCall { { HealthyResponse, 9205 } }, + new ClientCall { { HealthyResponse, 9206 } }, + new ClientCall { { HealthyResponse, 9207 } }, + new ClientCall { { HealthyResponse, 9208 } }, + new ClientCall { { HealthyResponse, 9209 } }, + new ClientCall { + { HealthyResponse, 9200 }, + { pool => pool.Nodes.Count.Should().Be(10) } + } +); +---- + +Now let's forward the clock 31 minutes, our sniff lifespan should now go stale +and the first call should do a sniff which discovers we scaled up to 100 nodes! + +[source,csharp] +---- +audit.ChangeTime(d => d.AddMinutes(31)); +---- + +a sniff is done first and it prefers the first master node + +[source,csharp] +---- +audit = await audit.TraceCalls( + new ClientCall { +{ SniffOnStaleCluster }, + { SniffSuccess, 9202 }, + { HealthyResponse, 9201 }, + { pool => pool.Nodes.Count.Should().Be(100) } + } +); + +audit.ChangeTime(d => d.AddMinutes(31)); +---- + +a sniff is done first and it prefers the first master node + +[source,csharp] +---- +audit = await audit.TraceCalls( + new ClientCall { +{ SniffOnStaleCluster }, + { SniffSuccess, 9202 }, + { HealthyResponse, 9200 }, + { pool => pool.Nodes.Count.Should().Be(10) } + } +); +---- + diff --git a/docs/asciidoc/client-concepts/connection-pooling/sniffing/on-startup.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/sniffing/on-startup.asciidoc new file mode 100644 index 00000000000..016137b20ce --- /dev/null +++ b/docs/asciidoc/client-concepts/connection-pooling/sniffing/on-startup.asciidoc @@ -0,0 +1,153 @@ +:section-number: 7.3 + +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[sniffing-on-startup]] +== Sniffing on startup + +Connection pools that return true for `SupportsReseeding` by default sniff on startup. 
+ +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .Sniff(s => s.Fails(Always)) + .Sniff(s => s.OnPort(9202).Succeeds(Always)) + .SniffingConnectionPool() + .AllDefaults() +); + +await audit.TraceCalls( + new ClientCall + { + { SniffOnStartup}, + { SniffFailure, 9200}, + { SniffFailure, 9201}, + { SniffSuccess, 9202}, + { PingSuccess , 9200}, + { HealthyResponse, 9200} + }, + new ClientCall + { + { PingSuccess, 9201}, + { HealthyResponse, 9201} + } +); +---- + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .Sniff(s => s.Fails(Always)) + .Sniff(s => s.OnPort(9202).Succeeds(Always, Framework.Cluster.Nodes(8, startFrom: 9204))) + .SniffingConnectionPool() + .AllDefaults() +); + +await audit.TraceCall(new ClientCall { +{ SniffOnStartup}, +{ SniffFailure, 9200}, +{ SniffFailure, 9201}, +{ SniffSuccess, 9202}, +{ PingSuccess, 9204}, +{ HealthyResponse, 9204} + }); +---- + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .Sniff(s => s.Fails(Always)) + .Sniff(s => s.OnPort(9209).Succeeds(Always)) + .SniffingConnectionPool() + .AllDefaults() +); + +await audit.TraceCall(new ClientCall { +{ SniffOnStartup}, +{ SniffFailure, 9200}, +{ SniffFailure, 9201}, +{ SniffFailure, 9202}, +{ SniffFailure, 9203}, +{ SniffFailure, 9204}, +{ SniffFailure, 9205}, +{ SniffFailure, 9206}, +{ SniffFailure, 9207}, +{ SniffFailure, 9208}, +{ SniffSuccess, 9209}, +{ PingSuccess, 9200}, +{ HealthyResponse, 9200} + }); +---- + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(new[] { + new Node(new Uri("http://localhost:9200")) { MasterEligible = false }, + new Node(new Uri("http://localhost:9201")) { MasterEligible = false }, + new Node(new Uri("http://localhost:9202")) { MasterEligible = true }, + }) + .Sniff(s => s.Succeeds(Always)) + .SniffingConnectionPool() + .AllDefaults() +); + +await audit.TraceCall(new ClientCall { +{ SniffOnStartup}, +{ 
SniffSuccess, 9202}, +{ PingSuccess, 9200}, +{ HealthyResponse, 9200} + }); +---- + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(new[] { + new Node(new Uri("http://localhost:9200")) { MasterEligible = true }, + new Node(new Uri("http://localhost:9201")) { MasterEligible = true }, + new Node(new Uri("http://localhost:9202")) { MasterEligible = false }, + }) + .Sniff(s => s.Fails(Always)) + .Sniff(s => s.OnPort(9202).Succeeds(Always)) + .SniffingConnectionPool() + .AllDefaults() +); + +await audit.TraceCall(new ClientCall { +{ SniffOnStartup}, +{ SniffFailure, 9200}, +{ SniffFailure, 9201}, +{ SniffSuccess, 9202}, +{ PingSuccess, 9200}, +{ HealthyResponse, 9200} + }); +---- + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .Sniff(s => s.Fails(Always)) + .Sniff(s => s.OnPort(9202).Succeeds(Always)) + .SniffingConnectionPool() + .AllDefaults() +); +await audit.TraceCall(new ClientCall + { + { SniffOnStartup}, + { SniffFailure, 9200}, + { SniffFailure, 9201}, + { SniffSuccess, 9202}, + { PingSuccess , 9200}, + { HealthyResponse, 9200} +}); +---- + diff --git a/docs/asciidoc/client-concepts/connection-pooling/sniffing/role-detection.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/sniffing/role-detection.asciidoc new file mode 100644 index 00000000000..ee85b93a568 --- /dev/null +++ b/docs/asciidoc/client-concepts/connection-pooling/sniffing/role-detection.asciidoc @@ -0,0 +1,150 @@ +:section-number: 7.4 + +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[sniffing-role-detection]] +== Sniffing role detection + +When we sniff the cluster state we detect the role of the node, whether it's master eligible and holds data. +We use this information when selecting a node to perform an API call on. 
+ +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .Sniff(s => s.Fails(Always)) + .Sniff(s => s.OnPort(9202) + .Succeeds(Always, Framework.Cluster.Nodes(8).StoresNoData(9200, 9201, 9202)) + ) + .SniffingConnectionPool() + .AllDefaults() +) +{ + AssertPoolBeforeCall = (pool) => + { + pool.Should().NotBeNull(); + pool.Nodes.Should().HaveCount(10); + pool.Nodes.Where(n => n.HoldsData).Should().HaveCount(10); + }, + + AssertPoolAfterCall = (pool) => + { + pool.Should().NotBeNull(); + pool.Nodes.Should().HaveCount(8); + pool.Nodes.Where(n => n.HoldsData).Should().HaveCount(5); + } +}; + +await audit.TraceStartup(); +---- + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .Sniff(s => s.SucceedAlways() + .Succeeds(Always, Framework.Cluster.Nodes(8).StoresNoData(9200, 9201, 9202).SniffShouldReturnFqdn()) + ) + .SniffingConnectionPool() + .AllDefaults() +) +{ + AssertPoolBeforeCall = (pool) => + { + pool.Should().NotBeNull(); + pool.Nodes.Should().HaveCount(10); + pool.Nodes.Where(n => n.HoldsData).Should().HaveCount(10); + pool.Nodes.Should().OnlyContain(n => n.Uri.Host == "localhost"); + }, + + AssertPoolAfterCall = (pool) => + { + pool.Should().NotBeNull(); + pool.Nodes.Should().HaveCount(8); + pool.Nodes.Where(n => n.HoldsData).Should().HaveCount(5); + pool.Nodes.Should().OnlyContain(n => n.Uri.Host.StartsWith("fqdn") && !n.Uri.Host.Contains("/")); + } +}; + +await audit.TraceStartup(); +---- + +[source,csharp] +---- +var node = SniffAndReturnNode(); + +node.MasterEligible.Should().BeTrue(); + +node.HoldsData.Should().BeFalse(); + +node = await SniffAndReturnNodeAsync(); + +node.MasterEligible.Should().BeTrue(); + +node.HoldsData.Should().BeFalse(); +---- + +[source,csharp] +---- +var pipeline = CreatePipeline(); + +pipeline.Sniff(); +---- + +[source,csharp] +---- +var pipeline = CreatePipeline(); + +await pipeline.SniffAsync(); +---- + +[source,csharp] +---- +this._settings = + 
this._cluster.Client(u => new SniffingConnectionPool(new[] {u}), c => c.PrettyJson()).ConnectionSettings; + +var pipeline = new RequestPipeline(this._settings, DateTimeProvider.Default, new MemoryStreamFactory(), + new SearchRequestParameters()); +---- + +[source,csharp] +---- +var nodes = this._settings.ConnectionPool.Nodes; + +nodes.Should().NotBeEmpty().And.HaveCount(1); + +var node = nodes.First(); +---- + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .Sniff(s => s.Fails(Always)) + .Sniff(s => s.OnPort(9202) + .Succeeds(Always, Framework.Cluster.Nodes(8).MasterEligible(9200, 9201, 9202)) + ) + .SniffingConnectionPool() + .AllDefaults() +) +{ + AssertPoolBeforeCall = (pool) => + { + pool.Should().NotBeNull(); + pool.Nodes.Should().HaveCount(10); + pool.Nodes.Where(n => n.MasterEligible).Should().HaveCount(10); + }, + AssertPoolAfterCall = (pool) => + { + pool.Should().NotBeNull(); + pool.Nodes.Should().HaveCount(8); + pool.Nodes.Where(n => n.MasterEligible).Should().HaveCount(3); + } +}; +await audit.TraceStartup(); +---- + diff --git a/docs/asciidoc/client-concepts/connection-pooling/sticky/skip-dead-nodes.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/sticky/skip-dead-nodes.asciidoc new file mode 100644 index 00000000000..87de87660d1 --- /dev/null +++ b/docs/asciidoc/client-concepts/connection-pooling/sticky/skip-dead-nodes.asciidoc @@ -0,0 +1,197 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[skip-dead-nodes]] +== Skip Dead Nodes + +Sticky - Skipping Dead Nodes +When selecting nodes the connection pool will try and skip all the nodes that are marked dead. 
+ +[source,csharp] +---- +var seeds = Enumerable.Range(9200, NumberOfNodes).Select(p => new Node(new Uri("http://localhost:" + p))).ToList(); + +seeds.First().MarkDead(DateTime.Now.AddDays(1)); + +var pool = new StickyConnectionPool(seeds); + +var node = pool.CreateView().First(); + +node.Uri.Port.Should().Be(9201); + +node = pool.CreateView().First(); + +node.Uri.Port.Should().Be(9201); +---- + +[source,csharp] +---- +var seeds = Enumerable.Range(9200, NumberOfNodes).Select(p => new Node(new Uri("http://localhost:" + p))).ToList(); +var pool = new StickyConnectionPool(seeds); +var node = pool.CreateView().First(); +node.Uri.Port.Should().Be(9200); +node = pool.CreateView().First(); +node.Uri.Port.Should().Be(9200); +node = pool.CreateView().First(); +node.Uri.Port.Should().Be(9200); +---- + +After we mark the first node alive again we expect it to be hit again + +[source,csharp] +---- +seeds.First().MarkAlive(); + +var node = pool.CreateView().First(); + +node.Uri.Port.Should().Be(9200); + +node = pool.CreateView().First(); + +node.Uri.Port.Should().Be(9200); + +node = pool.CreateView().First(); + +node.Uri.Port.Should().Be(9200); +---- + +[source,csharp] +---- +var dateTimeProvider = new TestableDateTimeProvider(); + +var seeds = Enumerable.Range(9200, NumberOfNodes).Select(p => new Node(new Uri("http://localhost:" + p))).ToList(); + +seeds.First().MarkDead(dateTimeProvider.Now().AddDays(1)); + +var pool = new StickyConnectionPool(seeds, dateTimeProvider: dateTimeProvider); + +var node = pool.CreateView().First(); + +node.Uri.Port.Should().Be(9201); + +node = pool.CreateView().First(); + +node.Uri.Port.Should().Be(9201); +---- + +If we forward our clock 2 days the node that was marked dead until tomorrow (or yesterday!) 
should be resurrected + +[source,csharp] +---- +dateTimeProvider.ChangeTime(d => d.AddDays(2)); + +var n = pool.CreateView().First(); + +n.Uri.Port.Should().Be(9200); + +n = pool.CreateView().First(); + +n.Uri.Port.Should().Be(9200); + +n = pool.CreateView().First(); + +n.Uri.Port.Should().Be(9200); + +n.IsResurrected.Should().BeTrue(); +---- + +A cluster with 2 nodes where the second node fails on ping + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(4) + .ClientCalls(p => p.Succeeds(Always)) + .ClientCalls(p => p.OnPort(9200).FailAlways()) + .ClientCalls(p => p.OnPort(9201).FailAlways()) + .StickyConnectionPool() + .Settings(p => p.DisablePing()) +); +---- + +The first call goes to 9200 which succeeds + +The 2nd call does a ping on 9201 because its used for the first time. +It fails so we wrap over to node 9202 + +Finally we assert that the connectionpool has one node that is marked as dead + +[source,csharp] +---- +await audit.TraceCalls( +new ClientCall { + { BadResponse, 9200}, + { BadResponse, 9201}, + { HealthyResponse, 9202}, + { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(2) } + }, +new ClientCall { + { HealthyResponse, 9202}, +{ pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(2) } + }, + new ClientCall { + { HealthyResponse, 9202}, + { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(2) } + } +); +---- + +A cluster with 2 nodes where the second node fails on ping + +[source,csharp] +---- +var audit = new Auditor(() => Framework.Cluster + .Nodes(4) + .ClientCalls(p => p.Fails(Always)) + .StickyConnectionPool() + .Settings(p => p.DisablePing()) +); +---- + +All the calls fail + +After all our registered nodes are marked dead we want to sample a single dead node +each time to quickly see if the cluster is back up. 
We do not want to retry all 4 +nodes + +[source,csharp] +---- +await audit.TraceCalls( +new ClientCall { + { BadResponse, 9200}, + { BadResponse, 9201}, + { BadResponse, 9202}, + { BadResponse, 9203}, + { MaxRetriesReached }, + { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(4) } + }, +new ClientCall { + { AllNodesDead }, + { Resurrection, 9200}, + { BadResponse, 9200}, + { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(4) } + }, + new ClientCall { + { AllNodesDead }, + { Resurrection, 9201}, + { BadResponse, 9201}, + { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(4) } + }, + new ClientCall { + { AllNodesDead }, + { Resurrection, 9202}, + { BadResponse, 9202}, + { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(4) } + }, + new ClientCall { + { AllNodesDead }, + { Resurrection, 9203}, + { BadResponse, 9203}, + { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(4) } + } +); +---- + diff --git a/docs/asciidoc/client-concepts/connection-pooling/sticky/sticky.asciidoc b/docs/asciidoc/client-concepts/connection-pooling/sticky/sticky.asciidoc new file mode 100644 index 00000000000..9b00ac69774 --- /dev/null +++ b/docs/asciidoc/client-concepts/connection-pooling/sticky/sticky.asciidoc @@ -0,0 +1,37 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[sticky]] +== Sticky + +Sticky +Each connection pool returns the first `live` node so that it is sticky between requests + +[source,csharp] +---- +var uris = Enumerable.Range(9200, NumberOfNodes).Select(p => new Uri("http://localhost:" + p)); +var staticPool = new StickyConnectionPool(uris); +this.AssertCreateView(staticPool); +---- + +Here we have setup a static connection pool seeded with 10 nodes. +So what order we expect? 
Imagine the following: + +Thread A calls GetNext and gets returned the first live node +Thread B calls GetNext() and gets returned the same node as it's still the first live. + +[source,csharp] +---- +var startingPositions = Enumerable.Range(0, NumberOfNodes) + .Select(i => pool.CreateView().First()) + .Select(n => n.Uri.Port) + .ToList(); + +var expectedOrder = Enumerable.Repeat(9200, NumberOfNodes); + +startingPositions.Should().ContainInOrder(expectedOrder); +---- + diff --git a/docs/asciidoc/ClientConcepts/HighLevel/CovariantHits/CovariantSearchResults.doc.asciidoc b/docs/asciidoc/client-concepts/high-level/covariant-hits/covariant-search-results.asciidoc similarity index 64% rename from docs/asciidoc/ClientConcepts/HighLevel/CovariantHits/CovariantSearchResults.doc.asciidoc rename to docs/asciidoc/client-concepts/high-level/covariant-hits/covariant-search-results.asciidoc index 0ccfe32fdd1..703b26da5d5 100644 --- a/docs/asciidoc/ClientConcepts/HighLevel/CovariantHits/CovariantSearchResults.doc.asciidoc +++ b/docs/asciidoc/client-concepts/high-level/covariant-hits/covariant-search-results.asciidoc @@ -1,223 +1,262 @@ -# Covariant Search Results - -NEST directly supports returning covariant result sets. -Meaning a result can be typed to an interface or baseclass -but the actual instance type of the result can be that of the subclass directly - -Let look at an example, imagine we want to search over multiple types that all implement -`ISearchResult` - - - - -We have three implementations of `ISearchResult` namely `A`, `B` and `C` - - -The most straightforward way to search over multiple types is to -type the response to the parent interface or base class -and pass the actual types we want to search over using `.Types()` - -[source, csharp] ----- -var result = this._client.Search(s => s - .Type(Types.Type(typeof(A), typeof(B), typeof(C))) - .Size(100) -); ----- -Nest will translate this to a search over /index/a,b,c/_search. 
-hits that have `"_type" : "a"` will be serialized to `A` and so forth - -[source, csharp] ----- -result.IsValid.Should().BeTrue(); ----- -Here we assume our response is valid and that we received the 100 documents -we are expecting. Remember `result.Documents` is an `IEnumerable -ISearchResult -` - -[source, csharp] ----- -result.Documents.Count().Should().Be(100); ----- -To prove the returned result set is covariant we filter the documents based on their -actual type and assert the returned subsets are the expected sizes - -[source, csharp] ----- -var aDocuments = result.Documents.OfType(); ----- -[source, csharp] ----- -var bDocuments = result.Documents.OfType(); -var cDocuments = result.Documents.OfType(); -aDocuments.Count().Should().Be(25); -bDocuments.Count().Should().Be(25); -cDocuments.Count().Should().Be(50); ----- -and assume that properties that only exist on the subclass itself are properly filled - -[source, csharp] ----- -aDocuments.Should().OnlyContain(a => a.PropertyOnA > 0); ----- -[source, csharp] ----- -bDocuments.Should().OnlyContain(a => a.PropertyOnB > 0); -cDocuments.Should().OnlyContain(a => a.PropertyOnC > 0); ----- -A more low level approach is to inspect the hit yourself and determine the CLR type to deserialize to - -[source, csharp] ----- -var result = this._client.Search(s => s - .ConcreteTypeSelector((d, h) => h.Type == "a" ? typeof(A) : h.Type == "b" ? typeof(B) : typeof(C)) - .Size(100) -); ----- -here for each hit we'll call the delegate with `d` which a dynamic representation of the `_source` -and a typed `h` which represents the encapsulating hit. - -[source, csharp] ----- -result.IsValid.Should().BeTrue(); ----- -Here we assume our response is valid and that we received the 100 documents -we are expecting. 
Remember `result.Documents` is an `IEnumerable -ISearchResult -` - -[source, csharp] ----- -result.Documents.Count().Should().Be(100); ----- -To prove the returned result set is covariant we filter the documents based on their -actual type and assert the returned subsets are the expected sizes - -[source, csharp] ----- -var aDocuments = result.Documents.OfType(); ----- -[source, csharp] ----- -var bDocuments = result.Documents.OfType(); -var cDocuments = result.Documents.OfType(); -aDocuments.Count().Should().Be(25); -bDocuments.Count().Should().Be(25); -cDocuments.Count().Should().Be(50); ----- -and assume that properties that only exist on the subclass itself are properly filled - -[source, csharp] ----- -aDocuments.Should().OnlyContain(a => a.PropertyOnA > 0); ----- -[source, csharp] ----- -bDocuments.Should().OnlyContain(a => a.PropertyOnB > 0); -cDocuments.Should().OnlyContain(a => a.PropertyOnC > 0); ----- -Scroll also supports CovariantSearchResponses - - -Scroll() is a continuation of a previous Search() so Types() are lost. -You can hint the type types again using CovariantTypes() - -[source, csharp] ----- -var result = this._client.Scroll(TimeSpan.FromMinutes(60), "scrollId", s => s - .CovariantTypes(Types.Type(typeof(A), typeof(B), typeof(C))) -); ----- -Nest will translate this to a search over /index/a,b,c/_search. -hits that have `"_type" : "a"` will be serialized to `A` and so forth - -[source, csharp] ----- -result.IsValid.Should().BeTrue(); ----- -Here we assume our response is valid and that we received the 100 documents -we are expecting. 
Remember `result.Documents` is an `IEnumerable -ISearchResult -` - -[source, csharp] ----- -result.Documents.Count().Should().Be(100); ----- -To prove the returned result set is covariant we filter the documents based on their -actual type and assert the returned subsets are the expected sizes - -[source, csharp] ----- -var aDocuments = result.Documents.OfType(); ----- -[source, csharp] ----- -var bDocuments = result.Documents.OfType(); -var cDocuments = result.Documents.OfType(); -aDocuments.Count().Should().Be(25); -bDocuments.Count().Should().Be(25); -cDocuments.Count().Should().Be(50); ----- -and assume that properties that only exist on the subclass itself are properly filled - -[source, csharp] ----- -aDocuments.Should().OnlyContain(a => a.PropertyOnA > 0); ----- -[source, csharp] ----- -bDocuments.Should().OnlyContain(a => a.PropertyOnB > 0); -cDocuments.Should().OnlyContain(a => a.PropertyOnC > 0); ----- -The more low level concrete type selector can also be specified on scroll - -[source, csharp] ----- -var result = this._client.Scroll(TimeSpan.FromMinutes(1), "scrollid", s => s - .ConcreteTypeSelector((d, h) => h.Type == "a" ? typeof(A) : h.Type == "b" ? typeof(B) : typeof(C)) -); ----- -here for each hit we'll call the delegate with `d` which a dynamic representation of the `_source` -and a typed `h` which represents the encapsulating hit. - -[source, csharp] ----- -result.IsValid.Should().BeTrue(); ----- -Here we assume our response is valid and that we received the 100 documents -we are expecting. 
Remember `result.Documents` is an `IEnumerable -ISearchResult -` - -[source, csharp] ----- -result.Documents.Count().Should().Be(100); ----- -To prove the returned result set is covariant we filter the documents based on their -actual type and assert the returned subsets are the expected sizes - -[source, csharp] ----- -var aDocuments = result.Documents.OfType(); ----- -[source, csharp] ----- -var bDocuments = result.Documents.OfType(); -var cDocuments = result.Documents.OfType(); -aDocuments.Count().Should().Be(25); -bDocuments.Count().Should().Be(25); -cDocuments.Count().Should().Be(50); ----- -and assume that properties that only exist on the subclass itself are properly filled - -[source, csharp] ----- -aDocuments.Should().OnlyContain(a => a.PropertyOnA > 0); ----- -[source, csharp] ----- -bDocuments.Should().OnlyContain(a => a.PropertyOnB > 0); -cDocuments.Should().OnlyContain(a => a.PropertyOnC > 0); ----- +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[covariant-search-results]] +== Covariant Search Results + +NEST directly supports returning covariant result sets. 
+Meaning a result can be typed to an interface or base class +but the actual instance type of the result can be that of the subclass directly + +Let's look at an example; Imagine we want to search over multiple types that all implement `ISearchResult` + +[source,csharp] +---- +public interface ISearchResult +{ + string Name { get; set; } +} +---- + +We have three implementations of `ISearchResult` namely `A`, `B` and `C` + +[source,csharp] +---- +public class A : ISearchResult +{ + public string Name { get; set; } + public int PropertyOnA { get; set; } +} + +public class B : ISearchResult +{ + public string Name { get; set; } + public int PropertyOnB { get; set; } +} + +public class C : ISearchResult +{ + public string Name { get; set; } + public int PropertyOnC { get; set; } +} +---- + +=== Using Types + +The most straightforward way to search over multiple types is to +type the response to the parent interface or base class +and pass the actual types we want to search over using `.Type()` + +[source,csharp] +---- +var result = this._client.Search(s => s + .Type(Types.Type(typeof(A), typeof(B), typeof(C))) + .Size(100) +); +---- + +NEST will translate this to a search over `/index/a,b,c/_search`; +hits that have `"_type" : "a"` will be serialized to `A` and so forth + +Here we assume our response is valid and that we received the 100 documents +we are expecting. 
Remember `result.Documents` is an `IEnumerable`
+
+[source,csharp]
+----
+result.IsValid.Should().BeTrue();
+
+result.Documents.Count().Should().Be(100);
+----
+
+To prove the returned result set is covariant we filter the documents based on their
+actual type and assert the returned subsets are the expected sizes
+
+[source,csharp]
+----
+var aDocuments = result.Documents.OfType();
+
+var bDocuments = result.Documents.OfType();
+var cDocuments = result.Documents.OfType();
+aDocuments.Count().Should().Be(25);
+bDocuments.Count().Should().Be(25);
+cDocuments.Count().Should().Be(50);
+----
+
+and assume that properties that only exist on the subclass itself are properly filled
+
+[source,csharp]
+----
+aDocuments.Should().OnlyContain(a => a.PropertyOnA > 0);
+
+bDocuments.Should().OnlyContain(a => a.PropertyOnB > 0);
+cDocuments.Should().OnlyContain(a => a.PropertyOnC > 0);
+----
+
+=== Using ConcreteTypeSelector
+
+A more low level approach is to inspect the hit yourself and determine the CLR type to deserialize to
+
+[source,csharp]
+----
+var result = this._client.Search(s => s
+    .ConcreteTypeSelector((d, h) => h.Type == "a" ? typeof(A) : h.Type == "b" ? typeof(B) : typeof(C))
+    .Size(100)
+);
+----
+
+here for each hit we'll call the delegate passed to `ConcreteTypeSelector` where
+
+* `d` is a representation of the `_source` exposed as a `dynamic` type
+
+* a typed `h` which represents the encapsulating hit of the source i.e. `Hit`
+
+Here we assume our response is valid and that we received the 100 documents
+we are expecting. 
Remember `result.Documents` is an `IEnumerable` + +[source,csharp] +---- +result.IsValid.Should().BeTrue(); + +result.Documents.Count().Should().Be(100); +---- + +To prove the returned result set is covariant we filter the documents based on their +actual type and assert the returned subsets are the expected sizes + +[source,csharp] +---- +var aDocuments = result.Documents.OfType(); + +var bDocuments = result.Documents.OfType(); + +var cDocuments = result.Documents.OfType(); + +aDocuments.Count().Should().Be(25); + +bDocuments.Count().Should().Be(25); + +cDocuments.Count().Should().Be(50); +---- + +and assume that properties that only exist on the subclass itself are properly filled + +[source,csharp] +---- +aDocuments.Should().OnlyContain(a => a.PropertyOnA > 0); + +bDocuments.Should().OnlyContain(a => a.PropertyOnB > 0); + +cDocuments.Should().OnlyContain(a => a.PropertyOnC > 0); +---- + +Scroll also supports CovariantSearchResponses + +The Scroll API is a continuation of the previous Search example so Types() are lost. +You can hint at the types using `.CovariantTypes()` + +[source,csharp] +---- +var result = this._client.Scroll(TimeSpan.FromMinutes(60), "scrollId", s => s + .CovariantTypes(Types.Type(typeof(A), typeof(B), typeof(C))) +); +---- + +NEST will translate this to a search over `/index/a,b,c/_search`; +hits that have `"_type" : "a"` will be serialized to `A` and so forth + +Here we assume our response is valid and that we received the 100 documents +we are expecting. 
Remember `result.Documents` is an `IEnumerable` + +[source,csharp] +---- +result.IsValid.Should().BeTrue(); + +result.Documents.Count().Should().Be(100); +---- + +To prove the returned result set is covariant we filter the documents based on their +actual type and assert the returned subsets are the expected sizes + +[source,csharp] +---- +var aDocuments = result.Documents.OfType(); + +var bDocuments = result.Documents.OfType(); + +var cDocuments = result.Documents.OfType(); + +aDocuments.Count().Should().Be(25); + +bDocuments.Count().Should().Be(25); + +cDocuments.Count().Should().Be(50); +---- + +and assume that properties that only exist on the subclass itself are properly filled + +[source,csharp] +---- +aDocuments.Should().OnlyContain(a => a.PropertyOnA > 0); + +bDocuments.Should().OnlyContain(a => a.PropertyOnB > 0); + +cDocuments.Should().OnlyContain(a => a.PropertyOnC > 0); +---- + +The more low level concrete type selector can also be specified on scroll + +[source,csharp] +---- +var result = this._client.Scroll(TimeSpan.FromMinutes(1), "scrollid", s => s + .ConcreteTypeSelector((d, h) => h.Type == "a" ? typeof(A) : h.Type == "b" ? typeof(B) : typeof(C)) +); +---- + +As before, within the delegate passed to `.ConcreteTypeSelector` + +* `d` is the `_source` typed as `dynamic` + +* `h` is the encapsulating typed hit + +Here we assume our response is valid and that we received the 100 documents +we are expecting. 
Remember `result.Documents` is an `IEnumerable` + +[source,csharp] +---- +result.IsValid.Should().BeTrue(); + +result.Documents.Count().Should().Be(100); +---- + +To prove the returned result set is covariant we filter the documents based on their +actual type and assert the returned subsets are the expected sizes + +[source,csharp] +---- +var aDocuments = result.Documents.OfType(); + +var bDocuments = result.Documents.OfType(); + +var cDocuments = result.Documents.OfType(); + +aDocuments.Count().Should().Be(25); + +bDocuments.Count().Should().Be(25); + +cDocuments.Count().Should().Be(50); +---- + +and assume that properties that only exist on the subclass itself are properly filled + +[source,csharp] +---- +aDocuments.Should().OnlyContain(a => a.PropertyOnA > 0); + +bDocuments.Should().OnlyContain(a => a.PropertyOnB > 0); + +cDocuments.Should().OnlyContain(a => a.PropertyOnC > 0); +---- + diff --git a/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/DocumentPaths.doc.asciidoc b/docs/asciidoc/client-concepts/high-level/inference/document-paths.asciidoc similarity index 61% rename from docs/asciidoc/ClientConcepts/HighLevel/Inferrence/DocumentPaths.doc.asciidoc rename to docs/asciidoc/client-concepts/high-level/inference/document-paths.asciidoc index f493e031c5e..16fa15261b9 100644 --- a/docs/asciidoc/ClientConcepts/HighLevel/Inferrence/DocumentPaths.doc.asciidoc +++ b/docs/asciidoc/client-concepts/high-level/inference/document-paths.asciidoc @@ -1,105 +1,125 @@ -# DocumentPaths -Many API's in elasticsearch describe a path to a document. 
In NEST besides generating a constructor that takes -and Index, Type and Id seperately we also generate a constructor taking a DocumentPath that allows you to describe the path -to your document more succintly +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net +:nuget: https://www.nuget.org/packages -Manually newing +[[document-paths]] +== Document Paths + +Many API's in Elasticsearch describe a path to a document. In NEST, besides generating a constructor that takes +and Index, Type and Id seperately, we also generate a constructor taking a `DocumentPath` that allows you to describe the path +to your document more succintly + +=== Creating new instances here we create a new document path based on Project with the id 1 -[source, csharp] +[source,csharp] ---- IDocumentPath path = new DocumentPath(1); ----- -[source, csharp] ----- + Expect("project").WhenSerializing(path.Index); Expect("project").WhenSerializing(path.Type); Expect(1).WhenSerializing(path.Id); ---- + You can still override the inferred index and type name -[source, csharp] +[source,csharp] ---- path = new DocumentPath(1).Type("project1"); ----- -[source, csharp] ----- + Expect("project1").WhenSerializing(path.Type); path = new DocumentPath(1).Index("project1"); Expect("project1").WhenSerializing(path.Index); ---- -there is also a static way to describe such paths -[source, csharp] +and there is also a static way to describe such paths + +[source,csharp] ---- path = DocumentPath.Id(1); ----- -[source, csharp] ----- + Expect("project").WhenSerializing(path.Index); Expect("project").WhenSerializing(path.Type); Expect(1).WhenSerializing(path.Id); -var project = new Project { Name = "hello-world" }; ---- -here we create a new document path based on a Project -[source, csharp] +=== Creating from a document type instance + +if you have an instance of your document you can use it as well generate document paths + 
+[source,csharp] ---- -IDocumentPath path = new DocumentPath(project); +var project = new Project { Name = "hello-world" }; ---- -[source, csharp] + +here we create a new document path based on the instance of `Project`, project + +[source,csharp] ---- +IDocumentPath path = new DocumentPath(project); + Expect("project").WhenSerializing(path.Index); + Expect("project").WhenSerializing(path.Type); + Expect("hello-world").WhenSerializing(path.Id); ---- + You can still override the inferred index and type name -[source, csharp] +[source,csharp] ---- path = new DocumentPath(project).Type("project1"); ----- -[source, csharp] ----- + Expect("project1").WhenSerializing(path.Type); + path = new DocumentPath(project).Index("project1"); + Expect("project1").WhenSerializing(path.Index); ---- -there is also a static way to describe such paths -[source, csharp] +and again, there is also a static way to describe such paths + +[source,csharp] ---- path = DocumentPath.Id(project); ----- -[source, csharp] ----- + Expect("project").WhenSerializing(path.Index); + Expect("project").WhenSerializing(path.Type); + Expect("hello-world").WhenSerializing(path.Id); + DocumentPath p = project; -var project = new Project { Name = "hello-world" }; ---- -Here we can see and example how DocumentPath helps your describe your requests more tersely -[source, csharp] +=== An example with requests + +[source,csharp] ---- -var request = new IndexRequest(2) { Document = project }; +var project = new Project { Name = "hello-world" }; ---- -[source, csharp] + +we can see an example of how `DocumentPath` helps your describe your requests more tersely + +[source,csharp] ---- +var request = new IndexRequest(2) { Document = project }; + request = new IndexRequest(project) { }; ---- -when comparing with the full blown constructor and passing document manually -DocumentPath -T -'s benefits become apparent. 
-[source, csharp] +when comparing with the full blown constructor and passing document manually, +`DocumentPath`'s benefits become apparent. + +[source,csharp] ---- request = new IndexRequest(IndexName.From(), TypeName.From(), 2) { - Document = project + Document = project }; ---- + diff --git a/docs/asciidoc/client-concepts/high-level/inference/features-inference.asciidoc b/docs/asciidoc/client-concepts/high-level/inference/features-inference.asciidoc new file mode 100644 index 00000000000..131c375530e --- /dev/null +++ b/docs/asciidoc/client-concepts/high-level/inference/features-inference.asciidoc @@ -0,0 +1,35 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[features-inference]] +== Features Inference + +Some URIs in Elasticsearch take a `Feature` enum. +Within NEST, route values on the URI are represented as classes that implement an interface, `IUrlParameter`. +Since enums _cannot_ implement interfaces in C#, a route parameter that would be of type `Feature` is represented using the `Features` class that +the `Feature` enum implicitly converts to. + +=== Constructor + +Using the `Features` constructor directly is possible but rather involved + +[source,csharp] +---- +Features fieldString = Feature.Mappings | Feature.Aliases; +Expect("_mappings,_aliases") + .WhenSerializing(fieldString); +---- + +=== Implicit conversion + +Here we instantiate a GET index request whichs takes two features, settings and warmers. +Notice how we can use the `Feature` enum directly. 
+ +[source,csharp] +---- +var request = new GetIndexRequest(All, Feature.Settings | Feature.Warmers); +---- + diff --git a/docs/asciidoc/client-concepts/high-level/inference/field-inference.asciidoc b/docs/asciidoc/client-concepts/high-level/inference/field-inference.asciidoc new file mode 100644 index 00000000000..cd12d888522 --- /dev/null +++ b/docs/asciidoc/client-concepts/high-level/inference/field-inference.asciidoc @@ -0,0 +1,486 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[field-inference]] +== Field Inference + +Several places in the Elasticsearch API expect the path to a field from your original source document as a string. +NEST allows you to use C# expressions to strongly type these field path strings. + +These expressions are assigned to a type called `Field` and there are several ways to create an instance of one: + +=== Constructor + +Using the constructor directly is possible but rather involved + +[source,csharp] +---- +var fieldString = new Field { Name = "name" }; +---- + +This is more cumbersome when using C# expressions since they cannot be instantiated easily + +[source,csharp] +---- +Expression> expression = p => p.Name; + +var fieldExpression = Field.Create(expression); +Expect("name") + .WhenSerializing(fieldExpression) + .WhenSerializing(fieldString); +---- + +=== Implicit Conversion + +Therefore you can also implicitly convert strings and expressions to ``Field``s + +[source,csharp] +---- +Field fieldString = "name"; +---- + +but for expressions this is _still_ rather involved + +[source,csharp] +---- +Expression> expression = p => p.Name; + +Field fieldExpression = expression; + +Expect("name") + .WhenSerializing(fieldExpression) + .WhenSerializing(fieldString); +---- + +=== ``Nest.Infer`` + +to ease creating ``Field``s from expressions there is a static `Infer` class you can use + +[source,csharp] 
+---- +Field fieldString = "name"; +---- + +but for expressions this is still rather involved + +[source,csharp] +---- +var fieldExpression = Infer.Field(p => p.Name); +---- + +this can be even shortened even further using a https://msdn.microsoft.com/en-us/library/sf0df423.aspx#Anchor_0[static import in C# 6] i.e. + `using static Nest.Infer;` + +[source,csharp] +---- +fieldExpression = Field(p => p.Name); +---- + +Now that is much terser then our first example using the constructor! + +[source,csharp] +---- +Expect("name") + .WhenSerializing(fieldString) + .WhenSerializing(fieldExpression); + +fieldString = "name^2.1"; + +fieldString.Boost.Should().Be(2.1); + +fieldExpression = Field(p => p.Name, 2.1); +---- + +Now this is much much terser then our first example using the constructor! + +[source,csharp] +---- +Expect("name^2.1") + .WhenSerializing(fieldString) + .WhenSerializing(fieldExpression); +---- + +=== Field name casing + +By default, NEST will camel-case **all** field names to better align with typical +javascript/json conventions + +using `DefaultFieldNameInferrer()` on ConnectionSettings you can change this behavior + +[source,csharp] +---- +var setup = WithConnectionSettings(s => s.DefaultFieldNameInferrer(p => p.ToUpper())); + +setup.Expect("NAME").WhenSerializing(Field(p => p.Name)); +---- + +However ``string``s are *always* passed along verbatim + +[source,csharp] +---- +setup.Expect("NaMe").WhenSerializing("NaMe"); +---- + +if you want the same behavior for expressions, simply pass a Func to `DefaultFieldNameInferrer` +to make no changes to the name + +[source,csharp] +---- +setup = WithConnectionSettings(s => s.DefaultFieldNameInferrer(p => p)); + +setup.Expect("Name").WhenSerializing(Field(p => p.Name)); +---- + +=== Complex field name expressions + +You can follow your property expression to any depth. 
Here we are traversing to the ``LeadDeveloper``'s `FirstName` + +[source,csharp] +---- +Expect("leadDeveloper.firstName").WhenSerializing(Field(p => p.LeadDeveloper.FirstName)); +---- + +When dealing with collection indexers, the indexer access is ignored allowing you to traverse into properties of collections + +[source,csharp] +---- +Expect("curatedTags").WhenSerializing(Field(p => p.CuratedTags[0])); +---- + +Similarly, LINQ's `.First()` method also works + +[source,csharp] +---- +Expect("curatedTags").WhenSerializing(Field(p => p.CuratedTags.First())); + +Expect("curatedTags.added").WhenSerializing(Field(p => p.CuratedTags[0].Added)); + +Expect("curatedTags.name").WhenSerializing(Field(p => p.CuratedTags.First().Name)); +---- + +NOTE: Remember, these are _expressions_ and not actual code that will be executed + +An indexer on a dictionary is assumed to describe a property name + +[source,csharp] +---- +Expect("metadata.hardcoded").WhenSerializing(Field(p => p.Metadata["hardcoded"])); + +Expect("metadata.hardcoded.created").WhenSerializing(Field(p => p.Metadata["hardcoded"].Created)); +---- + +A cool feature here is that we'll evaluate variables passed to an indexer + +[source,csharp] +---- +var variable = "var"; + +Expect("metadata.var").WhenSerializing(Field(p => p.Metadata[variable])); + +Expect("metadata.var.created").WhenSerializing(Field(p => p.Metadata[variable].Created)); +---- + +If you are using Elasticearch's {ref_current}/_multi_fields.html[multi_fields], which you really should as they allow +you to analyze a string in a number of different ways, these __"virtual"__ sub fields +do not always map back on to your POCO. 
By calling `.Suffix()` on expressions, you describe the sub fields that +should be mapped and <> + +[source,csharp] +---- +Expect("leadDeveloper.firstName.raw").WhenSerializing(Field(p => p.LeadDeveloper.FirstName.Suffix("raw"))); + +Expect("curatedTags.raw").WhenSerializing(Field(p => p.CuratedTags[0].Suffix("raw"))); + +Expect("curatedTags.raw").WhenSerializing(Field(p => p.CuratedTags.First().Suffix("raw"))); + +Expect("curatedTags.added.raw").WhenSerializing(Field(p => p.CuratedTags[0].Added.Suffix("raw"))); + +Expect("metadata.hardcoded.raw").WhenSerializing(Field(p => p.Metadata["hardcoded"].Suffix("raw"))); + +Expect("metadata.hardcoded.created.raw").WhenSerializing(Field(p => p.Metadata["hardcoded"].Created.Suffix("raw"))); +---- + +You can even chain `.Suffix()` calls to any depth! + +[source,csharp] +---- +Expect("curatedTags.name.raw.evendeeper").WhenSerializing(Field(p => p.CuratedTags.First().Name.Suffix("raw").Suffix("evendeeper"))); +---- + +Variables passed to suffix will be evaluated as well + +[source,csharp] +---- +var suffix = "unanalyzed"; + +Expect("metadata.var.unanalyzed").WhenSerializing(Field(p => p.Metadata[variable].Suffix(suffix))); + +Expect("metadata.var.created.unanalyzed").WhenSerializing(Field(p => p.Metadata[variable].Created.Suffix(suffix))); +---- + +Suffixes can also be appended to expressions using `.ApplySuffix()`. This is useful in cases where you want to apply the same suffix +to a list of fields. 
+ +Here we have a list of expressions + +[source,csharp] +---- +var expressions = new List>> +{ + p => p.Name, + p => p.Description, + p => p.CuratedTags.First().Name, + p => p.LeadDeveloper.FirstName +}; +---- + +and we want to append the suffix "raw" to each + +[source,csharp] +---- +var fieldExpressions = + expressions.Select>, Field>(e => e.AppendSuffix("raw")).ToList(); + +Expect("name.raw").WhenSerializing(fieldExpressions[0]); + +Expect("description.raw").WhenSerializing(fieldExpressions[1]); + +Expect("curatedTags.name.raw").WhenSerializing(fieldExpressions[2]); + +Expect("leadDeveloper.firstName.raw").WhenSerializing(fieldExpressions[3]); +---- + +=== Annotations + +When using NEST's property attributes you can specify a new name for the properties + +[source,csharp] +---- +public class BuiltIn +{ + [String(Name = "naam")] + public string Name { get; set; } +} +---- + +[source,csharp] +---- +Expect("naam").WhenSerializing(Field(p => p.Name)); +---- + +Starting with NEST 2.x we also ask the serializer if it can resolve the property to a name. +Here we ask the default `JsonNetSerializer` to resolve a property name and it takes +the `JsonPropertyAttribute` into account + +[source,csharp] +---- +public class SerializerSpecific +{ + [JsonProperty("nameInJson")] + public string Name { get; set; } +} +---- + +[source,csharp] +---- +Expect("nameInJson").WhenSerializing(Field(p => p.Name)); +---- + +If both a NEST property attribute and a serializer specific attribute are present on a property, +NEST takes precedence + +[source,csharp] +---- +public class Both +{ + [String(Name = "naam")] + [JsonProperty("nameInJson")] + public string Name { get; set; } +} +---- + +[source,csharp] +---- +Expect("naam").WhenSerializing(Field(p => p.Name)); + +Expect(new +{ + naam = "Martijn Laarman" +}).WhenSerializing(new Both { Name = "Martijn Laarman" }); +---- + +Resolution of field names is cached per connection settings instance. 
To demonstrate,
+take the following simple POCOs
+
+[source,csharp]
+----
+class A { public C C { get; set; } }
+
+class B { public C C { get; set; } }
+
+class C { public string Name { get; set; } }
+----
+
+[source,csharp]
+----
+var connectionSettings = TestClient.CreateSettings(forceInMemory: true);
+
+var client = new ElasticClient(connectionSettings);
+
+var fieldNameOnA = client.Infer.Field(Field(p => p.C.Name));
+
+var fieldNameOnB = client.Infer.Field(Field(p => p.C.Name));
+----
+
+Here we have two similarly shaped expressions, one coming from A and one from B,
+that will resolve to the same field name, as expected
+
+[source,csharp]
+----
+fieldNameOnA.Should().Be("c.name");
+
+fieldNameOnB.Should().Be("c.name");
+----
+
+now we create a new connection settings with a remap for `C` on class `A` to `"d"`
+now when we resolve the field path for property `C` on `A`, it will be different than
+for property `C` on `B`
+
+[source,csharp]
+----
+var newConnectionSettings = TestClient.CreateSettings(forceInMemory: true, modifySettings: s => s
+    .InferMappingFor(m => m
+        .Rename(p => p.C, "d")
+    )
+);
+
+var newClient = new ElasticClient(newConnectionSettings);
+
+fieldNameOnA = newClient.Infer.Field(Field(p => p.C.Name));
+
+fieldNameOnB = newClient.Infer.Field(Field(p => p.C.Name));
+
+fieldNameOnA.Should().Be("d.name");
+
+fieldNameOnB.Should().Be("c.name");
+----
+
+however we didn't break inference on the first client instance using its separate connection settings
+
+[source,csharp]
+----
+fieldNameOnA = client.Infer.Field(Field(p => p.C.Name));
+
+fieldNameOnB = client.Infer.Field(Field(p => p.C.Name));
+
+fieldNameOnA.Should().Be("c.name");
+
+fieldNameOnB.Should().Be("c.name");
+----
+
+To wrap up, the precedence in which field names are inferred is:
+
+. A hard rename of the property on connection settings using `.Rename()`
+
+. A NEST property mapping
+
+. Ask the serializer if the property has a verbatim value e.g. it has an explicit JsonProperty attribute. 
+ +. Pass the MemberInfo's Name to the DefaultFieldNameInferrer which by default camelCases + +The following example class will demonstrate this precedence + +[source,csharp] +---- +class Precedence +{ + // Even though this property has a NEST property mapping and a JsonProperty attribute, + // We are going to provide a hard rename for it on ConnectionSettings later that should win. + [String(Name = "renamedIgnoresNest")] + [JsonProperty("renamedIgnoresJsonProperty")] + public string RenamedOnConnectionSettings { get; set; } + + // This property has both a NEST attribute and a JsonProperty, NEST should win. + [String(Name = "nestAtt")] + [JsonProperty("jsonProp")] + public string NestAttribute { get; set; } + + // We should take the json property into account by itself + [JsonProperty("jsonProp")] + public string JsonProperty { get; set; } + + // This property we are going to special case in our custom serializer to resolve to ask + [JsonProperty("dontaskme")] + public string AskSerializer { get; set; } + + // We are going to register a DefaultFieldNameInferrer on ConnectionSettings + // that will uppercase all properties. + public string DefaultFieldNameInferrer { get; set; } +} +---- + +Here we create a custom serializer that renames any property named `AskSerializer` to `ask` + +[source,csharp] +---- +class CustomSerializer : JsonNetSerializer +{ + public CustomSerializer(IConnectionSettingsValues settings) : base(settings) { } + + public override IPropertyMapping CreatePropertyMapping(MemberInfo memberInfo) + { + return memberInfo.Name == nameof(Precedence.AskSerializer) + ? 
new PropertyMapping { Name = "ask" } + : base.CreatePropertyMapping(memberInfo); + } +} +---- + +here we provide an explicit rename of a property on `ConnectionSettings` using `.Rename()` +and all properties that are not mapped verbatim should be uppercased + +[source,csharp] +---- +var usingSettings = WithConnectionSettings(s => s + + .InferMappingFor(m => m + .Rename(p => p.RenamedOnConnectionSettings, "renamed") + ) + .DefaultFieldNameInferrer(p => p.ToUpperInvariant()) +).WithSerializer(s => new CustomSerializer(s)); + +usingSettings.Expect("renamed").ForField(Field(p => p.RenamedOnConnectionSettings)); + +usingSettings.Expect("nestAtt").ForField(Field(p => p.NestAttribute)); + +usingSettings.Expect("jsonProp").ForField(Field(p => p.JsonProperty)); + +usingSettings.Expect("ask").ForField(Field(p => p.AskSerializer)); + +usingSettings.Expect("DEFAULTFIELDNAMEINFERRER").ForField(Field(p => p.DefaultFieldNameInferrer)); +---- + +The same naming rules also apply when indexing a document + +[source,csharp] +---- +usingSettings.Expect(new [] +{ + "ask", + "DEFAULTFIELDNAMEINFERRER", + "jsonProp", + "nestAtt", + "renamed" +}).AsPropertiesOf(new Precedence +{ + RenamedOnConnectionSettings = "renamed on connection settings", + NestAttribute = "using a nest attribute", + JsonProperty = "the default serializer resolves json property attributes", + AskSerializer = "serializer fiddled with this one", + DefaultFieldNameInferrer = "shouting much?" 
+}); +---- + diff --git a/docs/asciidoc/client-concepts/high-level/inference/ids-inference.asciidoc b/docs/asciidoc/client-concepts/high-level/inference/ids-inference.asciidoc new file mode 100644 index 00000000000..ad6a0278ee1 --- /dev/null +++ b/docs/asciidoc/client-concepts/high-level/inference/ids-inference.asciidoc @@ -0,0 +1,134 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[ids-inference]] +== Ids Inference + +=== Implicit Conversions + +Several places in the Elasticsearch API expect an `Id` object to be passed. +This is a special box type that you can implicitly convert to from the following types + +* `Int32` + +* `Int64` + +* `String` + +* `Guid` + +Methods that take an `Id` can be passed any of these types and it will be implicitly converted to an `Id` + +[source,csharp] +---- +Id idFromInt = 1; +Id idFromLong = 2L; +Id idFromString = "hello-world"; +Id idFromGuid = new Guid("D70BD3CF-4E38-46F3-91CA-FCBEF29B148E"); +Expect(1).WhenSerializing(idFromInt); +Expect(2).WhenSerializing(idFromLong); +Expect("hello-world").WhenSerializing(idFromString); +Expect("d70bd3cf-4e38-46f3-91ca-fcbef29b148e").WhenSerializing(idFromGuid); +---- + +=== Inferring from a Type + +Sometimes a method takes an object and we need an Id from that object to build up a path. +There is no implicit conversion from any object to Id but we can call `Id.From`. 
+ +Imagine your codebase has the following type that we want to index into Elasticsearch + +[source,csharp] +---- +class MyDTO +{ + public Guid Id { get; set; } + public string Name { get; set; } + public string OtherName { get; set; } +} +---- + +By default NEST will try to find a property called `Id` on the class using reflection +and create a cached fast func delegate based on the properties getter + +[source,csharp] +---- +var dto = new MyDTO +{ + Id = new Guid("D70BD3CF-4E38-46F3-91CA-FCBEF29B148E"), + Name = "x", + OtherName = "y" +}; + +Expect("d70bd3cf-4e38-46f3-91ca-fcbef29b148e").WhenInferringIdOn(dto); +---- + +Using the connection settings you can specify a different property that NEST should use to infer the document Id. +Here we instruct NEST to infer the Id for `MyDTO` based on its `Name` property + +[source,csharp] +---- +WithConnectionSettings(x => x + .InferMappingFor(m => m + .IdProperty(p => p.Name) + ) +).Expect("x").WhenInferringIdOn(dto); +---- + +IMPORTANT: Inference rules are cached __per__ `ConnectionSettings` instance. 
+ +Because the cache is per `ConnectionSettings` instance, we can create another `ConnectionSettings` instance +with different inference rules + +[source,csharp] +---- +WithConnectionSettings(x => x + .InferMappingFor(m => m + .IdProperty(p => p.OtherName) + ) +).Expect("y").WhenInferringIdOn(dto); +---- + +=== Using the `ElasticsearchType` attribute + +Another way is to mark the type with an `ElasticsearchType` attribute, setting `IdProperty` +to the name of the property that should be used for the document id + +[source,csharp] +---- +[ElasticsearchType(IdProperty = nameof(Name))] +class MyOtherDTO +{ + public Guid Id { get; set; } + public string Name { get; set; } + public string OtherName { get; set; } +} +---- + +Now when we infer the id we expect it to be the value of the `Name` property without doing any configuration on the `ConnectionSettings` + +[source,csharp] +---- +var dto = new MyOtherDTO { Id = new Guid("D70BD3CF-4E38-46F3-91CA-FCBEF29B148E"), Name = "x", OtherName = "y" }; + +Expect("x").WhenInferringIdOn(dto); +---- + +=== Using Mapping inference on `ConnectionSettings` + +This attribute *is* cached statically/globally, however an inference rule on the `ConnectionSettings` for the type will +still win over the attribute. 
Here we demonstrate this by creating a different `ConnectionSettings` instance
+that will infer the document id from the property `OtherName`:
+
+[source,csharp]
+----
+WithConnectionSettings(x => x
+    .InferMappingFor(m => m
+        .IdProperty(p => p.OtherName)
+    )
+).Expect("y").WhenInferringIdOn(dto);
+----
+
diff --git a/docs/asciidoc/client-concepts/high-level/inference/index-name-inference.asciidoc b/docs/asciidoc/client-concepts/high-level/inference/index-name-inference.asciidoc
new file mode 100644
index 00000000000..c9d8bc16ff3
--- /dev/null
+++ b/docs/asciidoc/client-concepts/high-level/inference/index-name-inference.asciidoc
@@ -0,0 +1,192 @@
+:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current
+
+:github: https://github.com/elastic/elasticsearch-net
+
+:nuget: https://www.nuget.org/packages
+
+[[index-name-inference]]
+== Index Name Inference
+
+Many endpoints within the Elasticsearch API expect to receive one or more index names
+as part of the request in order to know what index/indices a request should operate on.
+
+NEST has a number of ways in which an index name can be specified
+
+=== Default Index name on ConnectionSettings
+
+A default index name can be specified on `ConnectionSettings` using `.DefaultIndex()`.
+This is the default index name to use when no other index name can be resolved for a request
+
+[source,csharp]
+----
+var settings = new ConnectionSettings()
+    .DefaultIndex("defaultindex");
+var resolver = new IndexNameResolver(settings);
+var index = resolver.Resolve();
+index.Should().Be("defaultindex");
+----
+
+=== Mapping an Index name for POCOs
+
+An index name can be mapped for CLR types using `.MapDefaultTypeIndices()` on `ConnectionSettings`. 
+ +[source,csharp] +---- +var settings = new ConnectionSettings() + .MapDefaultTypeIndices(m => m + .Add(typeof(Project), "projects") + ); + +var resolver = new IndexNameResolver(settings); + +var index = resolver.Resolve(); + +index.Should().Be("projects"); +---- + +=== Mapping an Index name for POCOs + +An index name for a POCO provided using `.MapDefaultTypeIndices()` **will take precedence** over +the default index name + +[source,csharp] +---- +var settings = new ConnectionSettings() + .DefaultIndex("defaultindex") + .MapDefaultTypeIndices(m => m + .Add(typeof(Project), "projects") + ); + +var resolver = new IndexNameResolver(settings); + +var index = resolver.Resolve(); + +index.Should().Be("projects"); +---- + +=== Explicitly specifying Index name on the request + +For API calls that expect an index name, the index name can be explicitly provided +on the request + +[source,csharp] +---- +Uri requestUri = null; + +var client = TestClient.GetInMemoryClient(s => s + .OnRequestCompleted(r => { requestUri = r.Uri; })); + +var response = client.Search(s => s.Index("some-other-index")); <1> + +requestUri.Should().NotBeNull(); + +requestUri.LocalPath.Should().StartWith("/some-other-index/"); +---- +<1> Provide the index name on the request + +When an index name is provided on a request, it **will take precedence** over the default +index name and any index name specified for the POCO type using `.MapDefaultTypeIndices()` + +[source,csharp] +---- +var client = TestClient.GetInMemoryClient(s => + new ConnectionSettings() + .DefaultIndex("defaultindex") + .MapDefaultTypeIndices(m => m + .Add(typeof(Project), "projects") + ) +); + +var response = client.Search(s => s.Index("some-other-index")); <1> + +response.ApiCall.Uri.Should().NotBeNull(); + +response.ApiCall.Uri.LocalPath.Should().StartWith("/some-other-index/"); +---- +<1> Provide the index name on the request + +=== Naming Conventions + +Index names within Elasticsearch cannot contain upper case letters. 
+NEST will check the index name at the point at which the index +name needs to be resolved to make a request; if the index name contains +upper case letters, a `ResolveException` will be thrown indicating +the problem and the index name that caused the problem. + +In the following example, we create a connection settings with both a default index +name and an index name to use for the `Project` type. + +[source,csharp] +---- +var settings = new ConnectionSettings() + .DefaultIndex("Default") + .MapDefaultTypeIndices(m => m + .Add(typeof(Project), "myProjects") + ); + +var resolver = new IndexNameResolver(settings); +---- + +When resolving the index name for the `Project` type, a `ResolveException` +is thrown, indicating that the index name "__myProjects__" contains upper case letters + +[source,csharp] +---- +var e = Assert.Throws(() => resolver.Resolve()); + +e.Message.Should().Be($"Index names cannot contain uppercase characters: myProjects."); +---- + +Similarly, when resolving the index name for the `Tag` type, which will use the default index +name, a `ResolveException` is thrown indicating that the default index name contains upper case +letters + +[source,csharp] +---- +e = Assert.Throws(() => resolver.Resolve()); + +e.Message.Should().Be($"Index names cannot contain uppercase characters: Default."); +---- + +Finally, when resolving an index name from a string, a `ResolveException` will be thrown +if the string contains upper case letters + +[source,csharp] +---- +e = Assert.Throws(() => resolver.Resolve("Foo")); + +e.Message.Should().Be($"Index names cannot contain uppercase characters: Foo."); +---- + +If no index name can be resolved for a request i.e. 
if + +* no default index name is set on connection settings + +* no index name is mapped for a POCO + +* no index name is explicitly specified on the request + +then a `ResolveException` will be thrown to indicate that the index name is `null` + +[source,csharp] +---- +var settings = new ConnectionSettings(); + +var resolver = new IndexNameResolver(settings); + +var e = Assert.Throws(() => resolver.Resolve()); + +e.Message.Should().Contain("Index name is null"); +---- + +``ResolveException``s bubble out of the client and should be dealt with as <> +similar to `ArgumentException`, `ArgumentOutOfRangeException` and other exceptions that _usually_ indicate +misuse of the client API + +[source,csharp] +---- +var client = TestClient.GetInMemoryClient(s => new ConnectionSettings()); + +var e = Assert.Throws(() => client.Search()); +---- + diff --git a/docs/asciidoc/client-concepts/high-level/inference/indices-paths.asciidoc b/docs/asciidoc/client-concepts/high-level/inference/indices-paths.asciidoc new file mode 100644 index 00000000000..d396901414e --- /dev/null +++ b/docs/asciidoc/client-concepts/high-level/inference/indices-paths.asciidoc @@ -0,0 +1,66 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[indices-paths]] +== Indices paths + +Some API's in elasticsearch take one or many index name or a special "_all" marker to send the request to all the indices +In nest this is encoded using `Indices` + +=== Implicit Conversion + +Several types implicitly convert to `Indices` + +[source,csharp] +---- +Nest.Indices singleIndexFromString = "name"; +Nest.Indices multipleIndicesFromString = "name1, name2"; +Nest.Indices allFromString = "_all"; +Nest.Indices allWithOthersFromString = "_all, name2"; +singleIndexFromString.Match( + all => all.Should().BeNull(), + many => many.Indices.Should().HaveCount(1).And.Contain("name") +); 
+multipleIndicesFromString.Match( + all => all.Should().BeNull(), + many => many.Indices.Should().HaveCount(2).And.Contain("name2") +); +allFromString.Match( + all => all.Should().NotBeNull(), + many => many.Indices.Should().BeNull() +); +allWithOthersFromString.Match( + all => all.Should().NotBeNull(), + many => many.Indices.Should().BeNull() +); +---- + +=== Using `Nest.Indices` + +To ease creating ``Indice``s from expressions, there is a static `Nest.Indices` class you can use + +[source,csharp] +---- +var all = Nest.Indices.All; <1> + +var many = Nest.Indices.Index("name1", "name2"); <2> + +var manyTyped = Nest.Indices.Index().And(); <3> + +var singleTyped = Nest.Indices.Index(); + +var singleString = Nest.Indices.Index("name1"); + +var invalidSingleString = Nest.Indices.Index("name1, name2"); <4> +---- +<1> Using "_all" indices + +<2> specifying multiple indices using strings + +<3> specifying multiple using types + +<4> **invalid** single index name + diff --git a/docs/asciidoc/client-concepts/high-level/inference/property-inference.asciidoc b/docs/asciidoc/client-concepts/high-level/inference/property-inference.asciidoc new file mode 100644 index 00000000000..287bc88d940 --- /dev/null +++ b/docs/asciidoc/client-concepts/high-level/inference/property-inference.asciidoc @@ -0,0 +1,101 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[property-inference]] +== Property Name Inference + +=== Appending suffixes to a Lambda expression body + +Suffixes can be appended to the body of a lambda expression, useful in cases where +you have a POCO property mapped as a {ref_current}/_multi_fields.html[multi_field] +and want to use strongly typed access based on the property, yet append a suffix to the +generated field name in order to access a particular `multi_field`. 
+ +The `.Suffix()` extension method can be used for this purpose and when serializing expressions suffixed +in this way, the serialized field name resolves to the last token + +[source,csharp] +---- +Expression> expression = p => p.Name.Suffix("raw"); +Expect("raw").WhenSerializing(expression); +---- + +=== Appending suffixes to a Lambda expression + +Alternatively, suffixes can be applied to a lambda expression directly using +the `.ApplySuffix()` extension method. Again, the serialized field name +resolves to the last token + +[source,csharp] +---- +Expression> expression = p => p.Name; + +expression = expression.AppendSuffix("raw"); + +Expect("raw").WhenSerializing(expression); +---- + +=== Naming conventions + +Currently, the name of a field cannot contain a `.` in Elasticsearch due to the potential for ambiguity with +a field that is mapped as a {ref_current}/_multi_fields.html[multi_field]. + +In these cases, NEST allows the call to go to Elasticsearch, deferring the naming conventions to the server side and, +in the case of a `.` in a field name, a `400 Bad Response` is returned with a server error indicating the reason + +[source,csharp] +---- +var createIndexResponse = _client.CreateIndex("random-" + Guid.NewGuid().ToString().ToLowerInvariant(), c => c + .Mappings(m => m + .Map("type-with-dot", mm => mm + .Properties(p => p + .String(s => s + .Name("name-with.dot") + ) + ) + ) + ) +); +---- + +The response is not valid + +[source,csharp] +---- +createIndexResponse.IsValid.Should().BeFalse(); +---- + +`DebugInformation` provides an audit trail of information to help diagnose the issue + +[source,csharp] +---- +createIndexResponse.DebugInformation.Should().NotBeNullOrEmpty(); +---- + +`ServerError` contains information about the response from Elasticsearch + +[source,csharp] +---- +createIndexResponse.ServerError.Should().NotBeNull(); + +createIndexResponse.ServerError.Status.Should().Be(400); + +createIndexResponse.ServerError.Error.Should().NotBeNull(); + 
+createIndexResponse.ServerError.Error.RootCause.Should().NotBeNullOrEmpty(); + +var rootCause = createIndexResponse.ServerError.Error.RootCause[0]; +---- + +We can see that the underlying reason is a `.` in the field name "name-with.dot" + +[source,csharp] +---- +rootCause.Reason.Should().Be("Field name [name-with.dot] cannot contain '.'"); + +rootCause.Type.Should().Be("mapper_parsing_exception"); +---- + diff --git a/docs/asciidoc/client-concepts/high-level/mapping/auto-map.asciidoc b/docs/asciidoc/client-concepts/high-level/mapping/auto-map.asciidoc new file mode 100644 index 00000000000..8aaa7ca2280 --- /dev/null +++ b/docs/asciidoc/client-concepts/high-level/mapping/auto-map.asciidoc @@ -0,0 +1,1060 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[auto-map]] +== Auto mapping properties + +When creating a mapping (either when creating an index or via the put mapping API), +NEST offers a feature called AutoMap(), which will automagically infer the correct +Elasticsearch datatypes of the POCO properties you are mapping. Alternatively, if +you're using attributes to map your properties, then calling AutoMap() is required +in order for your attributes to be applied. We'll look at examples of both. + +For these examples, we'll define two POCOs, `Company`, which has a name +and a collection of Employees, and `Employee` which has various properties of +different types, and itself has a collection of `Employee` types. 
+ +[source,csharp] +---- +public class Company +{ + public string Name { get; set; } + public List Employees { get; set; } +} + +public class Employee +{ + public string FirstName { get; set; } + public string LastName { get; set; } + public int Salary { get; set; } + public DateTime Birthday { get; set; } + public bool IsManager { get; set; } + public List Employees { get; set; } + public TimeSpan Hours { get; set;} +} +---- + +=== Manual mapping + +To create a mapping for our Company type, we can use the fluent API +and map each property explicitly + +[source,csharp] +---- +var descriptor = new CreateIndexDescriptor("myindex") + .Mappings(ms => ms + .Map(m => m + .Properties(ps => ps + .String(s => s + .Name(c => c.Name) + ) + .Object(o => o + .Name(c => c.Employees) + .Properties(eps => eps + .String(s => s + .Name(e => e.FirstName) + ) + .String(s => s + .Name(e => e.LastName) + ) + .Number(n => n + .Name(e => e.Salary) + .Type(NumberType.Integer) + ) + ) + ) + ) + ) + ); +---- + +This is all fine and dandy and useful for some use cases however in most cases +this can become verbose and wieldy. The majority of the time you simply just want to map *all* +the properties of a POCO in a single go. + +[source,csharp] +---- +var expected = new +{ + mappings = new + { + company = new + { + properties = new + { + name = new + { + type = "string" + }, + employees = new + { + type = "object", + properties = new + { + firstName = new + { + type = "string" + }, + lastName = new + { + type = "string" + }, + salary = new + { + type = "integer" + } + } + } + } + } + } +}; + +Expect(expected).WhenSerializing((ICreateIndexRequest)descriptor); +---- + +=== Simple Automapping + +This is exactly where `AutoMap()` becomes useful. 
Instead of manually mapping each property, +explicitly, we can instead call `.AutoMap()` for each of our mappings and let NEST do all the work + +[source,csharp] +---- +var descriptor = new CreateIndexDescriptor("myindex") + .Mappings(ms => ms + .Map(m => m.AutoMap()) + .Map(m => m.AutoMap()) + ); +---- + +Observe that NEST has inferred the Elasticsearch types based on the CLR type of our POCO properties. +In this example, + +* Birthday was mapped as a date, + +* Hours was mapped as a long (ticks) + +* IsManager was mapped as a boolean, + +* Salary as an integer + +* Employees as an object +and the remaining string properties as strings. + +[source,csharp] +---- +var expected = new +{ + mappings = new + { + company = new + { + properties = new + { + employees = new + { + properties = new + { + birthday = new + { + type = "date" + }, + employees = new + { + properties = new { }, + type = "object" + }, + firstName = new + { + type = "string" + }, + hours = new + { + type = "long" + }, + isManager = new + { + type = "boolean" + }, + lastName = new + { + type = "string" + }, + salary = new + { + type = "integer" + } + }, + type = "object" + }, + name = new + { + type = "string" + } + } + }, + employee = new + { + properties = new + { + birthday = new + { + type = "date" + }, + employees = new + { + properties = new { }, + type = "object" + }, + firstName = new + { + type = "string" + }, + hours = new + { + type = "long" + }, + isManager = new + { + type = "boolean" + }, + lastName = new + { + type = "string" + }, + salary = new + { + type = "integer" + } + } + } + } +}; + +Expect(expected).WhenSerializing((ICreateIndexRequest)descriptor); +---- + +[[auto-mapping-with-overrides]] +[float] +== Auto mapping with overrides + +In most cases, you'll want to map more than just the vanilla datatypes and also provide +various options for your properties (analyzer to use, whether to enable doc_values, etc...). 
+In that case, it's possible to use `.AutoMap()` in conjunction with explicitly mapped properties. + +Here we are using `.AutoMap()` to automatically map our company type, but then we're +overriding our employee property and making it a `nested` type, since by default, `.AutoMap()` will infer objects as `object`. + +[source,csharp] +---- +var descriptor = new CreateIndexDescriptor("myindex") + .Mappings(ms => ms + .Map(m => m + .AutoMap() + .Properties(ps => ps + .Nested(n => n + .Name(c => c.Employees) + ) + ) + ) + ); + +var expected = new +{ + mappings = new + { + company = new + { + properties = new + { + name = new + { + type = "string" + }, + employees = new + { + type = "nested" + } + } + } + } +}; + +Expect(expected).WhenSerializing((ICreateIndexRequest)descriptor); +---- + +`.AutoMap()` is __idempotent__; calling it before or after manually +mapped properties should still yield the same results. + +[source,csharp] +---- +descriptor = new CreateIndexDescriptor("myindex") + .Mappings(ms => ms + .Map(m => m + .Properties(ps => ps + .Nested(n => n + .Name(c => c.Employees) + ) + ) + .AutoMap() + ) + ); + +Expect(expected).WhenSerializing((ICreateIndexRequest)descriptor); +---- + +[[attribute-mapping]] +[float] +== Attribute mapping + +It is also possible to define your mappings using attributes on your POCOs. When you +use attributes, you *must* use `.AutoMap()` in order for the attributes to be applied. +Here we define the same two types as before, but this time using attributes to define the mappings. + +[source,csharp] +---- +[ElasticsearchType(Name = "company")] +public class CompanyWithAttributes +{ + [String(Analyzer = "keyword", NullValue = "null", Similarity = SimilarityOption.BM25)] + public string Name { get; set; } + + [String(Name = "office_hours")] + public TimeSpan? 
HeadOfficeHours { get; set; } + + [Object(Path = "employees", Store = false)] + public List Employees { get; set; } +} + +[ElasticsearchType(Name = "employee")] +public class EmployeeWithAttributes +{ + [String(Name = "first_name")] + public string FirstName { get; set; } + + [String(Name = "last_name")] + public string LastName { get; set; } + + [Number(DocValues = false, IgnoreMalformed = true, Coerce = true)] + public int Salary { get; set; } + + [Date(Format = "MMddyyyy", NumericResolution = NumericResolutionUnit.Seconds)] + public DateTime Birthday { get; set; } + + [Boolean(NullValue = false, Store = true)] + public bool IsManager { get; set; } + + [Nested(Path = "employees")] + [JsonProperty("empl")] + public List Employees { get; set; } +} +---- + +Then we map the types by calling `.AutoMap()` + +[source,csharp] +---- +var descriptor = new CreateIndexDescriptor("myindex") + .Mappings(ms => ms + .Map(m => m.AutoMap()) + .Map(m => m.AutoMap()) + ); + +var expected = new +{ + mappings = new + { + company = new + { + properties = new + { + employees = new + { + path = "employees", + properties = new + { + birthday = new + { + type = "date" + }, + employees = new + { + properties = new { }, + type = "object" + }, + firstName = new + { + type = "string" + }, + hours = new + { + type = "long" + }, + isManager = new + { + type = "boolean" + }, + lastName = new + { + type = "string" + }, + salary = new + { + type = "integer" + } + }, + store = false, + type = "object" + }, + name = new + { + analyzer = "keyword", + null_value = "null", + similarity = "BM25", + type = "string" + }, + office_hours = new + { + type = "string" + } + } + }, + employee = new + { + properties = new + { + birthday = new + { + format = "MMddyyyy", + numeric_resolution = "seconds", + type = "date" + }, + empl = new + { + path = "employees", + properties = new + { + birthday = new + { + type = "date" + }, + employees = new + { + properties = new { }, + type = "object" + }, + firstName = new + 
{ + type = "string" + }, + hours = new + { + type = "long" + }, + isManager = new + { + type = "boolean" + }, + lastName = new + { + type = "string" + }, + salary = new + { + type = "integer" + } + }, + type = "nested" + }, + first_name = new + { + type = "string" + }, + isManager = new + { + null_value = false, + store = true, + type = "boolean" + }, + last_name = new + { + type = "string" + }, + salary = new + { + coerce = true, + doc_values = false, + ignore_malformed = true, + type = "double" + } + } + } + } +}; + +Expect(expected).WhenSerializing((ICreateIndexRequest) descriptor); +---- + +Just as we were able to override the inferred properties in our earlier example, explicit (manual) +mappings also take precedence over attributes. Therefore we can also override any mappings applied +via any attributes defined on the POCO + +[source,csharp] +---- +var descriptor = new CreateIndexDescriptor("myindex") + .Mappings(ms => ms + .Map(m => m + .AutoMap() + .Properties(ps => ps + .Nested(n => n + .Name(c => c.Employees) + ) + ) + ) + .Map(m => m + .AutoMap() + .TtlField(ttl => ttl + .Enable() + .Default("10m") + ) + .Properties(ps => ps + .String(s => s + .Name(e => e.FirstName) + .Fields(fs => fs + .String(ss => ss + .Name("firstNameRaw") + .Index(FieldIndexOption.NotAnalyzed) + ) + .TokenCount(t => t + .Name("length") + .Analyzer("standard") + ) + ) + ) + .Number(n => n + .Name(e => e.Salary) + .Type(NumberType.Double) + .IgnoreMalformed(false) + ) + .Date(d => d + .Name(e => e.Birthday) + .Format("MM-dd-yy") + ) + ) + ) + ); + +var expected = new +{ + mappings = new + { + company = new + { + properties = new + { + employees = new + { + type = "nested" + }, + name = new + { + analyzer = "keyword", + null_value = "null", + similarity = "BM25", + type = "string" + }, + office_hours = new + { + type = "string" + } + } + }, + employee = new + { + _ttl = new + { + enabled = true, + @default = "10m" + }, + properties = new + { + birthday = new + { + format = "MM-dd-yy", 
+ type = "date" + }, + empl = new + { + path = "employees", + properties = new + { + birthday = new + { + type = "date" + }, + employees = new + { + properties = new { }, + type = "object" + }, + firstName = new + { + type = "string" + }, + hours = new + { + type = "long" + }, + isManager = new + { + type = "boolean" + }, + lastName = new + { + type = "string" + }, + salary = new + { + type = "integer" + } + }, + type = "nested" + }, + first_name = new + { + fields = new + { + firstNameRaw = new + { + index = "not_analyzed", + type = "string" + }, + length = new + { + type = "token_count", + analyzer = "standard" + } + }, + type = "string" + }, + isManager = new + { + null_value = false, + store = true, + type = "boolean" + }, + last_name = new + { + type = "string" + }, + salary = new + { + ignore_malformed = false, + type = "double" + } + } + } + } +}; + +Expect(expected).WhenSerializing((ICreateIndexRequest)descriptor); +---- + +[[ignoring-properties]] +[float] +== Ignoring Properties + +Properties on a POCO can be ignored in a few ways: + +* Using the `Ignore` property on a derived `ElasticsearchPropertyAttribute` type applied to the property that should be ignored on the POCO + +* Using the `.InferMappingFor(Func, IClrTypeMapping> selector)` on the connection settings + +* Using an ignore attribute applied to the POCO property that is understood by the `IElasticsearchSerializer` used, and inspected inside of the `CreatePropertyMapping()` on the serializer. 
In the case of the default `JsonNetSerializer`, this is the Json.NET `JsonIgnoreAttribute` + +This example demonstrates all ways, using the `Ignore` property on the attribute to ignore the property `PropertyToIgnore`, the infer mapping to ignore the +property `AnotherPropertyToIgnore` and the json serializer specific attribute to ignore the property `JsonIgnoredProperty` + +[source,csharp] +---- +[ElasticsearchType(Name = "company")] +public class CompanyWithAttributesAndPropertiesToIgnore +{ + public string Name { get; set; } + + [String(Ignore = true)] + public string PropertyToIgnore { get; set; } + + public string AnotherPropertyToIgnore { get; set; } + + [JsonIgnore] + public string JsonIgnoredProperty { get; set; } +} +---- + +All of the properties except `Name` have been ignored in the mapping + +[source,csharp] +---- +var descriptor = new CreateIndexDescriptor("myindex") + .Mappings(ms => ms + .Map(m => m + .AutoMap() + ) + ); + +var expected = new +{ + mappings = new + { + company = new + { + properties = new + { + name = new + { + type = "string" + } + } + } + } +}; + +var settings = WithConnectionSettings(s => s + .InferMappingFor(i => i + .Ignore(p => p.AnotherPropertyToIgnore) + ) +); + +settings.Expect(expected).WhenSerializing((ICreateIndexRequest)descriptor); +---- + +[[mapping-recursion]] +[float] +== Mapping Recursion + +If you notice in our previous `Company` and `Employee` examples, the `Employee` type is recursive +in that the `Employee` class itself contains a collection of type `Employee`. By default, `.AutoMap()` will only +traverse a single depth when it encounters recursive instances like this. Hence, in the +previous examples, the collection of type `Employee` on the `Employee` class did not get any of its properties mapped. +This is done as a safe-guard to prevent stack overflows and all the fun that comes with +infinite recursion. 
Additionally, in most cases, when it comes to Elasticsearch mappings, it is +often an edge case to have deeply nested mappings like this. However, you may still have +the need to do this, so you can control the recursion depth of `.AutoMap()`. + +Let's introduce a very simple class, `A`, which itself has a property +Child of type `A`. + +[source,csharp] +---- +public class A +{ + public A Child { get; set; } +} +---- + +By default, `.AutoMap()` only goes as far as depth 1 + +[source,csharp] +---- +var descriptor = new CreateIndexDescriptor("myindex") + .Mappings(ms => ms + .Map(m => m.AutoMap()) + ); +---- + +Thus we do not map properties on the second occurrence of our Child property + +[source,csharp] +---- +var expected = new +{ + mappings = new + { + a = new + { + properties = new + { + child = new + { + properties = new { }, + type = "object" + } + } + } + } +}; + +Expect(expected).WhenSerializing((ICreateIndexRequest) descriptor); +---- + +Now lets specify a maxRecursion of 3 + +[source,csharp] +---- +var withMaxRecursionDescriptor = new CreateIndexDescriptor("myindex") + .Mappings(ms => ms + .Map(m => m.AutoMap(3)) + ); +---- + +`.AutoMap()` has now mapped three levels of our Child property + +[source,csharp] +---- +var expectedWithMaxRecursion = new +{ + mappings = new + { + a = new + { + properties = new + { + child = new + { + type = "object", + properties = new + { + child = new + { + type = "object", + properties = new + { + child = new + { + type = "object", + properties = new + { + child = new + { + type = "object", + properties = new { } + } + } + } + } + } + } + } + } + } + } +}; + +Expect(expectedWithMaxRecursion).WhenSerializing((ICreateIndexRequest) withMaxRecursionDescriptor); +---- + +[source,csharp] +---- +var descriptor = new PutMappingDescriptor().AutoMap(); + +var expected = new +{ + properties = new + { + child = new + { + properties = new { }, + type = "object" + } + } +}; + 
+Expect(expected).WhenSerializing((IPutMappingRequest)descriptor); + +var withMaxRecursionDescriptor = new PutMappingDescriptor().AutoMap(3); + +var expectedWithMaxRecursion = new +{ + properties = new + { + child = new + { + type = "object", + properties = new + { + child = new + { + type = "object", + properties = new + { + child = new + { + type = "object", + properties = new + { + child = new + { + type = "object", + properties = new { } + } + } + } + } + } + } + } + } +}; + +Expect(expectedWithMaxRecursion).WhenSerializing((IPutMappingRequest)withMaxRecursionDescriptor); +---- + +[[applying-conventions-through-the-visitor-pattern]] +[float] +== Applying conventions through the Visitor pattern + +It is also possible to apply a transformation on all or specific properties. + +AutoMap internally implements the https://en.wikipedia.org/wiki/Visitor_pattern[visitor pattern]. The default visitor, `NoopPropertyVisitor`, +does nothing and acts as a blank canvas for you to implement your own visiting methods. + +For instance, lets create a custom visitor that disables doc values for numeric and boolean types +(Not really a good idea in practice, but let's do it anyway for the sake of a clear example.) 
+ +[source,csharp] +---- +public class DisableDocValuesPropertyVisitor : NoopPropertyVisitor +{ + public override void Visit( + INumberProperty type, + PropertyInfo propertyInfo, + ElasticsearchPropertyAttributeBase attribute) <1> + { + type.DocValues = false; + } + + public override void Visit( + IBooleanProperty type, + PropertyInfo propertyInfo, + ElasticsearchPropertyAttributeBase attribute) <2> + { + type.DocValues = false; + } +} +---- +<1> Override the `Visit` method on `INumberProperty` and set `DocValues = false` + +<2> Similarily, override the `Visit` method on `IBooleanProperty` and set `DocValues = false` + +Now we can pass an instance of our custom visitor to `.AutoMap()` + +[source,csharp] +---- +var descriptor = new CreateIndexDescriptor("myindex") + .Mappings(ms => ms + .Map(m => m.AutoMap(new DisableDocValuesPropertyVisitor())) + ); +---- + +and anytime the client maps a property of the POCO (``Employee`` in this example) as a number (``INumberProperty``) or boolean (``IBooleanProperty``), +it will apply the transformation defined in each `Visit()` call respectively, which in this example +disables {ref_current}/doc-values.html[doc_values]. + +[source,csharp] +---- +var expected = new +{ + mappings = new + { + employee = new + { + properties = new + { + birthday = new + { + type = "date" + }, + employees = new + { + properties = new { }, + type = "object" + }, + firstName = new + { + type = "string" + }, + isManager = new + { + doc_values = false, + type = "boolean" + }, + lastName = new + { + type = "string" + }, + salary = new + { + doc_values = false, + type = "integer" + } + } + } + } +}; +---- + +=== Visiting on ``PropertyInfo`` + +You can even take the visitor approach a step further, and instead of visiting on `IProperty` types, visit +directly on your POCO properties (``PropertyInfo``). As an example, let's create a visitor that maps all CLR types +to an Elasticsearch string (``IStringProperty``). 
+ +[source,csharp] +---- +public class EverythingIsAStringPropertyVisitor : NoopPropertyVisitor +{ + public override IProperty Visit( + PropertyInfo propertyInfo, + ElasticsearchPropertyAttributeBase attribute) => new StringProperty(); +} +---- + +[source,csharp] +---- +var descriptor = new CreateIndexDescriptor("myindex") + .Mappings(ms => ms + .Map(m => m.AutoMap(new EverythingIsAStringPropertyVisitor())) + ); + +var expected = new +{ + mappings = new + { + employee = new + { + properties = new + { + birthday = new + { + type = "string" + }, + employees = new + { + type = "string" + }, + firstName = new + { + type = "string" + }, + isManager = new + { + type = "string" + }, + lastName = new + { + type = "string" + }, + salary = new + { + type = "string" + } + } + } + } +}; +---- + diff --git a/docs/asciidoc/client-concepts/low-level/class.png b/docs/asciidoc/client-concepts/low-level/class.png new file mode 100644 index 00000000000..bbc981cfe7a Binary files /dev/null and b/docs/asciidoc/client-concepts/low-level/class.png differ diff --git a/docs/asciidoc/client-concepts/low-level/connecting.asciidoc b/docs/asciidoc/client-concepts/low-level/connecting.asciidoc new file mode 100644 index 00000000000..506b142afd3 --- /dev/null +++ b/docs/asciidoc/client-concepts/low-level/connecting.asciidoc @@ -0,0 +1,353 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[connecting]] +== Connecting + +Connecting to Elasticsearch with `Elasticsearch.Net` is quite easy and there a few options to suit a number of different use cases. 
+ +[[connection-strategies]] +=== Choosing the right Connection Strategy + +If you simply new an `ElasticLowLevelClient`, it will be a non-failover connection to `http://localhost:9200` + +[source,csharp] +---- +var client = new ElasticLowLevelClient(); +---- + +If your Elasticsearch node does not live at `http://localhost:9200` but instead lives somewhere else, for example, `http://mynode.example.com:8082/apiKey`, then +you will need to pass in some instance of `IConnectionConfigurationValues`. + +The easiest way to do this is: + +[source,csharp] +---- +var node = new Uri("http://mynode.example.com:8082/apiKey"); + +var config = new ConnectionConfiguration(node); + +var client = new ElasticLowLevelClient(config); +---- + +This will still be a non-failover connection, meaning if that `node` goes down the operation will not be retried on any other nodes in the cluster. + +To get a failover connection we have to pass an <> instance instead of a `Uri`. + +[source,csharp] +---- +var node = new Uri("http://mynode.example.com:8082/apiKey"); + +var connectionPool = new SniffingConnectionPool(new[] { node }); + +var config = new ConnectionConfiguration(connectionPool); + +var client = new ElasticLowLevelClient(config); +---- + +Here instead of directly passing `node`, we pass a <> +which will use our `node` to find out the rest of the available cluster nodes. +Be sure to read more about <>. + +=== Configuration Options + +Besides either passing a `Uri` or `IConnectionPool` to `ConnectionConfiguration`, you can also fluently control many more options. 
For instance: + +[source,csharp] +---- +var node = new Uri("http://mynode.example.com:8082/apiKey"); + +var connectionPool = new SniffingConnectionPool(new[] { node }); + +var config = new ConnectionConfiguration(connectionPool) + .DisableDirectStreaming() <1> + .BasicAuthentication("user", "pass") <1> + .RequestTimeout(TimeSpan.FromSeconds(5)); <1> +---- +<1> Additional options + +The following is a list of available connection configuration options: + +[source,csharp] +---- +var config = new ConnectionConfiguration() + .DisableAutomaticProxyDetection() <1> + .EnableHttpCompression() <2> + .DisableDirectStreaming(); <3> + +var client = new ElasticLowLevelClient(config); + +var result = client.Search>(new { size = 12 }); +---- +<1> Disable automatic proxy detection. Defaults to `true`. + +<2> Enable compressed requests and responses from Elasticsearch (Note that nodes need to be configured to allow this. See the {ref_current}/modules-http.html[http module settings] for more info). + +<3> By default responses are deserialized directly from the response stream to the object you tell it to. For debugging purposes, it can be very useful to keep a copy of the raw response on the result object, which is what calling this method will do. + +This will only have a value if the client configuration has `DisableDirectStreaming` set + +[source,csharp] +---- +var raw = result.ResponseBodyInBytes; +---- + +Please note that using `.DisableDirectStreaming` only makes sense if you need the mapped response **and** the raw response __at the same time__. 
+If you only need a `string` or `byte[]` response simply call + +[source,csharp] +---- +var stringResult = client.Search(new { }); +---- + +other configuration options + +[source,csharp] +---- +config = config + .GlobalQueryStringParameters(new NameValueCollection()) <1> + .Proxy(new Uri("http://myproxy"), "username", "pass") <2> + .RequestTimeout(TimeSpan.FromSeconds(4)) <3> + .ThrowExceptions() <4> + .PrettyJson() <5> + .BasicAuthentication("username", "password"); +---- +<1> Allows you to set querystring parameters that have to be added to every request. For instance, if you use a hosted Elasticsearch provider, and you need to pass an `apiKey` parameter onto every request. + +<2> Sets proxy information on the connection. + +<3> Sets the global maximum time a connection may take. Please note that this is the request timeout, the builtin .NET `WebRequest` has no way to set connection timeouts (see http://msdn.microsoft.com/en-us/library/system.net.httpwebrequest.timeout(v=vs.110).aspx[the MSDN documentation on `HttpWebRequest.Timeout` Property]). + +<4> As an alternative to the C/go like error checking on `response.IsValid`, you can instead tell the client to <>. + +<5> forces all serialization to be indented and appends `pretty=true` to all the requests so that the responses are indented as well + +NOTE: Basic authentication credentials can alternatively be specified on the node URI directly: + +[source,csharp] +---- +var uri = new Uri("http://username:password@localhost:9200"); + +var settings = new ConnectionConfiguration(uri); +---- + +...but this may become tedious when using connection pooling with multiple nodes. + +[[thrown-exceptions]] +=== Exceptions + +There are three categories of exceptions that may be thrown: + +`ElasticsearchClientException`:: +These are known exceptions, either an exception that occurred in the request pipeline +(such as max retries or timeout reached, bad authentication, etc...) 
or Elasticsearch itself returned an error (could +not parse the request, bad query, missing field, etc...). If it is an Elasticsearch error, the `ServerError` property +on the response will contain the actual error that was returned. The inner exception will always contain the +root cause exception. + +`UnexpectedElasticsearchClientException`:: +These are unknown exceptions, for instance a response from Elasticsearch not +properly deserialized. These are usually bugs and {github}/issues[should be reported]. This exception also inherits from `ElasticsearchClientException` +so an additional catch block isn't necessary, but can be helpful in distinguishing between the two. + +Development time exceptions:: +These are CLR exceptions like `ArgumentException`, `ArgumentOutOfRangeException`, etc. and other exceptions like `ResolveException` that are thrown when an API in the client is misused. +These should not be handled as you want to know about them during development. + +=== OnRequestCompleted + +You can pass a callback of type `Action` that can eavesdrop every time a response (good or bad) is created. +If you have complex logging needs this is a good place to add that in. 
+ +[source,csharp] +---- +var counter = 0; + +var connectionPool = new SingleNodeConnectionPool(new Uri("http://localhost:9200")); + +var settings = new ConnectionSettings(connectionPool, new InMemoryConnection()) + .OnRequestCompleted(r => counter++); + +var client = new ElasticClient(settings); + +client.RootNodeInfo(); + +counter.Should().Be(1); + +client.RootNodeInfoAsync(); + +counter.Should().Be(2); +---- + +`OnRequestCompleted` is called even when an exception is thrown + +[source,csharp] +---- +var counter = 0; + +var client = TestClient.GetFixedReturnClient(new { }, 500, s => s + .ThrowExceptions() + .OnRequestCompleted(r => counter++) +); + +Assert.Throws(() => client.RootNodeInfo()); + +counter.Should().Be(1); + +Assert.ThrowsAsync(() => client.RootNodeInfoAsync()); + +counter.Should().Be(2); +---- + +[[complex-logging]] +=== Complex logging with OnRequestCompleted + +Here's an example of using `OnRequestCompleted()` for complex logging. Remember, if you would also like +to capture the request and/or response bytes, you also need to set `.DisableDirectStreaming()` to `true` + +[source,csharp] +---- +var list = new List(); + +var connectionPool = new SingleNodeConnectionPool(new Uri("http://localhost:9200")); + +var settings = new ConnectionSettings(connectionPool, new InMemoryConnection()) <1> + .DefaultIndex("default-index") + .DisableDirectStreaming() + .OnRequestCompleted(response => + { + // log out the request and the request body, if available + if (response.RequestBodyInBytes != null) + { + list.Add( + $"{response.HttpMethod} {response.Uri} \n" + + $"{Encoding.UTF8.GetString(response.RequestBodyInBytes)}"); + } + else + { + list.Add($"{response.HttpMethod} {response.Uri}"); + } + + // log out the response and the response body, if available + if (response.ResponseBodyInBytes != null) + { + list.Add($"Status: {response.HttpStatusCode}\n" + + $"{Encoding.UTF8.GetString(response.ResponseBodyInBytes)}\n" + + $"{new string('-', 30)}\n"); + } + else + { 
+ list.Add($"Status: {response.HttpStatusCode}\n" + + $"{new string('-', 30)}\n"); + } + }); + +var client = new ElasticClient(settings); + +var syncResponse = client.Search(s => s + .AllTypes() + .AllIndices() + .Scroll("2m") + .Sort(ss => ss + .Ascending(SortSpecialField.DocumentIndexOrder) + ) +); + +list.Count.Should().Be(2); + +var asyncResponse = await client.SearchAsync(s => s + .AllTypes() + .AllIndices() + .Scroll("2m") + .Sort(ss => ss + .Ascending(SortSpecialField.DocumentIndexOrder) + ) +); + +list.Count.Should().Be(4); + +list.ShouldAllBeEquivalentTo(new [] + { + "POST http://localhost:9200/_search?scroll=2m \n{\"sort\":[{\"_doc\":{\"order\":\"asc\"}}]}", + "Status: 200\n------------------------------\n", + "POST http://localhost:9200/_search?scroll=2m \n{\"sort\":[{\"_doc\":{\"order\":\"asc\"}}]}", + "Status: 200\n------------------------------\n" + }); +---- +<1> Here we use `InMemoryConnection`; in reality you would use another type of `IConnection` that actually makes a request. + +[[configuring-ssl]] +=== Configuring SSL + +SSL must be configured outside of the client using .NET's http://msdn.microsoft.com/en-us/library/system.net.servicepointmanager%28v=vs.110%29.aspx[ServicePointManager] +class and setting the http://msdn.microsoft.com/en-us/library/system.net.servicepointmanager.servercertificatevalidationcallback.aspx[ServerCertificateValidationCallback] +property. 
+ +The bare minimum to make .NET accept self-signed SSL certs that are not in the Window's CA store would be to have the callback simply return `true`: + +[source,csharp] +---- +ServicePointManager.ServerCertificateValidationCallback += (sender, cert, chain, errors) => true; +---- + +However, this will accept **all** requests from the AppDomain to untrusted SSL sites, +therefore **we recommend doing some minimal introspection on the passed in certificate.** + +IMPORTANT: Using `ServicePointManager` does not work on **Core CLR** as the request does not go through `ServicePointManager`; please file an {github}/issues[issue] if you need support for certificate validation on Core CLR. + +=== Overriding default Json.NET behavior + +Overriding the default Json.NET behaviour in NEST is an expert behavior but if you need to get to the nitty gritty, this can be really useful. +First, create a subclass of the `JsonNetSerializer` + +Override ModifyJsonSerializerSettings if you need access to `JsonSerializerSettings` + +You can inject contract resolved converters by implementing the ContractConverters property +This can be much faster then registering them on `JsonSerializerSettings.Converters` + +[source,csharp] +---- +public class MyJsonNetSerializer : JsonNetSerializer +{ + public MyJsonNetSerializer(IConnectionSettingsValues settings) : base(settings) { } +public int CallToModify { get; set; } = 0; + protected override void ModifyJsonSerializerSettings(JsonSerializerSettings settings) => ++CallToModify; +public int CallToContractConverter { get; set; } = 0; + protected override IList> ContractConverters => new List> + { + t => { + CallToContractConverter++; + return null; + } + }; + +} +---- + +You can then register a factory on `ConnectionSettings` to create an instance of your subclass instead. +This is **_called once per instance_** of ConnectionSettings. 
+ +[source,csharp] +---- +var connectionPool = new SingleNodeConnectionPool(new Uri("http://localhost:9200")); + +var settings = new ConnectionSettings(connectionPool, new InMemoryConnection(), s => new MyJsonNetSerializer(s)); + +var client = new ElasticClient(settings); + +client.RootNodeInfo(); + +client.RootNodeInfo(); + +var serializer = ((IConnectionSettingsValues)settings).Serializer as MyJsonNetSerializer; + +serializer.CallToModify.Should().BeGreaterThan(0); + +serializer.SerializeToString(new Project { }); + +serializer.CallToContractConverter.Should().BeGreaterThan(0); +---- + diff --git a/docs/asciidoc/client-concepts/low-level/lifetimes.asciidoc b/docs/asciidoc/client-concepts/low-level/lifetimes.asciidoc new file mode 100644 index 00000000000..a7808a47ab8 --- /dev/null +++ b/docs/asciidoc/client-concepts/low-level/lifetimes.asciidoc @@ -0,0 +1,93 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[lifetimes]] +== Lifetimes + +If you are using an IOC container its always useful to know the best practices around the lifetime of your objects + +In general we advise folks to register their ElasticClient instances as singletons. The client is thread safe +so sharing an instance between threads is fine. + +Zooming in however the actual moving part that benefits the most from being static for most of the duration of your +application is `ConnectionSettings`; caches are __per__ `ConnectionSettings`. + +In some applications it could make perfect sense to have multiple singleton `ElasticClient`'s registered with different +connection settings. e.g if you have 2 functionally isolated Elasticsearch clusters. 
+ +NOTE: Due to the semantic versioning of Elasticsearch.Net and NEST and their alignment to versions of Elasticsearch, all instances of `ElasticClient` and +Elasticsearch clusters that are connected to must be on the **same major version** i.e. it is not possible to have both an `ElasticClient` to connect to +Elasticsearch 1.x _and_ 2.x in the same application as the former would require NEST 1.x and the latter, NEST 2.x. + +Let's demonstrate which components are disposed by creating our own derived `ConnectionSettings`, `IConnectionPool` and `IConnection` types + +[source,csharp] +---- +class AConnectionSettings : ConnectionSettings +{ + public AConnectionSettings(IConnectionPool pool, IConnection connection) + : base(pool, connection) + { } + public bool IsDisposed { get; private set; } + protected override void DisposeManagedResources() + { + this.IsDisposed = true; + base.DisposeManagedResources(); + } +} + +class AConnectionPool : SingleNodeConnectionPool +{ + public AConnectionPool(Uri uri, IDateTimeProvider dateTimeProvider = null) : base(uri, dateTimeProvider) { } + + public bool IsDisposed { get; private set; } + protected override void DisposeManagedResources() + { + this.IsDisposed = true; + base.DisposeManagedResources(); + } +} + +class AConnection : InMemoryConnection +{ + public bool IsDisposed { get; private set; } + protected override void DisposeManagedResources() + { + this.IsDisposed = true; + base.DisposeManagedResources(); + } +} +---- + +`ConnectionSettings`, `IConnectionPool` and `IConnection` all explictily implement `IDisposable` + +[source,csharp] +---- +var connection = new AConnection(); +var connectionPool = new AConnectionPool(new Uri("http://localhost:9200")); +var settings = new AConnectionSettings(connectionPool, connection); +settings.IsDisposed.Should().BeFalse(); +connectionPool.IsDisposed.Should().BeFalse(); +connection.IsDisposed.Should().BeFalse(); +---- + +Disposing `ConnectionSettings` will dispose the `IConnectionPool` 
and `IConnection` it has a hold of + +[source,csharp] +---- +var connection = new AConnection(); + +var connectionPool = new AConnectionPool(new Uri("http://localhost:9200")); + +var settings = new AConnectionSettings(connectionPool, connection); + +settings.IsDisposed.Should().BeTrue(); + +connectionPool.IsDisposed.Should().BeTrue(); + +connection.IsDisposed.Should().BeTrue(); +---- + diff --git a/docs/asciidoc/ClientConcepts/LowLevel/pipeline.png b/docs/asciidoc/client-concepts/low-level/pipeline.png similarity index 100% rename from docs/asciidoc/ClientConcepts/LowLevel/pipeline.png rename to docs/asciidoc/client-concepts/low-level/pipeline.png diff --git a/docs/asciidoc/ClientConcepts/LowLevel/PostData.doc.asciidoc b/docs/asciidoc/client-concepts/low-level/post-data.asciidoc similarity index 54% rename from docs/asciidoc/ClientConcepts/LowLevel/PostData.doc.asciidoc rename to docs/asciidoc/client-concepts/low-level/post-data.asciidoc index b174112033f..0f47a863d7d 100644 --- a/docs/asciidoc/ClientConcepts/LowLevel/PostData.doc.asciidoc +++ b/docs/asciidoc/client-concepts/low-level/post-data.asciidoc @@ -1,37 +1,69 @@ -# Post data -The low level allows you to post a string, byte[] array directly. On top of this if you pass a list of strings or objects -they will be serialized in Elasticsearch's special bulk/multi format. +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current +:github: https://github.com/elastic/elasticsearch-net -Even though the argument for postData on the low level client takes a PostData -You can rely on C# implicit conversion to abstract the notion of PostData completely. -You can implicitly convert from the following types. +:nuget: https://www.nuget.org/packages -[source, csharp] +[[post-data]] +== Post data + +The low level client allows you to post a `string` or `byte[]` array directly. 
On top of this, +if you pass a collection of ``string``s or ``object``s they will be serialized +using Elasticsearch's special bulk/multi format. + +Even though the argument for PostData on the low level client takes a `PostData`, +You can rely on implicit conversion to abstract the notion of PostData completely. +You can implicitly convert from the following types + +* `string` + +* `byte[]` + +* collection of `string` + +* collection of `object` + +* `object` + +[source,csharp] ---- var fromString = ImplicitlyConvertsFrom(@string); ----- -[source, csharp] ----- + var fromByteArray = ImplicitlyConvertsFrom(bytes); var fromListOfString = ImplicitlyConvertsFrom(listOfStrings); var fromListOfObject = ImplicitlyConvertsFrom(listOfObjects); var fromObject = ImplicitlyConvertsFrom(@object); ---- -postData Bytes will always be set if it originated from a byte -[source, csharp] +PostData bytes will always be set if it originated from `byte[]` + +[source,csharp] ---- fromByteArray.WrittenBytes.Should().BeSameAs(bytes); ----- -[source, csharp] ----- + fromString.Type.Should().Be(PostType.LiteralString); fromByteArray.Type.Should().Be(PostType.ByteArray); fromListOfString.Type.Should().Be(PostType.EnumerableOfString); fromListOfObject.Type.Should().Be(PostType.EnumerableOfObject); fromObject.Type.Should().Be(PostType.Serializable); +---- + +and passing a `PostData` object to a method taking `PostData` should not wrap + +[source,csharp] +---- fromString = ImplicitlyConvertsFrom(fromString); +---- + +[source,csharp] +---- +await this.AssertOn(new ConnectionSettings()); + +await this.AssertOn(new ConnectionConfiguration()); +---- + +[source,csharp] +---- fromByteArray = ImplicitlyConvertsFrom(fromByteArray); fromListOfString = ImplicitlyConvertsFrom(fromListOfString); fromListOfObject = ImplicitlyConvertsFrom(fromListOfObject); @@ -41,70 +73,92 @@ fromByteArray.Type.Should().Be(PostType.ByteArray); fromListOfString.Type.Should().Be(PostType.EnumerableOfString); 
fromListOfObject.Type.Should().Be(PostType.EnumerableOfObject); fromObject.Type.Should().Be(PostType.Serializable); -await this.AssertOn(new ConnectionSettings()); -await this.AssertOn(new ConnectionConfiguration()); ---- + Although each implicitly types behaves slightly differently -[source, csharp] ----- -await Post(()=>@string, writes: Utf8Bytes(@string), storesBytes: true, settings: settings); ----- -[source, csharp] +[source,csharp] ---- -await Post(()=>bytes, writes: bytes, storesBytes: true, settings: settings); +await Post(() => @string, writes: Utf8Bytes(@string), storesBytes: true, settings: settings); + +await Post(() => bytes, writes: bytes, storesBytes: true, settings: settings); ---- + When passing a list of strings we assume its a list of valid serialized json that we join with newlinefeeds making sure there is a trailing linefeed -[source, csharp] +[source,csharp] ---- -await Post(()=>listOfStrings, writes: multiStringJson, storesBytes: true, settings: settings); +await Post(() => listOfStrings, writes: multiStringJson, storesBytes: true, settings: settings); ---- + When passing a list of object we assume its a list of objects we need to serialize individually to json and join with newlinefeeds aking sure there is a trailing linefeed -[source, csharp] +[source,csharp] ---- -await Post(()=>listOfObjects, writes: multiObjectJson, storesBytes: false, settings: settings); +await Post(() => listOfObjects, writes: multiObjectJson, storesBytes: false, settings: settings); ---- + In all other cases postdata is serialized as is. 
-[source, csharp] +[source,csharp] ---- -await Post(()=>@object, writes: objectJson, storesBytes: false, settings: settings); +await Post(() => @object, writes: objectJson, storesBytes: false, settings: settings); ---- -If you want to maintain a copy of the request that went out use the following settings -[source, csharp] +If you want to maintain a copy of the request that went out, use `DisableDirectStreaming` + +[source,csharp] ---- settings = new ConnectionSettings().DisableDirectStreaming(); ---- -by forcing `DisableDirectStreaming` serializing happens first in a private MemoryStream -so we can get a hold of the serialized bytes -[source, csharp] +by forcing `DisableDirectStreaming` on connection settings, serialization happens first in a private `MemoryStream` +so we can get hold of the serialized bytes + +[source,csharp] ---- -await Post(()=>listOfObjects, writes: multiObjectJson, storesBytes: true, settings: settings); +await Post(() => listOfObjects, writes: multiObjectJson, storesBytes: true, settings: settings); ---- + this behavior can also be observed when serializing a simple object using `DisableDirectStreaming` -[source, csharp] +[source,csharp] ---- -await Post(()=>@object, writes: objectJson, storesBytes: true, settings: settings); +await Post(() => @object, writes: objectJson, storesBytes: true, settings: settings); ---- -[source, csharp] + +[source,csharp] ---- PostAssert(postData(), writes, storesBytes, settings); + await PostAssertAsync(postData(), writes, storesBytes, settings); +---- + +[source,csharp] +---- postData.Write(ms, settings); + var sentBytes = ms.ToArray(); + sentBytes.Should().Equal(writes); + postData.WrittenBytes.Should().NotBeNull(); + postData.WrittenBytes.Should().BeNull(); +---- + +[source,csharp] +---- await postData.WriteAsync(ms, settings); + var sentBytes = ms.ToArray(); + sentBytes.Should().Equal(writes); + postData.WrittenBytes.Should().NotBeNull(); + postData.WrittenBytes.Should().BeNull(); ---- + diff --git 
a/docs/asciidoc/code-standards/descriptors.asciidoc b/docs/asciidoc/code-standards/descriptors.asciidoc new file mode 100644 index 00000000000..9ac9cf21d9a --- /dev/null +++ b/docs/asciidoc/code-standards/descriptors.asciidoc @@ -0,0 +1,61 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[descriptors]] +== Descriptors + +Every descriptor should inherit from `DescriptorBase`, this hides object members from the fluent interface + +[source,csharp] +---- +var notDescriptors = new[] { typeof(ClusterProcessOpenFileDescriptors).Name, "DescriptorForAttribute" }; +var descriptors = from t in typeof(DescriptorBase<,>).Assembly().Types() + where t.IsClass() + && t.Name.Contains("Descriptor") + && !notDescriptors.Contains(t.Name) + && !t.GetInterfaces().Any(i => i == typeof(IDescriptor)) + select t.FullName; +descriptors.Should().BeEmpty(); +---- + +Methods taking a func should have that func return an interface + +[source,csharp] +---- +var descriptors = + from t in typeof(DescriptorBase<,>).Assembly().Types() + where t.IsClass() && typeof(IDescriptor).IsAssignableFrom(t) + select t; + +var selectorMethods = + from d in descriptors + from m in d.GetMethods() + let parameters = m.GetParameters() + from p in parameters + let type = p.ParameterType + let isGeneric = type.IsGeneric() + where isGeneric + let isFunc = type.GetGenericTypeDefinition() == typeof(Func<,>) + where isFunc + let firstFuncArg = type.GetGenericArguments().First() + let secondFuncArg = type.GetGenericArguments().Last() + let isQueryFunc = firstFuncArg.IsGeneric() && + firstFuncArg.GetGenericTypeDefinition() == typeof(QueryContainerDescriptor<>) && + typeof(QueryContainer).IsAssignableFrom(secondFuncArg) + where !isQueryFunc + let isFluentDictionaryFunc = + firstFuncArg.IsGeneric() && + firstFuncArg.GetGenericTypeDefinition() == typeof(FluentDictionary<,>) && + 
secondFuncArg.IsGeneric() && + secondFuncArg.GetGenericTypeDefinition() == typeof(FluentDictionary<,>) + where !isFluentDictionaryFunc + let lastArgIsNotInterface = !secondFuncArg.IsInterface() + where lastArgIsNotInterface + select $"{m.Name} on {m.DeclaringType.Name}"; + +selectorMethods.Should().BeEmpty(); +---- + diff --git a/docs/asciidoc/CodeStandards/ElasticClient.doc.asciidoc b/docs/asciidoc/code-standards/elastic-client.asciidoc similarity index 54% rename from docs/asciidoc/CodeStandards/ElasticClient.doc.asciidoc rename to docs/asciidoc/code-standards/elastic-client.asciidoc index d8e7ee25432..7da07cba572 100644 --- a/docs/asciidoc/CodeStandards/ElasticClient.doc.asciidoc +++ b/docs/asciidoc/code-standards/elastic-client.asciidoc @@ -1,37 +1,63 @@ -[source, csharp] +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[elastic-client]] +== Elastic Client + +[source,csharp] ---- -var fluentParametersNotNamedSelector = - from m in typeof (IElasticClient).GetMethods() - from p in m.GetParameters() - where p.ParameterType.BaseType() == typeof (MulticastDelegate) - where !p.Name.Equals("selector") - select $"method '{nameof(IElasticClient)}.{m.Name}' should have parameter name of 'selector' but has a name of '{p.Name}'"; -fluentParametersNotNamedSelector.Should().BeEmpty(); var requestParametersNotNamedRequest = - from m in typeof(IElasticClient).GetMethods() - from p in m.GetParameters() - where typeof(IRequest).IsAssignableFrom(p.ParameterType) - where !p.Name.Equals("request") - select $"method '{nameof(IElasticClient)}.{m.Name}' should have parameter name of 'request' but has a name of '{p.Name}'"; + from m in typeof(IElasticClient).GetMethods() + from p in m.GetParameters() + where typeof(IRequest).IsAssignableFrom(p.ParameterType) + where !p.Name.Equals("request") + select $"method '{nameof(IElasticClient)}.{m.Name}' should 
have parameter name of 'request' but has a name of '{p.Name}'"; + requestParametersNotNamedRequest.Should().BeEmpty(); +---- + +[source,csharp] +---- var requestParameters = - (from m in typeof(IElasticClient).GetMethods() - from p in m.GetParameters() - where typeof(IRequest).IsAssignableFrom(p.ParameterType) - select p).ToList(); + (from m in typeof(IElasticClient).GetMethods() + from p in m.GetParameters() + where typeof(IRequest).IsAssignableFrom(p.ParameterType) + select p).ToList(); + requestParameter.HasDefaultValue.Should().BeFalse(); +---- + +[source,csharp] +---- var concreteMethodParametersDoNotMatchInterface = new List(); + var interfaceMap = typeof(ElasticClient).GetInterfaceMap(typeof(IElasticClient)); + var indexOfInterfaceMethod = Array.IndexOf(interfaceMap.InterfaceMethods, interfaceMethodInfo); + var concreteMethod = interfaceMap.TargetMethods[indexOfInterfaceMethod]; + var concreteParameters = concreteMethod.GetParameters(); + var interfaceParameters = interfaceMethodInfo.GetParameters(); + var parameterInfo = concreteParameters[i]; + var interfaceParameter = interfaceParameters[i]; + parameterInfo.Name.Should().Be(interfaceParameter.Name); + concreteMethodParametersDoNotMatchInterface.Add( $"'{interfaceParameter.Name}' parameter on concrete implementation of '{nameof(ElasticClient)}.{interfaceMethodInfo.Name}' to {(interfaceParameter.HasDefaultValue ? 
string.Empty : "NOT")} be optional"); + concreteMethodParametersDoNotMatchInterface.Should().BeEmpty(); +---- + +[source,csharp] +---- var methodGroups = from methodInfo in typeof(IElasticClient).GetMethods() where @@ -42,13 +68,71 @@ var methodGroups = let method = new MethodWithRequestParameter(methodInfo) group method by method.Name into methodGroup select methodGroup; + var parameters = asyncMethod.MethodInfo.GetParameters(); + var syncMethod = methodGroup.First(g => !g.IsAsync && g.MethodType == asyncMethod.MethodType && g.MethodInfo.GetParameters().Length == parameters.Length && (!asyncMethod.MethodInfo.IsGenericMethod || g.MethodInfo.GetGenericArguments().Length == asyncMethod.MethodInfo.GetGenericArguments().Length)); + asyncMethod.Parameter.HasDefaultValue.Should().Be(syncMethod.Parameter.HasDefaultValue, $"sync and async versions of {asyncMethod.MethodType} '{nameof(ElasticClient)}{methodGroup.Key}' should match"); ---- + +[source,csharp] +---- +var fluentParametersNotNamedSelector = + from m in typeof (IElasticClient).GetMethods() + from p in m.GetParameters() + where p.ParameterType.BaseType() == typeof (MulticastDelegate) + where !p.Name.Equals("selector") + select $"method '{nameof(IElasticClient)}.{m.Name}' should have parameter name of 'selector' but has a name of '{p.Name}'"; +fluentParametersNotNamedSelector.Should().BeEmpty(); +---- + +[source,csharp] +---- +private class MethodWithRequestParameter + { + public string Name { get; } + + public MethodInfo MethodInfo { get; } + + public bool IsAsync { get; } + + public ClientMethodType MethodType { get; } + + public ParameterInfo Parameter { get; } + + public MethodWithRequestParameter(MethodInfo methodInfo) + { + Name = methodInfo.Name.EndsWith("Async") + ? 
methodInfo.Name.Substring(0, methodInfo.Name.Length - "Async".Length) + : methodInfo.Name; + + IsAsync = methodInfo.ReturnType.IsGeneric() && + methodInfo.ReturnType.GetGenericTypeDefinition() == typeof(Task<>); + + MethodInfo = methodInfo; + + var parameterInfo = methodInfo.GetParameters() + .FirstOrDefault(p => typeof(IRequest).IsAssignableFrom(p.ParameterType)); + + if (parameterInfo != null) + { + Parameter = parameterInfo; + MethodType = ClientMethodType.Initializer; + } + else + { + Parameter = methodInfo.GetParameters() + .First(p => p.ParameterType.BaseType() == typeof(MulticastDelegate)); + MethodType = ClientMethodType.Fluent; + } + } + } +---- + diff --git a/docs/asciidoc/code-standards/naming-conventions.asciidoc b/docs/asciidoc/code-standards/naming-conventions.asciidoc new file mode 100644 index 00000000000..a4a0ea64bf0 --- /dev/null +++ b/docs/asciidoc/code-standards/naming-conventions.asciidoc @@ -0,0 +1,130 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[naming-conventions]] +== Naming Conventions + +NEST uses the following naming conventions (with _some_ exceptions). 
+ +=== Class Names + +Abstract class names should end with a `Base` suffix + +[source,csharp] +---- +var exceptions = new[] +{ + typeof(DateMath) +}; +var abstractClasses = typeof(IRequest).Assembly().GetTypes() + .Where(t => t.IsClass() && t.IsAbstract() && !t.IsSealed() && !exceptions.Contains(t)) + .Where(t => !t.Name.Split('`')[0].EndsWith("Base")) + .Select(t => t.Name.Split('`')[0]) + .ToList(); +abstractClasses.Should().BeEmpty(); +---- + +Class names that end with `Base` suffix are abstract + +[source,csharp] +---- +var exceptions = new[] { typeof(DateMath) }; + +var baseClassesNotAbstract = typeof(IRequest).Assembly().GetTypes() + .Where(t => t.IsClass() && !exceptions.Contains(t)) + .Where(t => t.Name.Split('`')[0].EndsWith("Base")) + .Where(t => !t.IsAbstractClass()) + .Select(t => t.Name.Split('`')[0]) + .ToList(); + +baseClassesNotAbstract.Should().BeEmpty(); +---- + +=== Requests and Responses + +Request class names should end with `Request` + +[source,csharp] +---- +var types = typeof(IRequest).Assembly().GetTypes(); + +var requests = types + .Where(t => typeof(IRequest).IsAssignableFrom(t) && !t.IsAbstract()) + .Where(t => !typeof(IDescriptor).IsAssignableFrom(t)) + .Where(t => !t.Name.Split('`')[0].EndsWith("Request")) + .Select(t => t.Name.Split('`')[0]) + .ToList(); + +requests.Should().BeEmpty(); +---- + +Response class names should end with `Response` + +[source,csharp] +---- +var types = typeof(IRequest).Assembly().GetTypes(); + +var responses = types + .Where(t => typeof(IResponse).IsAssignableFrom(t) && !t.IsAbstract()) + .Where(t => !t.Name.Split('`')[0].EndsWith("Response")) + .Select(t => t.Name.Split('`')[0]) + .ToList(); + +responses.Should().BeEmpty(); +---- + +Request and Response class names should be one to one in *most* cases. +e.g. 
`ValidateRequest` => `ValidateResponse`, and not `ValidateQueryRequest` => `ValidateResponse` +There are a few exceptions to this rule, most notably the `Cat` prefixed requests and +the `Exists` requests. + +[source,csharp] +---- +var exceptions = new[] <1> +{ + typeof(DocumentExistsRequest), + typeof(DocumentExistsRequest<>), + typeof(AliasExistsRequest), + typeof(IndexExistsRequest), + typeof(TypeExistsRequest), + typeof(IndexTemplateExistsRequest), + typeof(SearchExistsRequest), + typeof(SearchExistsRequest<>), + typeof(SearchTemplateRequest), + typeof(SearchTemplateRequest<>), + typeof(ScrollRequest), + typeof(SourceRequest), + typeof(SourceRequest<>), + typeof(ValidateQueryRequest<>), + typeof(GetAliasRequest), +#pragma warning disable 612 + typeof(CatNodeattrsRequest), +#pragma warning restore 612 + typeof(IndicesShardStoresRequest), + typeof(RenderSearchTemplateRequest) +}; + +var types = typeof(IRequest).Assembly().GetTypes(); + +var requests = new HashSet(types + .Where(t => + t.IsClass() && + !t.IsAbstract() && + typeof(IRequest).IsAssignableFrom(t) && + !typeof(IDescriptor).IsAssignableFrom(t) + && !t.Name.StartsWith("Cat") + && !exceptions.Contains(t)) + .Select(t => t.Name.Split('`')[0].Replace("Request", "")) +); + +var responses = types + .Where(t => t.IsClass() && !t.IsAbstract() && typeof(IResponse).IsAssignableFrom(t)) + .Select(t => t.Name.Split('`')[0].Replace("Response", "")); + +requests.Except(responses).Should().BeEmpty(); +---- +<1> _Exceptions to the rule_ + diff --git a/docs/asciidoc/code-standards/queries.asciidoc b/docs/asciidoc/code-standards/queries.asciidoc new file mode 100644 index 00000000000..b78f193173f --- /dev/null +++ b/docs/asciidoc/code-standards/queries.asciidoc @@ -0,0 +1,63 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[queries]] +== Queries + +[source,csharp] +---- +var 
staticProperties = from p in typeof(Query<>).GetMethods() + let name = p.Name.StartsWith("GeoShape") ? "GeoShape" : p.Name + select name; + +var placeHolders = QueryPlaceHolderProperties.Select(p => p.Name.StartsWith("GeoShape") ? "GeoShape" : p.Name); + +staticProperties.Distinct().Should().Contain(placeHolders.Distinct()); +---- + +[source,csharp] +---- +var fluentMethods = from p in typeof(QueryContainerDescriptor<>).GetMethods() + let name = p.Name.StartsWith("GeoShape") ? "GeoShape" : p.Name + select name; + +var placeHolders = QueryPlaceHolderProperties.Select(p => p.Name.StartsWith("GeoShape") ? "GeoShape" : p.Name); + +fluentMethods.Distinct().Should().Contain(placeHolders.Distinct()); +---- + +[source,csharp] +---- +var skipQueryImplementations = new[] { typeof(IFieldNameQuery), typeof(IFuzzyQuery<,>), typeof(IConditionlessQuery) }; + +var queries = typeof(IQuery).Assembly().ExportedTypes + .Where(t => t.IsInterface() && typeof(IQuery).IsAssignableFrom(t)) + .Where(t => !skipQueryImplementations.Contains(t)) + .ToList(); + +queries.Should().NotBeEmpty(); + +var visitMethods = typeof(IQueryVisitor).GetMethods().Where(m => m.Name == "Visit"); + +visitMethods.Should().NotBeEmpty(); + +var missingTypes = from q in queries + let visitMethod = visitMethods.FirstOrDefault(m => m.GetParameters().First().ParameterType == q) + where visitMethod == null + select q; + +missingTypes.Should().BeEmpty(); +---- + +[source,csharp] +---- +var properties = from p in QueryProperties + let a = p.GetCustomAttributes().Concat(p.GetCustomAttributes()) + where a.Count() != 1 + select p; +properties.Should().BeEmpty(); +---- + diff --git a/docs/asciidoc/CodeStandards/Serialization/Properties.doc.asciidoc b/docs/asciidoc/code-standards/serialization/properties.asciidoc similarity index 76% rename from docs/asciidoc/CodeStandards/Serialization/Properties.doc.asciidoc rename to docs/asciidoc/code-standards/serialization/properties.asciidoc index 7814362db9b..1bfc7353ec6 100644 --- 
a/docs/asciidoc/CodeStandards/Serialization/Properties.doc.asciidoc +++ b/docs/asciidoc/code-standards/serialization/properties.asciidoc @@ -1,8 +1,15 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current -Our Json.NET contract resolver picks up attributes set on the interface +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages +[[properties]] +== Properties -[source, csharp] +Our Json.NET contract resolver picks up attributes set on the interface + +[source,csharp] ---- var pool = new SingleNodeConnectionPool(new Uri("http://localhost:9200")); var settings = new ConnectionSettings(pool, new InMemoryConnection()); @@ -14,3 +21,4 @@ serialized = c.Serializer.SerializeToString(new AnalysisDescriptor().CharFilters serialized.Should().NotContain("char_filters").And.NotContain("charFilters"); serialized.Should().Contain("char_filter"); ---- + diff --git a/docs/asciidoc/common-options.asciidoc b/docs/asciidoc/common-options.asciidoc new file mode 100644 index 00000000000..b0d00578726 --- /dev/null +++ b/docs/asciidoc/common-options.asciidoc @@ -0,0 +1,24 @@ +:output-dir: common-options + +[[common-options]] += Common Options + +[partintro] +-- +NEST has a number of types for working with Elasticsearch conventions for: + + +* <> + +* <> + +* <> + +-- + +include::{output-dir}/time-unit/time-units.asciidoc[] + +include::{output-dir}/distance-unit/distance-units.asciidoc[] + +include::{output-dir}/date-math/date-math-expressions.asciidoc[] + diff --git a/docs/asciidoc/common-options/date-math/date-math-expressions.asciidoc b/docs/asciidoc/common-options/date-math/date-math-expressions.asciidoc new file mode 100644 index 00000000000..bed4875f759 --- /dev/null +++ b/docs/asciidoc/common-options/date-math/date-math-expressions.asciidoc @@ -0,0 +1,125 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: 
https://www.nuget.org/packages + +[[date-math-expressions]] +== Date Math Expressions + +The date type supports using date math expressions when using it in a query/filter +Whenever durations need to be specified, eg for a timeout parameter, the duration can be specified + +The expression starts with an "anchor" date, which can be either `now` or a date string (in the applicable format) ending with `||`. +It can then be followed by a math expression, supporting `+`, `-` and `/` (rounding). +The units supported are + +* `y` (year) + +* `M` (month) + +* `w` (week) + +* `d` (day) + +* `h` (hour) + +* `m` (minute) + +* `s` (second) + +as a whole number representing time in milliseconds, or as a time value like `2d` for 2 days. + +:datemath: {ref_current}/common-options.html#date-math + +Be sure to read the elasticsearch documentation {datemath}[on this subject here] + +=== Simple Expressions + +You can create simple expressions using any of the static methods on `DateMath` + +[source,csharp] +---- +Expect("now").WhenSerializing(Nest.DateMath.Now); + +Expect("2015-05-05T00:00:00").WhenSerializing(Nest.DateMath.Anchored(new DateTime(2015,05, 05))); +---- + +strings implicitly convert to `DateMath` + +[source,csharp] +---- +Expect("now").WhenSerializing("now"); +---- + +but are lenient to bad math expressions + +[source,csharp] +---- +var nonsense = "now||*asdaqwe"; +---- + +the resulting date math will assume the whole string is the anchor + +[source,csharp] +---- +Expect(nonsense).WhenSerializing(nonsense) +.Result(dateMath => ((IDateMath)dateMath) + .Anchor.Match( + d => d.Should().NotBe(default(DateTime)), + s => s.Should().Be(nonsense) + ) + ); +---- + +`DateTime` also implicitly converts to simple date math expressions + +[source,csharp] +---- +var date = new DateTime(2015, 05, 05); +---- + +the anchor will be an actual `DateTime`, even after a serialization/deserialization round trip + +[source,csharp] +---- +Expect("2015-05-05T00:00:00").WhenSerializing(date) 
+.Result(dateMath => ((IDateMath)dateMath) + . Anchor.Match( + d => d.Should().Be(date), + s => s.Should().BeNull() + ) + ); +---- + +=== Complex Expressions + +Ranges can be chained on to simple expressions + +[source,csharp] +---- +Expect("now+1d").WhenSerializing(Nest.DateMath.Now.Add("1d")); +---- + +Including multiple operations + +[source,csharp] +---- +Expect("now+1d-1m").WhenSerializing(Nest.DateMath.Now.Add("1d").Subtract(TimeSpan.FromMinutes(1))); +---- + +A rounding value can be chained to the end of the expression, after which no more ranges can be appended + +[source,csharp] +---- +Expect("now+1d-1m/d").WhenSerializing(Nest.DateMath.Now.Add("1d").Subtract(TimeSpan.FromMinutes(1)).RoundTo(Nest.TimeUnit.Day)); +---- + +When anchoring dates, a `||` needs to be appended as clear separator between the anchor and ranges. +Again, multiple ranges can be chained + +[source,csharp] +---- +Expect("2015-05-05T00:00:00||+1d-1m").WhenSerializing(Nest.DateMath.Anchored(new DateTime(2015,05,05)).Add("1d").Subtract(TimeSpan.FromMinutes(1))); +---- + diff --git a/docs/asciidoc/common-options/distance-unit/distance-units.asciidoc b/docs/asciidoc/common-options/distance-unit/distance-units.asciidoc new file mode 100644 index 00000000000..c455d46b1ae --- /dev/null +++ b/docs/asciidoc/common-options/distance-unit/distance-units.asciidoc @@ -0,0 +1,124 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[distance-units]] +== Distance Units + +Whenever distances need to be specified, e.g. for a {ref_current}/query-dsl-geo-distance-query.html[geo distance query], +the distance unit can be specified as a double number representing distance in meters, as a new instance of +a `Distance`, or as a string of the form number and distance unit e.g. 
"`2.72km`" + +=== Using Distance units in NEST + +NEST uses `Distance` to strongly type distance units and there are several ways to construct one. + +==== Constructor + +The most straightforward way to construct a `Distance` is through its constructor + +[source,csharp] +---- +var unitComposed = new Distance(25); +var unitComposedWithUnits = new Distance(25, Nest.DistanceUnit.Meters); +---- + +`Distance` serializes to a string composed of a factor and distance unit. +The factor is a double so always has at least one decimal place when serialized + +[source,csharp] +---- +Expect("25.0m") + .WhenSerializing(unitComposed) + .WhenSerializing(unitComposedWithUnits); +---- + +==== Implicit conversion + +Alternatively a distance unit `string` can be assigned to a `Distance`, resulting in an implicit conversion to a new `Distance` instance. +If no `DistanceUnit` is specified, the default distance unit is meters + +[source,csharp] +---- +Distance distanceString = "25"; + +Distance distanceStringWithUnits = "25m"; + +Expect(new Distance(25)) + .WhenSerializing(distanceString) + .WhenSerializing(distanceStringWithUnits); +---- + +==== Supported units + +A number of distance units are supported, from millimeters to nautical miles + +===== Metric + +`mm` (Millimeters) + +[source,csharp] +---- +Expect("2.0mm").WhenSerializing(new Distance(2, Nest.DistanceUnit.Millimeters)); +---- + +`cm` (Centimeters) + +[source,csharp] +---- +Expect("123.456cm").WhenSerializing(new Distance(123.456, Nest.DistanceUnit.Centimeters)); +---- + +`m` (Meters) + +[source,csharp] +---- +Expect("400.0m").WhenSerializing(new Distance(400, Nest.DistanceUnit.Meters)); +---- + +`km` (Kilometers) + +[source,csharp] +---- +Expect("0.1km").WhenSerializing(new Distance(0.1, Nest.DistanceUnit.Kilometers)); +---- + +===== Imperial + +`in` (Inches) + +[source,csharp] +---- +Expect("43.23in").WhenSerializing(new Distance(43.23, Nest.DistanceUnit.Inch)); +---- + +`ft` (Feet) + +[source,csharp] +---- 
+Expect("3.33ft").WhenSerializing(new Distance(3.33, Nest.DistanceUnit.Feet)); +---- + +`yd` (Yards) + +[source,csharp] +---- +Expect("9.0yd").WhenSerializing(new Distance(9, Nest.DistanceUnit.Yards)); +---- + +`mi` (Miles) + +[source,csharp] +---- +Expect("0.62mi").WhenSerializing(new Distance(0.62, Nest.DistanceUnit.Miles)); +---- + +`nmi` or `NM` (Nautical Miles) + +[source,csharp] +---- +Expect("45.5nmi").WhenSerializing(new Distance(45.5, Nest.DistanceUnit.NauticalMiles)); +---- + diff --git a/docs/asciidoc/common-options/time-unit/time-units.asciidoc b/docs/asciidoc/common-options/time-unit/time-units.asciidoc new file mode 100644 index 00000000000..8a968a9c5b3 --- /dev/null +++ b/docs/asciidoc/common-options/time-unit/time-units.asciidoc @@ -0,0 +1,249 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[time-units]] +== Time units + +Whenever durations need to be specified, eg for a timeout parameter, the duration can be specified +as a whole number representing time in milliseconds, or as a time value like `2d` for 2 days. + +=== Using Time units in NEST + +NEST uses `Time` to strongly type this and there are several ways to construct one. + +==== Constructor + +The most straightforward way to construct a `Time` is through its constructor + +[source,csharp] +---- +var unitString = new Time("2d"); +var unitComposed = new Time(2, Nest.TimeUnit.Day); +var unitTimeSpan = new Time(TimeSpan.FromDays(2)); +var unitMilliseconds = new Time(1000 * 60 * 60 * 24 * 2); +---- + +When serializing Time constructed from + +* a string + +* milliseconds (as a double) + +* composition of factor and interval + +* a `TimeSpan` + +the expression will be serialized to a time unit string composed of the factor and interval e.g. 
`2d` + +[source,csharp] +---- +Expect("2d") + .WhenSerializing(unitString) + .WhenSerializing(unitComposed) + .WhenSerializing(unitTimeSpan) + .WhenSerializing(unitMilliseconds); +---- + +The `Milliseconds` property on `Time` is calculated even when not using the constructor that takes a double + +[source,csharp] +---- +unitMilliseconds.Milliseconds.Should().Be(1000*60*60*24*2); + +unitComposed.Milliseconds.Should().Be(1000*60*60*24*2); +unitTimeSpan.Milliseconds.Should().Be(1000*60*60*24*2); +unitString.Milliseconds.Should().Be(1000*60*60*24*2); +---- + +==== Implicit conversion + +As an alternative to using the constructor, `string`, `TimeSpan` and `double` can be implicitly converted to `Time` + +[source,csharp] +---- +Time oneAndHalfYear = "1.5y"; + +Time twoWeeks = TimeSpan.FromDays(14); + +Time twoDays = 1000*60*60*24*2; + +Expect("1.5y").WhenSerializing(oneAndHalfYear); + +Expect("2w").WhenSerializing(twoWeeks); + +Expect("2d").WhenSerializing(twoDays); +---- + +[source,csharp] +---- +Time oneAndHalfYear = "1.5y"; + +Time twoWeeks = TimeSpan.FromDays(14); + +Time twoDays = 1000*60*60*24*2; +---- + +Milliseconds are calculated even when values are not passed as long + +[source,csharp] +---- +twoWeeks.Milliseconds.Should().BeGreaterThan(1); +---- + +Except when dealing with years or months, whose millisecond value cannot +be calculated *accurately*, since they are not fixed durations. For instance, +30 vs 31 vs 28 days in a month, or 366 vs 365 days in a year. +In this instance, Milliseconds will be -1. 
+ +[source,csharp] +---- +oneAndHalfYear.Milliseconds.Should().Be(-1); +---- + +This allows you to do comparisons on the expressions + +[source,csharp] +---- +oneAndHalfYear.Should().BeGreaterThan(twoWeeks); + +(oneAndHalfYear > twoWeeks).Should().BeTrue(); + +(oneAndHalfYear >= twoWeeks).Should().BeTrue(); + +(twoDays >= new Time("2d")).Should().BeTrue(); + +twoDays.Should().BeLessThan(twoWeeks); + +(twoDays < twoWeeks).Should().BeTrue(); + +(twoDays <= twoWeeks).Should().BeTrue(); + +(twoDays <= new Time("2d")).Should().BeTrue(); +---- + +And assert equality + +[source,csharp] +---- +twoDays.Should().Be(new Time("2d")); + +(twoDays == new Time("2d")).Should().BeTrue(); + +(twoDays != new Time("2.1d")).Should().BeTrue(); + +(new Time("2.1d") == new Time(TimeSpan.FromDays(2.1))).Should().BeTrue(); + +(new Time("1") == new Time(1)).Should().BeTrue(); + +(new Time("-1") == new Time(-1)).Should().BeTrue(); +---- + +=== Time units + +Time units are specified as a union of either a `DateInterval` or `Time`, +both of which implicitly convert to the `Union` of these two. 
+ +[source,csharp] +---- +Expect("month").WhenSerializing>(DateInterval.Month); + +Expect("day").WhenSerializing>(DateInterval.Day); + +Expect("hour").WhenSerializing>(DateInterval.Hour); + +Expect("minute").WhenSerializing>(DateInterval.Minute); + +Expect("quarter").WhenSerializing>(DateInterval.Quarter); + +Expect("second").WhenSerializing>(DateInterval.Second); + +Expect("week").WhenSerializing>(DateInterval.Week); + +Expect("year").WhenSerializing>(DateInterval.Year); + +Expect("2d").WhenSerializing>((Time)"2d"); + +Expect("1.16w").WhenSerializing>((Time)TimeSpan.FromDays(8.1)); +---- + +[source,csharp] +---- +double millisecondsInAMonth = 2592000000; + +Expect("4.29w").WhenSerializing(new Time(millisecondsInAMonth)); + +Expect("8.57w").WhenSerializing(new Time(millisecondsInAMonth * 2)); + +Expect("51.43w").WhenSerializing(new Time(millisecondsInAMonth * 12)); + +Expect("102.86w").WhenSerializing(new Time(millisecondsInAMonth * 24)); +---- + +[source,csharp] +---- +Expect("-1").WhenSerializing(new Time(-1)); + +Expect("-1").WhenSerializing(new Time("-1")); + +Assert( + 1, Nest.TimeUnit.Year, -1, "1y", + new Time(1, Nest.TimeUnit.Year), + new Time("1y") +); + +Assert( + 1, Nest.TimeUnit.Month, -1, "1M", + new Time(1, Nest.TimeUnit.Month), + new Time("1M") +); + +Assert( + 1, Nest.TimeUnit.Week, TimeSpan.FromDays(7).TotalMilliseconds, "1w", + new Time(1, Nest.TimeUnit.Week), + new Time("1w"), + new Time(TimeSpan.FromDays(7).TotalMilliseconds) +); + +Assert( + 1, Nest.TimeUnit.Day, TimeSpan.FromDays(1).TotalMilliseconds, "1d", + new Time(1, Nest.TimeUnit.Day), + new Time("1d"), + new Time(TimeSpan.FromDays(1).TotalMilliseconds) +); + +Assert( + 1, Nest.TimeUnit.Hour, TimeSpan.FromHours(1).TotalMilliseconds, "1h", + new Time(1, Nest.TimeUnit.Hour), + new Time("1h"), + new Time(TimeSpan.FromHours(1).TotalMilliseconds) +); + +Assert( + 1, Nest.TimeUnit.Minute, TimeSpan.FromMinutes(1).TotalMilliseconds, "1m", + new Time(1, Nest.TimeUnit.Minute), + new Time("1m"), + 
new Time(TimeSpan.FromMinutes(1).TotalMilliseconds) +); + +Assert( + 1, Nest.TimeUnit.Second, TimeSpan.FromSeconds(1).TotalMilliseconds, "1s", + new Time(1, Nest.TimeUnit.Second), + new Time("1s"), + new Time(TimeSpan.FromSeconds(1).TotalMilliseconds) +); +---- + +[source,csharp] +---- +time.Factor.Should().Be(expectedFactor); + +time.Interval.Should().Be(expectedInterval); + +time.Milliseconds.Should().Be(expectedMilliseconds); + +Expect(expectedSerialized).WhenSerializing(time); +---- + diff --git a/docs/asciidoc/connection-pooling.asciidoc b/docs/asciidoc/connection-pooling.asciidoc new file mode 100644 index 00000000000..58bc6ae0e48 --- /dev/null +++ b/docs/asciidoc/connection-pooling.asciidoc @@ -0,0 +1,64 @@ +:output-dir: client-concepts/connection-pooling + +:building-blocks: {output-dir}/building-blocks + +:sniffing: {output-dir}/sniffing + +:pinging: {output-dir}/pinging + +:round-robin: {output-dir}/round-robin + +:failover: {output-dir}/failover + +:max-retries: {output-dir}/max-retries + +:request-overrides: {output-dir}/request-overrides + +:exceptions: {output-dir}/exceptions + +include::{building-blocks}/connection-pooling.asciidoc[] + +include::{building-blocks}/request-pipelines.asciidoc[] + +include::{building-blocks}/transports.asciidoc[] + +include::{building-blocks}/keeping-track-of-nodes.asciidoc[] + +include::{building-blocks}/date-time-providers.asciidoc[] + +include::{sniffing}/on-startup.asciidoc[] + +include::{sniffing}/on-connection-failure.asciidoc[] + +include::{sniffing}/on-stale-cluster-state.asciidoc[] + +include::{sniffing}/role-detection.asciidoc[] + +include::{pinging}/first-usage.asciidoc[] + +include::{pinging}/revival.asciidoc[] + +include::{round-robin}/round-robin.asciidoc[] + +include::{round-robin}/skip-dead-nodes.asciidoc[] + +include::{round-robin}/volatile-updates.asciidoc[] + +include::{failover}/falling-over.asciidoc[] + +include::{max-retries}/respects-max-retry.asciidoc[] + 
+include::{request-overrides}/disable-sniff-ping-per-request.asciidoc[] + +include::{request-overrides}/request-timeouts-overrides.asciidoc[] + +include::{request-overrides}/respects-max-retry-overrides.asciidoc[] + +include::{request-overrides}/respects-allowed-status-code.asciidoc[] + +include::{request-overrides}/respects-force-node.asciidoc[] + +include::{exceptions}/unexpected-exceptions.asciidoc[] + +include::{exceptions}/unrecoverable-exceptions.asciidoc[] + diff --git a/docs/asciidoc/hadouken-indentation.jpg b/docs/asciidoc/hadouken-indentation.jpg new file mode 100644 index 00000000000..afe03b960d1 Binary files /dev/null and b/docs/asciidoc/hadouken-indentation.jpg differ diff --git a/docs/asciidoc/high-level.asciidoc b/docs/asciidoc/high-level.asciidoc new file mode 100644 index 00000000000..ff18811690f --- /dev/null +++ b/docs/asciidoc/high-level.asciidoc @@ -0,0 +1,66 @@ +:output-dir: client-concepts/high-level + +[[nest]] += Client Concepts - NEST + +[partintro] +-- +The high level client, `ElasticClient`, provides a strongly typed query DSL that maps one-to-one with the Elasticsearch query DSL. + +It can be installed from the Package Manager Console inside Visual Studio using + + +[source,shell] +---- +Install-Package NEST +---- + + +Or by searching for https://www.nuget.org/packages/NEST[NEST] in the Package Manager GUI. + +NEST internally uses and still exposes the low level client, `ElasticLowLevelClient`, from <> via +the `.LowLevel` property on `ElasticClient`. 
+ +There are a number of conventions that NEST uses for inference of + + +* <> + +* <> + +* <> and <> + +* <> + +* <> + +* <> + + +In addition to features such as + + +* <> + +* <> + +-- + +include::{output-dir}/inference/index-name-inference.asciidoc[] + +include::{output-dir}/inference/indices-paths.asciidoc[] + +include::{output-dir}/inference/field-inference.asciidoc[] + +include::{output-dir}/inference/property-inference.asciidoc[] + +include::{output-dir}/inference/ids-inference.asciidoc[] + +include::{output-dir}/inference/document-paths.asciidoc[] + +include::{output-dir}/inference/features-inference.asciidoc[] + +include::{output-dir}/mapping/auto-map.asciidoc[] + +include::{output-dir}/covariant-hits/covariant-search-results.asciidoc[] + diff --git a/docs/asciidoc/index.asciidoc b/docs/asciidoc/index.asciidoc index 873adf55c98..57662ec8c3d 100644 --- a/docs/asciidoc/index.asciidoc +++ b/docs/asciidoc/index.asciidoc @@ -1,53 +1,13 @@ -# Introduction +[[elasticsearch-net-reference]] += Elasticsearch.Net and NEST: the .NET clients -You've reached the documentation page for `Elasticsearch.Net` and `NEST`. The two official .NET clients for Elasticsearch. So why two clients I hear you say? +include::intro.asciidoc[] -`Elasticsearch.Net` is a very low level, dependency free, client that has no opinions about how you build and represent your requests and responses. It has abstracted -enough so that **all** the Elasticsearch API endpoints are represented as methods but not too much to get in the way of how you want to build your json/request/response objects. It also comes with builtin, configurable/overridable, cluster failover retry mechanisms. Elasticsearch is elastic so why not your client? 
+include::client-concepts.asciidoc[] -`NEST` is a high level client that has the advantage of having mapped all the request and response objects, comes with a strongly typed query DSL that maps 1 to 1 with the Elasticsearch query DSL, and takes advantage of specific .NET features such as covariant results. NEST internally uses, and still exposes, the low level `Elasticsearch.Net` client. +include::common-options.asciidoc[] -Please read the getting started guide for both. - - -## Who's using Nest -* [stackoverflow.com](http://www.stackoverflow.com) (and the rest of the stackexchange family). -* [7digital.com](http://www.7digital.com) (run NEST on mono). -* [rijksmuseum.nl](https://www.rijksmuseum.nl/en) (Elasticsearch is the only datastorage hit for each page). -* [Kiln](http://www.fogcreek.com/kiln/) FogCreek's version control & code review tooling. - They are so pleased with Elasticsearch that [they made a video about how pleased they are!](http://blog.fogcreek.com/kiln-powered-by-elasticsearch/) - -## Other resources - -[@joelabrahamsson](http://twitter.com/joelabrahamsson) wrote a great [intro into elasticsearch on .NET](http://joelabrahamsson.com/entry/extending-aspnet-mvc-music-store-with-elasticsearch) -using NEST. - -Also checkout the [searchbox.io guys](https://searchbox.io/) rocking NEST [on AppHarbor](http://blog.appharbor.com/2012/06/19/searchbox-elasticsearch-is-now-an-add-on) -with their [demo project](https://github.com/searchbox-io/.net-sample) - -## Questions, bugs, comments, requests - -All of these are more then welcome on the github issues pages! We try to to at least reply within the same day. - -We also monitor question tagged with ['nest' on stackoverflow](http://stackoverflow.com/questions/tagged/nest) or -['elasticsearch-net' on stackoverflow](http://stackoverflow.com/questions/tagged/elasticsearch-net) - -# License - -This software is licensed under the Apache 2 license, quoted below. 
- - Copyright (c) 2014 Elasticsearch - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. +include::query-dsl.asciidoc[] +include::aggregations.asciidoc[] diff --git a/docs/asciidoc/intro.asciidoc b/docs/asciidoc/intro.asciidoc new file mode 100644 index 00000000000..ddc314e19bd --- /dev/null +++ b/docs/asciidoc/intro.asciidoc @@ -0,0 +1,62 @@ +:github: https://github.com/elastic/elasticsearch-net + +:stackoverflow: http://stackoverflow.com + +[[introduction]] +== Introduction + +You've reached the documentation page for `Elasticsearch.Net` and `NEST`. The two official .NET clients for Elasticsearch. So why two clients I hear you say? + +`Elasticsearch.Net` is a very low level, dependency free, client that has no opinions about how you build and represent your requests and responses. It has abstracted +enough so that **all** the Elasticsearch API endpoints are represented as methods but not too much to get in the way of how you want to build your json/request/response objects. It also comes with builtin, configurable/overridable, cluster failover retry mechanisms. Elasticsearch is elastic so why not your client? + +`NEST` is a high level client that has the advantage of having mapped all the request and response objects, comes with a strongly typed query DSL that maps 1 to 1 with the Elasticsearch query DSL, and takes advantage of specific .NET features such as covariant results. NEST internally uses, and still exposes, the low level `Elasticsearch.Net` client. 
+ +Please read the getting started guide for both. + +=== Who's using Nest + +* {stackoverflow}[stackoverflow.com] (and the rest of the stackexchange family). + +* http://www.7digital.com[7digital.com] (run NEST on mono). + +* https://www.rijksmuseum.nl/en[rijksmuseum.nl] (Elasticsearch is the only datastorage hit for each page). + +* http://www.fogcreek.com/kiln/[Kiln] FogCreek's version control & code review tooling. + They are so pleased with Elasticsearch that http://blog.fogcreek.com/kiln-powered-by-elasticsearch/[they made a video about how pleased they are!] + +=== Other resources + +http://twitter.com/joelabrahamsson[@joelabrahamsson] wrote a great http://joelabrahamsson.com/entry/extending-aspnet-mvc-music-store-with-elasticsearch[intro into elasticsearch on .NET] +using NEST. + +Also check out the https://searchbox.io/[searchbox.io guys] rocking NEST http://blog.appharbor.com/2012/06/19/searchbox-elasticsearch-is-now-an-add-on[on AppHarbor] +with their https://github.com/searchbox-io/.net-sample[demo project] + +=== Questions, bugs, comments, requests + +All of these are more than welcome on the {github}/issues[github issues pages]! We try to at least reply within the same day. + +We also monitor questions tagged with {stackoverflow}/questions/tagged/nest['nest' on stackoverflow] or +{stackoverflow}/questions/tagged/elasticsearch-net['elasticsearch-net' on stackoverflow], as well as https://discuss.elastic.co[discussions on our discourse site] + +=== License + +.... +This software is licensed under the Apache 2 license, quoted below. + + Copyright (c) 2014 Elasticsearch + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +.... + diff --git a/docs/asciidoc/low-level.asciidoc b/docs/asciidoc/low-level.asciidoc new file mode 100644 index 00000000000..4abee9ec908 --- /dev/null +++ b/docs/asciidoc/low-level.asciidoc @@ -0,0 +1,31 @@ +:output-dir: client-concepts/low-level + +[[elasticsearch-net]] += Client Concepts - Elasticsearch.Net + +[partintro] +-- +The low level client, `ElasticLowLevelClient`, is a low level, dependency free client that has no +opinions about how you build and represent your requests and responses. + +It can be installed from the Package Manager Console inside Visual Studio using + + +[source,shell] +---- +Install-Package Elasticsearch.Net +---- + + +Or by searching for https://www.nuget.org/packages/Elasticsearch.Net[Elasticsearch.Net] in the Package Manager GUI. 
+ +-- + +include::{output-dir}/connecting.asciidoc[] + +include::{output-dir}/lifetimes.asciidoc[] + +include::{output-dir}/post-data.asciidoc[] + +include::connection-pooling.asciidoc[] + diff --git a/docs/asciidoc/pipeline.png b/docs/asciidoc/pipeline.png new file mode 100644 index 00000000000..b15d2f0f8b6 Binary files /dev/null and b/docs/asciidoc/pipeline.png differ diff --git a/docs/asciidoc/query-dsl-usage.asciidoc b/docs/asciidoc/query-dsl-usage.asciidoc new file mode 100644 index 00000000000..8d752caf371 --- /dev/null +++ b/docs/asciidoc/query-dsl-usage.asciidoc @@ -0,0 +1,134 @@ +:includes-from-dirs: query-dsl/compound,query-dsl/full-text,query-dsl/geo,query-dsl/joining,query-dsl/nest-specific,query-dsl/span,query-dsl/specialized,query-dsl/term-level + +include::../../docs/asciidoc/query-dsl/compound/and/and-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/compound/bool/bool-dsl-complex-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/compound/bool/bool-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/compound/boosting/boosting-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/compound/constant-score/constant-score-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/compound/dismax/dismax-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/compound/filtered/filtered-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/compound/function-score/function-score-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/compound/indices/indices-no-match-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/compound/indices/indices-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/compound/limit/limit-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/compound/not/not-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/compound/or/or-query-usage.asciidoc[] + 
+include::../../docs/asciidoc/query-dsl/full-text/common-terms/common-terms-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/full-text/match/match-phrase-prefix-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/full-text/match/match-phrase-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/full-text/match/match-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/full-text/multi-match/multi-match-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/full-text/query-string/query-string-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/full-text/simple-query-string/simple-query-string-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/geo/bounding-box/geo-bounding-box-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/geo/distance/geo-distance-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/geo/distance-range/geo-distance-range-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/geo/hash-cell/geo-hash-cell-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/geo/polygon/geo-polygon-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/geo/shape/circle/geo-shape-circle-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/geo/shape/envelope/geo-envelope-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/geo/shape/indexed-shape/geo-indexed-shape-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/geo/shape/line-string/geo-line-string-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/geo/shape/multi-line-string/geo-multi-line-string-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/geo/shape/multi-point/geo-multi-point-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/geo/shape/point/geo-point-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/geo/shape/polygon/geo-polygon-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/joining/has-child/has-child-query-usage.asciidoc[] + 
+include::../../docs/asciidoc/query-dsl/joining/has-parent/has-parent-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/joining/nested/nested-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/nest-specific/raw/raw-combine-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/nest-specific/raw/raw-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/span/container/span-containing-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/span/first/span-first-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/span/multi-term/span-multi-term-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/span/near/span-near-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/span/not/span-not-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/span/or/span-or-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/span/term/span-term-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/span/within/span-within-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/specialized/more-like-this/more-like-this-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/specialized/script/script-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/specialized/template/template-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/term-level/exists/exists-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/term-level/fuzzy/fuzzy-date-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/term-level/fuzzy/fuzzy-numeric-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/term-level/fuzzy/fuzzy-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/term-level/ids/ids-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/term-level/missing/missing-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/term-level/prefix/prefix-query-usage.asciidoc[] + 
+include::../../docs/asciidoc/query-dsl/term-level/range/date-range-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/term-level/range/numeric-range-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/term-level/range/term-range-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/term-level/regexp/regexp-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/term-level/term/term-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/term-level/terms/terms-list-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/term-level/terms/terms-lookup-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/term-level/terms/terms-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/term-level/type/type-query-usage.asciidoc[] + +include::../../docs/asciidoc/query-dsl/term-level/wildcard/wildcard-query-usage.asciidoc[] + diff --git a/docs/asciidoc/query-dsl.asciidoc b/docs/asciidoc/query-dsl.asciidoc new file mode 100644 index 00000000000..32d3b0f824a --- /dev/null +++ b/docs/asciidoc/query-dsl.asciidoc @@ -0,0 +1,147 @@ +:output-dir: query-dsl + +[[query-dsl]] += Query DSL + +[partintro] +-- +NEST exposes all of the query DSL endpoints available in Elasticsearch + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +* <> + +-- + +include::{output-dir}/bool-dsl/bool-dsl.asciidoc[] + +include::query-dsl-usage.asciidoc[] + diff --git a/docs/asciidoc/query-dsl/bool-dsl/bool-dsl.asciidoc 
b/docs/asciidoc/query-dsl/bool-dsl/bool-dsl.asciidoc new file mode 100644 index 00000000000..743350b69eb --- /dev/null +++ b/docs/asciidoc/query-dsl/bool-dsl/bool-dsl.asciidoc @@ -0,0 +1,324 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[bool-queries]] +== Bool Queries + +Writing boolean queries can grow verbose rather quickly when using the query DSL. For example, +take a single {ref_current}/query-dsl-bool-query.html[``bool`` query] with only two clauses + +[source,csharp] +---- +var searchResults = this.Client.Search(s => s + .Query(q => q + .Bool(b => b + .Should( + bs => bs.Term(p => p.Name, "x"), + bs => bs.Term(p => p.Name, "y") + ) + ) + ) +); +---- + +Now, imagine multiple nested bools; you'll realise that this quickly becomes an exercise in _hadouken indenting_ + +[[indent]] +.hadouken indenting +image::hadouken-indentation.jpg[hadouken indenting] + +=== Operator Overloading + +For this reason, NEST introduces **operator overloading** so complex bool queries become easier to write. +The previous example now becomes the following with the fluent API + +[source,csharp] +---- +var searchResults = this.Client.Search(s => s + .Query(q => q.Term(p => p.Name, "x") || q.Term(p => p.Name, "y")) +); +---- + +or, using the object initializer syntax + +[source,csharp] +---- +searchResults = this.Client.Search(new SearchRequest +{ + Query = new TermQuery { Field = "name", Value= "x" } + || new TermQuery { Field = Field(p=>p.Name), Value = "y" } +}); +---- + +A naive implementation of operator overloading would rewrite + +`term && term && term` to + +.... +bool +|___must + |___term + |___bool + |___must + |___term + |___term +.... + +As you can imagine, this becomes unwieldy quite fast as a query becomes more complex. NEST can spot these and +join them together to become a single bool query + +....
+bool +|___must + |___term + |___term + |___term +.... + +[source,csharp] +---- +Assert( + q => q.Query() && q.Query() && q.Query(), + Query && Query && Query, + c => c.Bool.Must.Should().HaveCount(3) + ); +---- + +The bool DSL also offers a shorthand notation to mark a query as a `must_not` using the `!` operator + +[source,csharp] +---- +Assert(q => !q.Query(), !Query, c => c.Bool.MustNot.Should().HaveCount(1)); +---- + +And to mark a query as a `filter` using the `+` operator + +[source,csharp] +---- +Assert(q => +q.Query(), +Query, c => c.Bool.Filter.Should().HaveCount(1)); +---- + +Both of these can be combined with `&&` to form a single bool query + +[source,csharp] +---- +Assert(q => !q.Query() && !q.Query(), !Query && !Query, c => c.Bool.MustNot.Should().HaveCount(2)); +---- + +[source,csharp] +---- +Assert(q => +q.Query() && +q.Query(), +Query && +Query, c => c.Bool.Filter.Should().HaveCount(2)); +---- + +=== Combining/Merging bool queries + +When combining multiple queries, some or all possibly marked as `must_not` or `filter`, NEST still combines them into a single bool query + +.... +bool +|___must +| |___term +| |___term +| |___term +| +|___must_not + |___term +.... + +[source,csharp] +---- +Assert( + q => q.Query() && q.Query() && q.Query() && !q.Query(), + Query && Query && Query && !Query, + c=> + { + c.Bool.Must.Should().HaveCount(3); + c.Bool.MustNot.Should().HaveCount(1); + }); + +c.Bool.Must.Should().HaveCount(3); + +c.Bool.MustNot.Should().HaveCount(1); +---- + +Even more involved `term && term && term && !term && +term && +term` still only results in a single `bool` query: + +.... +bool +|___must +| |___term +| |___term +| |___term +| +|___must_not +| |___term +| +|___filter + |___term + |___term +....
+ +[source,csharp] +---- +Assert( + q => q.Query() && q.Query() && q.Query() && !q.Query() && +q.Query() && +q.Query(), + Query && Query && Query && !Query && +Query && +Query, + c => + { + c.Bool.Must.Should().HaveCount(3); + c.Bool.MustNot.Should().HaveCount(1); + c.Bool.Filter.Should().HaveCount(2); + }); + +c.Bool.Must.Should().HaveCount(3); + +c.Bool.MustNot.Should().HaveCount(1); + +c.Bool.Filter.Should().HaveCount(2); +---- + +You can still mix and match actual bool queries with the bool DSL e.g `bool(must=term, term, term) && !term` would still merge into a single `bool` query. + +[source,csharp] +---- +Assert( + q => q.Bool(b => b.Must(mq => mq.Query(), mq => mq.Query(), mq => mq.Query())) && !q.Query(), + new BoolQuery { Must = new QueryContainer[] { Query, Query, Query } } && !Query, + c => + { + c.Bool.Must.Should().HaveCount(3); + c.Bool.MustNot.Should().HaveCount(1); + }); + +c.Bool.Must.Should().HaveCount(3); + +c.Bool.MustNot.Should().HaveCount(1); +---- + +[source,csharp] +---- +Assert( + q => q.Query() && (q.Query() || q.Query() || q.Query()), + Query && (Query || Query || Query), + c => + { + c.Bool.Must.Should().HaveCount(2); + var lastClause = c.Bool.Must.Last() as IQueryContainer; + lastClause.Should().NotBeNull(); + lastClause.Bool.Should().NotBeNull(); + lastClause.Bool.Should.Should().HaveCount(3); + }); + +c.Bool.Must.Should().HaveCount(2); + +var lastClause = c.Bool.Must.Last() as IQueryContainer; + +lastClause.Should().NotBeNull(); + +lastClause.Bool.Should().NotBeNull(); + +lastClause.Bool.Should.Should().HaveCount(3); +---- + +TIP: You can add parentheses to force evaluation order + +Also note that using shoulds as boosting factors can be really powerful so if you need this +always remember that you can mix and match an actual bool query with the bool dsl. + +There is another subtle situation where NEST will not blindly merge 2 bool queries with only should clauses. 
Imagine the following: + +`bool(should=term1, term2, term3, term4, minimum_should_match=2) || term5 || term6` + +If NEST identified both sides of the OR operation as only containing `should` clauses and +joined them together, it would give a different meaning to the `minimum_should_match` parameter of the first boolean query. +Rewriting this to a single bool with 5 `should` clauses would break because only matching on `term5` or `term6` should still be a hit. + +[source,csharp] +---- +Assert( + q => q.Bool(b => b + .Should(mq => mq.Query(), mq => mq.Query(), mq => mq.Query(), mq => mq.Query()) + .MinimumShouldMatch(2) + ) + || !q.Query() || q.Query(), + new BoolQuery + { + Should = new QueryContainer[] { Query, Query, Query, Query }, + MinimumShouldMatch = 2 + } || !Query || Query, + c => + { + c.Bool.Should.Should().HaveCount(3); + var nestedBool = c.Bool.Should.First() as IQueryContainer; + nestedBool.Bool.Should.Should().HaveCount(4); + }); + +c.Bool.Should.Should().HaveCount(3); + +var nestedBool = c.Bool.Should.First() as IQueryContainer; + +nestedBool.Bool.Should.Should().HaveCount(4); +---- + +=== Locked bool queries + +NEST will not combine `bool` queries if any of the query metadata is set, e.g. if metadata such as `boost` or `name` are set, +NEST will treat these as locked + +Here we demonstrate that two locked `bool` queries are not combined + +[source,csharp] +---- +Assert( + q => q.Bool(b => b.Name("leftBool").Should(mq => mq.Query())) + || q.Bool(b => b.Name("rightBool").Should(mq => mq.Query())), + new BoolQuery { Name = "leftBool", Should = new QueryContainer[] { Query } } + || new BoolQuery { Name = "rightBool", Should = new QueryContainer[] { Query } }, + c => AssertDoesNotJoinOntoLockedBool(c, "leftBool")); +---- + +neither are two `bool` queries where the right query is locked + +[source,csharp] +---- +Assert( + q => q.Bool(b => b.Should(mq => mq.Query())) + || q.Bool(b => b.Name("rightBool").Should(mq => mq.Query())), + new BoolQuery {
Should = new QueryContainer[] { Query } } + || new BoolQuery { Name = "rightBool", Should = new QueryContainer[] { Query } }, + c => AssertDoesNotJoinOntoLockedBool(c, "rightBool")); +---- + +or the left query is locked + +[source,csharp] +---- +Assert( + q => q.Bool(b => b.Name("leftBool").Should(mq => mq.Query())) + || q.Bool(b => b.Should(mq => mq.Query())), + new BoolQuery { Name = "leftBool", Should = new QueryContainer[] { Query } } + || new BoolQuery { Should = new QueryContainer[] { Query } }, + c => AssertDoesNotJoinOntoLockedBool(c, "leftBool")); +---- + +[source,csharp] +---- +c.Bool.Should.Should().HaveCount(2); + +var nestedBool = c.Bool.Should.Cast().First(b=>!string.IsNullOrEmpty(b.Bool?.Name)); + +nestedBool.Bool.Should.Should().HaveCount(1); + +nestedBool.Bool.Name.Should().Be(firstName); +---- + +[source,csharp] +---- +assert(fluent.InvokeQuery(new QueryContainerDescriptor())); + +assert((QueryContainer)ois); +---- + diff --git a/docs/asciidoc/query-dsl/bool-dsl/hadouken-indentation.jpg b/docs/asciidoc/query-dsl/bool-dsl/hadouken-indentation.jpg new file mode 100644 index 00000000000..afe03b960d1 Binary files /dev/null and b/docs/asciidoc/query-dsl/bool-dsl/hadouken-indentation.jpg differ diff --git a/docs/asciidoc/query-dsl/bool-dsl/operators/and-operator-usage.asciidoc b/docs/asciidoc/query-dsl/bool-dsl/operators/and-operator-usage.asciidoc new file mode 100644 index 00000000000..26f7525fe1e --- /dev/null +++ b/docs/asciidoc/query-dsl/bool-dsl/operators/and-operator-usage.asciidoc @@ -0,0 +1,96 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[and-operator-usage]] +== And Operator Usage + +[source,csharp] +---- +var lotsOfAnds = Enumerable.Range(0, 100).Aggregate(new QueryContainer(), (q, c) => q && Query, q => q); + +LotsOfAnds(lotsOfAnds); +---- + +[source,csharp] +---- +QueryContainer container = null; + 
+container &= Query; + +LotsOfAnds(container); +---- + +[source,csharp] +---- +var container = new QueryContainer(); + +container &= Query; + +LotsOfAnds(container); +---- + +[source,csharp] +---- +lotsOfAnds.Should().NotBeNull(); + +lotsOfAnds.Bool.Should().NotBeNull(); + +lotsOfAnds.Bool.Must.Should().NotBeEmpty().And.HaveCount(100); +---- + +[source,csharp] +---- +ReturnsBool(Query && Query, q => q.Query() && q.Query(), b => +{ + b.Must.Should().NotBeEmpty().And.HaveCount(2); + b.Should.Should().BeNull(); + b.MustNot.Should().BeNull(); + b.Filter.Should().BeNull(); +}); +b.Must.Should().NotBeEmpty().And.HaveCount(2); +b.Should.Should().BeNull(); +b.MustNot.Should().BeNull(); +b.Filter.Should().BeNull(); +ReturnsBool(Query && Query && ConditionlessQuery, q => q.Query() && q.Query() && q.ConditionlessQuery(), b => +{ + b.Must.Should().NotBeEmpty().And.HaveCount(2); + b.Should.Should().BeNull(); + b.MustNot.Should().BeNull(); + b.Filter.Should().BeNull(); +}); +b.Must.Should().NotBeEmpty().And.HaveCount(2); +b.Should.Should().BeNull(); +b.MustNot.Should().BeNull(); +b.Filter.Should().BeNull(); +ReturnsSingleQuery(Query && ConditionlessQuery, q => q.Query() && q.ConditionlessQuery(), + c => c.Term.Value.Should().NotBeNull()); +ReturnsSingleQuery(ConditionlessQuery && Query, q => q.ConditionlessQuery() && q.Query(), + c => c.Term.Value.Should().NotBeNull()); +ReturnsSingleQuery(Query && NullQuery, q => q.Query() && q.NullQuery(), + c => c.Term.Value.Should().NotBeNull()); +ReturnsSingleQuery(NullQuery && Query, q=> q.NullQuery() && q.Query(), + c => c.Term.Value.Should().NotBeNull()); +ReturnsSingleQuery(ConditionlessQuery && ConditionlessQuery && ConditionlessQuery && Query, + q => q.ConditionlessQuery() && q.ConditionlessQuery() && q.ConditionlessQuery() && q.Query(), + c => c.Term.Value.Should().NotBeNull()); +ReturnsSingleQuery( + NullQuery && NullQuery && ConditionlessQuery && Query, + q=>q.NullQuery() && q.NullQuery() && q.ConditionlessQuery() && q.Query(), + c 
=> c.Term.Value.Should().NotBeNull()); +ReturnsNull(NullQuery && ConditionlessQuery, q=> q.NullQuery() && q.ConditionlessQuery()); +ReturnsNull(ConditionlessQuery && NullQuery, q=>q.ConditionlessQuery() && q.NullQuery()); +ReturnsNull(ConditionlessQuery && ConditionlessQuery, q=>q.ConditionlessQuery() && q.ConditionlessQuery()); +ReturnsNull( + ConditionlessQuery && ConditionlessQuery && ConditionlessQuery && ConditionlessQuery, + q=>q.ConditionlessQuery() && q.ConditionlessQuery() && q.ConditionlessQuery() && q.ConditionlessQuery() + +); +ReturnsNull( + NullQuery && ConditionlessQuery && ConditionlessQuery && ConditionlessQuery, + q=>q.NullQuery() && q.ConditionlessQuery() && q.ConditionlessQuery() && q.ConditionlessQuery() +); +---- + diff --git a/docs/asciidoc/query-dsl/bool-dsl/operators/not-operator-usage.asciidoc b/docs/asciidoc/query-dsl/bool-dsl/operators/not-operator-usage.asciidoc new file mode 100644 index 00000000000..141d422d7f0 --- /dev/null +++ b/docs/asciidoc/query-dsl/bool-dsl/operators/not-operator-usage.asciidoc @@ -0,0 +1,103 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[not-operator-usage]] +== Not Operator Usage + +[source,csharp] +---- +var lotsOfNots = Enumerable.Range(0, 100).Aggregate(new QueryContainer(), (q, c) => q || Query, q => q); + +LotsOfNots(lotsOfNots); +---- + +[source,csharp] +---- +QueryContainer container = null; + +container |= Query; + +LotsOfNots(container); +---- + +[source,csharp] +---- +var container = new QueryContainer(); + +container |= Query; + +LotsOfNots(container); +---- + +[source,csharp] +---- +lotsOfNots.Should().NotBeNull(); + +lotsOfNots.Bool.Should().NotBeNull(); + +lotsOfNots.Bool.Should.Should().NotBeEmpty().And.HaveCount(100); +---- + +[source,csharp] +---- +ReturnsBool(!Query && !Query, q => !q.Query() && !q.Query(), b => +{ + 
b.MustNot.Should().NotBeEmpty().And.HaveCount(2); + b.Must.Should().BeNull(); + b.Should.Should().BeNull(); + b.Filter.Should().BeNull(); +}); +b.MustNot.Should().NotBeEmpty().And.HaveCount(2); +b.Must.Should().BeNull(); +b.Should.Should().BeNull(); +b.Filter.Should().BeNull(); +ReturnsBool(!Query || !Query || !ConditionlessQuery, q => !q.Query() || !q.Query() || !q.ConditionlessQuery(), b => +{ + b.Should.Should().NotBeEmpty().And.HaveCount(2); + b.Must.Should().BeNull(); + b.MustNot.Should().BeNull(); + b.Filter.Should().BeNull(); + foreach (IQueryContainer q in b.Should) + { + q.Bool.Should().NotBeNull(); + q.Bool.MustNot.Should().NotBeEmpty().And.HaveCount(1); + } +}); +b.Should.Should().NotBeEmpty().And.HaveCount(2); +b.Must.Should().BeNull(); +b.MustNot.Should().BeNull(); +b.Filter.Should().BeNull(); +q.Bool.Should().NotBeNull(); +q.Bool.MustNot.Should().NotBeEmpty().And.HaveCount(1); +ReturnsSingleQuery(!Query || !ConditionlessQuery, q => !q.Query() || !q.ConditionlessQuery(), + c => c.Bool.MustNot.Should().NotBeNull().And.HaveCount(1)); +ReturnsSingleQuery(!ConditionlessQuery || !Query, q => !q.ConditionlessQuery() || !q.Query(), + c => c.Bool.MustNot.Should().NotBeNull().And.HaveCount(1)); +ReturnsSingleQuery(!Query || !NullQuery, q => !q.Query() || !q.NullQuery(), + c => c.Bool.MustNot.Should().NotBeNull().And.HaveCount(1)); +ReturnsSingleQuery(!NullQuery && !Query, q => !q.NullQuery() && !q.Query(), + c => c.Bool.MustNot.Should().NotBeNull().And.HaveCount(1)); +ReturnsSingleQuery(!ConditionlessQuery || !ConditionlessQuery && !ConditionlessQuery || !Query, + q => !q.ConditionlessQuery() || !q.ConditionlessQuery() && !q.ConditionlessQuery() || !q.Query(), + c => c.Bool.MustNot.Should().NotBeNull().And.HaveCount(1)); +ReturnsSingleQuery( + !NullQuery || !NullQuery || !ConditionlessQuery || !Query, + q => !q.NullQuery() || !q.NullQuery() || !q.ConditionlessQuery() || !q.Query(), + c => c.Bool.MustNot.Should().NotBeNull()); +ReturnsNull(!NullQuery || 
!ConditionlessQuery, q => !q.NullQuery() || !q.ConditionlessQuery()); +ReturnsNull(!ConditionlessQuery && !NullQuery, q => !q.ConditionlessQuery() && !q.NullQuery()); +ReturnsNull(!ConditionlessQuery || !ConditionlessQuery, q => !q.ConditionlessQuery() || !q.ConditionlessQuery()); +ReturnsNull( + !ConditionlessQuery || !ConditionlessQuery || !ConditionlessQuery || !ConditionlessQuery, + q => !q.ConditionlessQuery() || !q.ConditionlessQuery() || !q.ConditionlessQuery() || !q.ConditionlessQuery() + +); +ReturnsNull( + !NullQuery || !ConditionlessQuery || !ConditionlessQuery || !ConditionlessQuery, + q => !q.NullQuery() || !q.ConditionlessQuery() || !q.ConditionlessQuery() || !q.ConditionlessQuery() +); +---- + diff --git a/docs/asciidoc/query-dsl/bool-dsl/operators/or-operator-usage.asciidoc b/docs/asciidoc/query-dsl/bool-dsl/operators/or-operator-usage.asciidoc new file mode 100644 index 00000000000..db7a5e887ff --- /dev/null +++ b/docs/asciidoc/query-dsl/bool-dsl/operators/or-operator-usage.asciidoc @@ -0,0 +1,107 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[or-operator-usage]] +== Or Operator Usage + +[source,csharp] +---- +var lotsOfOrs = Enumerable.Range(0, 100).Aggregate(new QueryContainer(), (q, c) => q || Query, q => q); + +LotsOfOrs(lotsOfOrs); +---- + +[source,csharp] +---- +QueryContainer container = null; + +container |= Query; + +LotsOfOrs(container); +---- + +[source,csharp] +---- +var container = new QueryContainer(); + +container |= Query; + +LotsOfOrs(container); +---- + +[source,csharp] +---- +lotsOfOrs.Should().NotBeNull(); + +lotsOfOrs.Bool.Should().NotBeNull(); + +lotsOfOrs.Bool.Should.Should().NotBeEmpty().And.HaveCount(100); +---- + +[source,csharp] +---- +ReturnsBool(Query || Query, q => q.Query() || q.Query(), b => +{ + b.Should.Should().NotBeEmpty().And.HaveCount(2); + b.Must.Should().BeNull(); + 
b.MustNot.Should().BeNull(); + b.Filter.Should().BeNull(); +}); +b.Should.Should().NotBeEmpty().And.HaveCount(2); +b.Must.Should().BeNull(); +b.MustNot.Should().BeNull(); +b.Filter.Should().BeNull(); +ReturnsBool(Query || Query || ConditionlessQuery, q => q.Query() || q.Query() || q.ConditionlessQuery(), b => +{ + b.Should.Should().NotBeEmpty().And.HaveCount(2); + b.Must.Should().BeNull(); + b.MustNot.Should().BeNull(); + b.Filter.Should().BeNull(); +}); +b.Should.Should().NotBeEmpty().And.HaveCount(2); +b.Must.Should().BeNull(); +b.MustNot.Should().BeNull(); +b.Filter.Should().BeNull(); +ReturnsBool(Query || Query || ConditionlessQuery, q => q.Query() || q.Query() || q.ConditionlessQuery(), b => +{ + b.Should.Should().NotBeEmpty().And.HaveCount(2); + b.Must.Should().BeNull(); + b.MustNot.Should().BeNull(); + b.Filter.Should().BeNull(); +}); +b.Should.Should().NotBeEmpty().And.HaveCount(2); +b.Must.Should().BeNull(); +b.MustNot.Should().BeNull(); +b.Filter.Should().BeNull(); +ReturnsSingleQuery(Query || ConditionlessQuery, q => q.Query() || q.ConditionlessQuery(), + c => c.Term.Value.Should().NotBeNull()); +ReturnsSingleQuery(ConditionlessQuery || Query, q => q.ConditionlessQuery() || q.Query(), + c => c.Term.Value.Should().NotBeNull()); +ReturnsSingleQuery(Query || NullQuery, q => q.Query() || q.NullQuery(), + c => c.Term.Value.Should().NotBeNull()); +ReturnsSingleQuery(NullQuery || Query, q=> q.NullQuery() || q.Query(), + c => c.Term.Value.Should().NotBeNull()); +ReturnsSingleQuery(ConditionlessQuery || ConditionlessQuery || ConditionlessQuery || Query, + q => q.ConditionlessQuery() || q.ConditionlessQuery() || q.ConditionlessQuery() || q.Query(), + c => c.Term.Value.Should().NotBeNull()); +ReturnsSingleQuery( + NullQuery || NullQuery || ConditionlessQuery || Query, + q=>q.NullQuery() || q.NullQuery() || q.ConditionlessQuery() || q.Query(), + c => c.Term.Value.Should().NotBeNull()); +ReturnsNull(NullQuery || ConditionlessQuery, q=> q.NullQuery() || 
q.ConditionlessQuery()); +ReturnsNull(ConditionlessQuery || NullQuery, q=>q.ConditionlessQuery() || q.NullQuery()); +ReturnsNull(ConditionlessQuery || ConditionlessQuery, q=>q.ConditionlessQuery() || q.ConditionlessQuery()); +ReturnsNull( + ConditionlessQuery || ConditionlessQuery || ConditionlessQuery || ConditionlessQuery, + q=>q.ConditionlessQuery() || q.ConditionlessQuery() || q.ConditionlessQuery() || q.ConditionlessQuery() + +); +ReturnsNull( + NullQuery || ConditionlessQuery || ConditionlessQuery || ConditionlessQuery, + q=>q.NullQuery() || q.ConditionlessQuery() || q.ConditionlessQuery() || q.ConditionlessQuery() +); +---- + diff --git a/docs/asciidoc/query-dsl/bool-dsl/operators/unary-add-operator-usage.asciidoc b/docs/asciidoc/query-dsl/bool-dsl/operators/unary-add-operator-usage.asciidoc new file mode 100644 index 00000000000..7f8ac00ce55 --- /dev/null +++ b/docs/asciidoc/query-dsl/bool-dsl/operators/unary-add-operator-usage.asciidoc @@ -0,0 +1,114 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[unary-add-operator-usage]] +== Unary Add Operator Usage + +[source,csharp] +---- +var lotsOfUnaryAdds = Enumerable.Range(0, 100).Aggregate(new QueryContainer(), (q, c) => q && +Query, q => q); + +LotsOfUnaryAdds(lotsOfUnaryAdds); +---- + +[source,csharp] +---- +QueryContainer container = null; + +container &= +Query; + +LotsOfUnaryAdds(container); +---- + +[source,csharp] +---- +var container = new QueryContainer(); + +container &= +Query; + +LotsOfUnaryAdds(container); +---- + +[source,csharp] +---- +lotsOfUnaryAdds.Should().NotBeNull(); + +lotsOfUnaryAdds.Bool.Should().NotBeNull(); + +lotsOfUnaryAdds.Bool.Filter.Should().NotBeEmpty().And.HaveCount(100); +---- + +[source,csharp] +---- +var container = new QueryContainer(); + +container |= +Query; + +var c = container as IQueryContainer; + 
+c.Bool.Should.Should().NotBeEmpty().And.HaveCount(100); +---- + +[source,csharp] +---- +ReturnsBool(+Query && +Query, q => +q.Query() && +q.Query(), b => +{ + b.Filter.Should().NotBeEmpty().And.HaveCount(2); + b.Must.Should().BeNull(); + b.Should.Should().BeNull(); + b.MustNot.Should().BeNull(); +}); +b.Filter.Should().NotBeEmpty().And.HaveCount(2); +b.Must.Should().BeNull(); +b.Should.Should().BeNull(); +b.MustNot.Should().BeNull(); +ReturnsBool(+Query || +Query || +ConditionlessQuery, q => +q.Query() || +q.Query() || +q.ConditionlessQuery(), b => +{ + b.Should.Should().NotBeEmpty().And.HaveCount(2); + b.Must.Should().BeNull(); + b.MustNot.Should().BeNull(); + b.MustNot.Should().BeNull(); + foreach (IQueryContainer q in b.Should) + { + q.Bool.Should().NotBeNull(); + q.Bool.Filter.Should().NotBeEmpty().And.HaveCount(1); + } +}); +b.Should.Should().NotBeEmpty().And.HaveCount(2); +b.Must.Should().BeNull(); +b.MustNot.Should().BeNull(); +b.MustNot.Should().BeNull(); +q.Bool.Should().NotBeNull(); +q.Bool.Filter.Should().NotBeEmpty().And.HaveCount(1); +ReturnsSingleQuery(+Query || +ConditionlessQuery, q => +q.Query() || +q.ConditionlessQuery(), + c => c.Bool.Filter.Should().NotBeNull().And.HaveCount(1)); +ReturnsSingleQuery(+ConditionlessQuery || +Query, q => +q.ConditionlessQuery() || +q.Query(), + c => c.Bool.Filter.Should().NotBeNull().And.HaveCount(1)); +ReturnsSingleQuery(+Query || +NullQuery, q => +q.Query() || +q.NullQuery(), + c => c.Bool.Filter.Should().NotBeNull().And.HaveCount(1)); +ReturnsSingleQuery(+NullQuery && +Query, q => +q.NullQuery() && +q.Query(), + c => c.Bool.Filter.Should().NotBeNull().And.HaveCount(1)); +ReturnsSingleQuery(+ConditionlessQuery || +ConditionlessQuery && +ConditionlessQuery || +Query, + q => +q.ConditionlessQuery() || +q.ConditionlessQuery() && +q.ConditionlessQuery() || +q.Query(), + c => c.Bool.Filter.Should().NotBeNull().And.HaveCount(1)); +ReturnsSingleQuery( + +NullQuery || +NullQuery || +ConditionlessQuery || +Query, + q => 
+q.NullQuery() || +q.NullQuery() || +q.ConditionlessQuery() || +q.Query(), + c => c.Bool.Filter.Should().NotBeNull()); +ReturnsNull(+NullQuery || +ConditionlessQuery, q => +q.NullQuery() || +q.ConditionlessQuery()); +ReturnsNull(+ConditionlessQuery && +NullQuery, q => +q.ConditionlessQuery() && +q.NullQuery()); +ReturnsNull(+ConditionlessQuery || +ConditionlessQuery, q => +q.ConditionlessQuery() || +q.ConditionlessQuery()); +ReturnsNull( + +ConditionlessQuery || +ConditionlessQuery || +ConditionlessQuery || +ConditionlessQuery, + q => +q.ConditionlessQuery() || +q.ConditionlessQuery() || +q.ConditionlessQuery() || +q.ConditionlessQuery() + +); +ReturnsNull( + +NullQuery || +ConditionlessQuery || +ConditionlessQuery || +ConditionlessQuery, + q => +q.NullQuery() || +q.ConditionlessQuery() || +q.ConditionlessQuery() || +q.ConditionlessQuery() +); +---- + diff --git a/docs/asciidoc/query-dsl/compound/and/and-query-usage.asciidoc b/docs/asciidoc/query-dsl/compound/and/and-query-usage.asciidoc new file mode 100644 index 00000000000..936a2d2d602 --- /dev/null +++ b/docs/asciidoc/query-dsl/compound/and/and-query-usage.asciidoc @@ -0,0 +1,62 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[and-query-usage]] +== And Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.And(c => c + .Name("named_query") + .Boost(1.1) + .Filters( + qq => qq.MatchAll(m => m.Name("query1")), + qq => qq.MatchAll(m => m.Name("query2")) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new AndQuery() +{ + Name = "named_query", + Boost = 1.1, + Filters = new QueryContainer[] { + new MatchAllQuery() { Name = "query1" }, + new MatchAllQuery() { Name = "query2" }, + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "and": { + "_name": "named_query", + "boost": 1.1, + "filters": [ + { + "match_all": { + 
"_name": "query1" + } + }, + { + "match_all": { + "_name": "query2" + } + } + ] + } +} +---- + diff --git a/docs/asciidoc/query-dsl/compound/bool/bool-dsl-complex-query-usage.asciidoc b/docs/asciidoc/query-dsl/compound/bool/bool-dsl-complex-query-usage.asciidoc new file mode 100644 index 00000000000..3989690781a --- /dev/null +++ b/docs/asciidoc/query-dsl/compound/bool/bool-dsl-complex-query-usage.asciidoc @@ -0,0 +1,233 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[bool-dsl-complex-query-usage]] +== Bool Dsl Complex Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q.Query() && q.Query() +//second bool +|| ( + //complex nested bool + (+q.Query() || +q.Query() || !q.Query() && (!q.Query() && !q.ConditionlessQuery())) + // simple nested or + && (q.Query() || q.Query() || q.Query()) + //all conditionless bool + && (q.NullQuery() || +q.ConditionlessQuery() || !q.ConditionlessQuery()) + // actual bool query + && (base.QueryFluent(q))) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +Query && Query +//second bool +|| ( + //complex nested bool + (+Query || +Query || !Query && (!Query && !ConditionlessQuery)) + // simple nested or + && (Query || Query || Query) + //all conditionless bool + && (NullQuery || +ConditionlessQuery || !ConditionlessQuery) + // actual bool query + && (base.QueryInitializer)) +---- + +[source,javascript] +.Example json output +---- +{ + "bool": { + "should": [ + { + "bool": { + "must": [ + { + "term": { + "x": { + "value": "y" + } + } + }, + { + "term": { + "x": { + "value": "y" + } + } + } + ] + } + }, + { + "bool": { + "must": [ + { + "bool": { + "should": [ + { + "bool": { + "filter": [ + { + "term": { + "x": { + "value": "y" + } + } + } + ] + } + }, + { + "bool": { + "filter": [ + { + "term": { + "x": { + "value": "y" + } + } + } + ] + } + }, + { + "bool": { + 
"must_not": [ + { + "term": { + "x": { + "value": "y" + } + } + }, + { + "term": { + "x": { + "value": "y" + } + } + } + ] + } + }, + { + "term": { + "x": { + "value": "y" + } + } + }, + { + "term": { + "x": { + "value": "y" + } + } + }, + { + "term": { + "x": { + "value": "y" + } + } + } + ] + } + }, + { + "bool": { + "must": [ + { + "match_all": {} + } + ], + "must_not": [ + { + "match_all": {} + } + ], + "should": [ + { + "match_all": {} + } + ], + "filter": [ + { + "match_all": {} + } + ], + "minimum_should_match": 1, + "boost": 2.0 + } + } + ] + } + } + ] + } +} +---- + +[source,csharp] +---- +container.Bool.Should().NotBeNull(); + +container.Bool.Should.Should().HaveCount(2); + +container.Bool.MustNot.Should().BeNull(); + +container.Bool.Filter.Should().BeNull(); + +container.Bool.Must.Should().BeNull(); + +var firstBool = (container.Bool.Should.First() as IQueryContainer)?.Bool; + +firstBool.Should().NotBeNull(); + +firstBool.Must.Should().HaveCount(2); + +firstBool.MustNot.Should().BeNull(); + +firstBool.Filter.Should().BeNull(); + +firstBool.Should.Should().BeNull(); + +var secondBool = (container.Bool.Should.Last() as IQueryContainer)?.Bool; + +secondBool.Should().NotBeNull(); + +secondBool.Must.Should().HaveCount(2); //the last bool query was all conditionless + +secondBool.MustNot.Should().BeNull(); + +secondBool.Filter.Should().BeNull(); + +secondBool.Should.Should().BeNull(); + +var complexBool = (secondBool.Must.First() as IQueryContainer)?.Bool; + +complexBool.Should().NotBeNull(); + +complexBool.Should.Should().HaveCount(6); + +var mustNotsBool = (complexBool.Should.Cast().FirstOrDefault(q => q.Bool != null && q.Bool.MustNot != null))?.Bool; + +mustNotsBool.Should().NotBeNull(); + +mustNotsBool.MustNot.Should().HaveCount(2); //one of the three must nots was conditionless +---- + +[source,csharp] +---- +this.AssertShape(this.QueryInitializer); +---- + diff --git a/docs/asciidoc/query-dsl/compound/bool/bool-query-usage.asciidoc 
b/docs/asciidoc/query-dsl/compound/bool/bool-query-usage.asciidoc new file mode 100644 index 00000000000..6fd39a3da8b --- /dev/null +++ b/docs/asciidoc/query-dsl/compound/bool/bool-query-usage.asciidoc @@ -0,0 +1,69 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[bool-query-usage]] +== Bool Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Bool(b => b + .MustNot(m => m.MatchAll()) + .Should(m => m.MatchAll()) + .Must(m => m.MatchAll()) + .Filter(f => f.MatchAll()) + .MinimumShouldMatch(1) + .Boost(2)) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new BoolQuery() +{ + MustNot = new QueryContainer[] { new MatchAllQuery() }, + Should = new QueryContainer[] { new MatchAllQuery() }, + Must = new QueryContainer[] { new MatchAllQuery() }, + Filter = new QueryContainer[] { new MatchAllQuery() }, + MinimumShouldMatch = 1, + Boost = 2 +} +---- + +[source,javascript] +.Example json output +---- +{ + "bool": { + "must": [ + { + "match_all": {} + } + ], + "must_not": [ + { + "match_all": {} + } + ], + "should": [ + { + "match_all": {} + } + ], + "filter": [ + { + "match_all": {} + } + ], + "minimum_should_match": 1, + "boost": 2.0 + } +} +---- + diff --git a/docs/asciidoc/query-dsl/compound/boosting/boosting-query-usage.asciidoc b/docs/asciidoc/query-dsl/compound/boosting/boosting-query-usage.asciidoc new file mode 100644 index 00000000000..8f9932976ef --- /dev/null +++ b/docs/asciidoc/query-dsl/compound/boosting/boosting-query-usage.asciidoc @@ -0,0 +1,59 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[boosting-query-usage]] +== Boosting Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Boosting(c => c + .Name("named_query") + .Boost(1.1) + 
.Positive(qq => qq.MatchAll(m => m.Name("filter"))) + .Negative(qq => qq.MatchAll(m => m.Name("query"))) + .NegativeBoost(1.12) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new BoostingQuery() +{ + Name = "named_query", + Boost = 1.1, + PositiveQuery = new MatchAllQuery { Name ="filter" }, + NegativeQuery= new MatchAllQuery() { Name = "query" }, + NegativeBoost = 1.12 +} +---- + +[source,javascript] +.Example json output +---- +{ + "boosting": { + "_name": "named_query", + "boost": 1.1, + "negative": { + "match_all": { + "_name": "query" + } + }, + "negative_boost": 1.12, + "positive": { + "match_all": { + "_name": "filter" + } + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/compound/constant-score/constant-score-query-usage.asciidoc b/docs/asciidoc/query-dsl/compound/constant-score/constant-score-query-usage.asciidoc new file mode 100644 index 00000000000..e06551baddb --- /dev/null +++ b/docs/asciidoc/query-dsl/compound/constant-score/constant-score-query-usage.asciidoc @@ -0,0 +1,49 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[constant-score-query-usage]] +== Constant Score Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.ConstantScore(c => c + .Name("named_query") + .Boost(1.1) + .Filter(qq => qq.MatchAll(m => m.Name("filter"))) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new ConstantScoreQuery() +{ + Name = "named_query", + Boost = 1.1, + Filter = new MatchAllQuery { Name = "filter" }, +} +---- + +[source,javascript] +.Example json output +---- +{ + "constant_score": { + "_name": "named_query", + "boost": 1.1, + "filter": { + "match_all": { + "_name": "filter" + } + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/compound/dismax/dismax-query-usage.asciidoc b/docs/asciidoc/query-dsl/compound/dismax/dismax-query-usage.asciidoc new 
file mode 100644 index 00000000000..54466b1a0cb --- /dev/null +++ b/docs/asciidoc/query-dsl/compound/dismax/dismax-query-usage.asciidoc @@ -0,0 +1,65 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[dismax-query-usage]] +== Dismax Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.DisMax(c => c + .Name("named_query") + .Boost(1.1) + .TieBreaker(1.11) + .Queries( + qq => qq.MatchAll(m => m.Name("query1")), + qq => qq.MatchAll(m => m.Name("query2")) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new DisMaxQuery() +{ + Name = "named_query", + Boost = 1.1, + TieBreaker = 1.11, + Queries = new QueryContainer[] { + new MatchAllQuery() { Name = "query1" }, + new MatchAllQuery() { Name = "query2" }, + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "dis_max": { + "_name": "named_query", + "boost": 1.1, + "queries": [ + { + "match_all": { + "_name": "query1" + } + }, + { + "match_all": { + "_name": "query2" + } + } + ], + "tie_breaker": 1.11 + } +} +---- + diff --git a/docs/asciidoc/query-dsl/compound/filtered/filtered-query-usage.asciidoc b/docs/asciidoc/query-dsl/compound/filtered/filtered-query-usage.asciidoc new file mode 100644 index 00000000000..73c5c33fc59 --- /dev/null +++ b/docs/asciidoc/query-dsl/compound/filtered/filtered-query-usage.asciidoc @@ -0,0 +1,56 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[filtered-query-usage]] +== Filtered Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Filtered(c => c + .Name("named_query") + .Boost(1.1) + .Filter(qq => qq.MatchAll(m => m.Name("filter"))) + .Query(qq => qq.MatchAll(m => m.Name("query"))) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] 
+---- +new FilteredQuery() +{ + Name = "named_query", + Boost = 1.1, + Filter = new MatchAllQuery { Name ="filter" }, + Query = new MatchAllQuery() { Name = "query" }, +} +---- + +[source,javascript] +.Example json output +---- +{ + "filtered": { + "_name": "named_query", + "boost": 1.1, + "filter": { + "match_all": { + "_name": "filter" + } + }, + "query": { + "match_all": { + "_name": "query" + } + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/compound/function-score/function-score-query-usage.asciidoc b/docs/asciidoc/query-dsl/compound/function-score/function-score-query-usage.asciidoc new file mode 100644 index 00000000000..af7aa2e4e3a --- /dev/null +++ b/docs/asciidoc/query-dsl/compound/function-score/function-score-query-usage.asciidoc @@ -0,0 +1,144 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[function-score-query-usage]] +== Function Score Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.FunctionScore(c => c + .Name("named_query") + .Boost(1.1) + .Query(qq => qq.MatchAll()) + .BoostMode(FunctionBoostMode.Multiply) + .ScoreMode(FunctionScoreMode.Sum) + .MaxBoost(20.0) + .MinScore(1.0) + .Functions(f => f + .Exponential(b => b.Field(p => p.NumberOfCommits).Decay(0.5).Origin(1.0).Scale(0.1).Weight(2.1)) + .GaussDate(b => b.Field(p => p.LastActivity).Origin(DateMath.Now).Decay(0.5).Scale("1d")) + .LinearGeoLocation(b => b.Field(p => p.Location).Origin(new GeoLocation(70, -70)).Scale(Distance.Miles(1)).MultiValueMode(MultiValueMode.Average)) + .FieldValueFactor(b => b.Field("x").Factor(1.1).Missing(0.1).Modifier(FieldValueFactorModifier.Ln)) + .RandomScore(1337) + .RandomScore("randomstring") + .Weight(1.0) + .ScriptScore(ss => ss.Script(s => s.File("x"))) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new FunctionScoreQuery() +{ + Name = "named_query", + Boost = 
1.1, + Query = new MatchAllQuery { }, + BoostMode = FunctionBoostMode.Multiply, + ScoreMode = FunctionScoreMode.Sum, + MaxBoost = 20.0, + MinScore = 1.0, + Functions = new List + { + new ExponentialDecayFunction { Origin = 1.0, Decay = 0.5, Field = Field(p=>p.NumberOfCommits), Scale = 0.1, Weight = 2.1 }, + new GaussDateDecayFunction { Origin = DateMath.Now, Field = Field(p=>p.LastActivity), Decay = 0.5, Scale = TimeSpan.FromDays(1) }, + new LinearGeoDecayFunction { Origin = new GeoLocation(70, -70), Field = Field(p=>p.Location), Scale = Distance.Miles(1), MultiValueMode = MultiValueMode.Average }, + new FieldValueFactorFunction + { + Field = "x", Factor = 1.1, Missing = 0.1, Modifier = FieldValueFactorModifier.Ln + }, + new RandomScoreFunction { Seed = 1337 }, + new RandomScoreFunction { Seed = "randomstring" }, + new WeightFunction { Weight = 1.0}, + new ScriptScoreFunction { Script = new ScriptQuery { File = "x" } } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "function_score": { + "_name": "named_query", + "boost": 1.1, + "boost_mode": "multiply", + "functions": [ + { + "exp": { + "numberOfCommits": { + "origin": 1.0, + "scale": 0.1, + "decay": 0.5 + } + }, + "weight": 2.1 + }, + { + "gauss": { + "lastActivity": { + "origin": "now", + "scale": "1d", + "decay": 0.5 + } + } + }, + { + "linear": { + "location": { + "origin": { + "lat": 70.0, + "lon": -70.0 + }, + "scale": "1.0mi" + }, + "multi_value_mode": "avg" + } + }, + { + "field_value_factor": { + "field": "x", + "factor": 1.1, + "missing": 0.1, + "modifier": "ln" + } + }, + { + "random_score": { + "seed": 1337 + } + }, + { + "random_score": { + "seed": "randomstring" + } + }, + { + "weight": 1.0 + }, + { + "script_score": { + "script": { + "file": "x" + } + } + } + ], + "max_boost": 20.0, + "min_score": 1.0, + "query": { + "match_all": {} + }, + "score_mode": "sum" + } +} +---- + diff --git a/docs/asciidoc/query-dsl/compound/indices/indices-no-match-query-usage.asciidoc 
b/docs/asciidoc/query-dsl/compound/indices/indices-no-match-query-usage.asciidoc new file mode 100644 index 00000000000..0625dd68a8f --- /dev/null +++ b/docs/asciidoc/query-dsl/compound/indices/indices-no-match-query-usage.asciidoc @@ -0,0 +1,55 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[indices-no-match-query-usage]] +== Indices No Match Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Indices(c => c + .Name("named_query") + .Boost(1.1) + .Indices(Nest.Indices.All) + .Query(qq => qq.MatchAll()) + .NoMatchQuery(NoMatchShortcut.All) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new IndicesQuery() +{ + Name = "named_query", + Boost = 1.1, + Indices = Nest.Indices.All, + Query = new MatchAllQuery(), + NoMatchQuery = new NoMatchQueryContainer { Shortcut = NoMatchShortcut.All } +} +---- + +[source,javascript] +.Example json output +---- +{ + "indices": { + "_name": "named_query", + "boost": 1.1, + "indices": [ + "_all" + ], + "no_match_query": "all", + "query": { + "match_all": {} + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/compound/indices/indices-query-usage.asciidoc b/docs/asciidoc/query-dsl/compound/indices/indices-query-usage.asciidoc new file mode 100644 index 00000000000..18fb2e392f5 --- /dev/null +++ b/docs/asciidoc/query-dsl/compound/indices/indices-query-usage.asciidoc @@ -0,0 +1,60 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[indices-query-usage]] +== Indices Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Indices(c => c + .Name("named_query") + .Boost(1.1) + .Indices(Index()) + .Query(qq => qq.MatchAll()) + .NoMatchQuery(qq => qq.MatchAll(m => m.Name("no_match"))) +) +---- + +=== Object Initializer 
Syntax Example + +[source,csharp] +---- +new IndicesQuery() +{ + Name = "named_query", + Boost = 1.1, + Indices = Index(), + Query = new MatchAllQuery(), + NoMatchQuery = new MatchAllQuery { Name ="no_match" } + +} +---- + +[source,javascript] +.Example json output +---- +{ + "indices": { + "_name": "named_query", + "boost": 1.1, + "indices": [ + "project" + ], + "no_match_query": { + "match_all": { + "_name": "no_match" + } + }, + "query": { + "match_all": {} + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/compound/limit/limit-query-usage.asciidoc b/docs/asciidoc/query-dsl/compound/limit/limit-query-usage.asciidoc new file mode 100644 index 00000000000..ee23aba47d8 --- /dev/null +++ b/docs/asciidoc/query-dsl/compound/limit/limit-query-usage.asciidoc @@ -0,0 +1,45 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[limit-query-usage]] +== Limit Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Limit(c => c + .Name("named_query") + .Boost(1.1) + .Limit(100) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new LimitQuery +{ + Name = "named_query", + Boost = 1.1, + Limit = 100 +} +---- + +[source,javascript] +.Example json output +---- +{ + "limit": { + "_name": "named_query", + "boost": 1.1, + "limit": 100 + } +} +---- + diff --git a/docs/asciidoc/query-dsl/compound/not/not-query-usage.asciidoc b/docs/asciidoc/query-dsl/compound/not/not-query-usage.asciidoc new file mode 100644 index 00000000000..efa03beffc0 --- /dev/null +++ b/docs/asciidoc/query-dsl/compound/not/not-query-usage.asciidoc @@ -0,0 +1,62 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[not-query-usage]] +== Not Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Not(c 
=> c + .Name("named_query") + .Boost(1.1) + .Filters( + qq => qq.MatchAll(m => m.Name("query1")), + qq => qq.MatchAll(m => m.Name("query2")) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new NotQuery() +{ + Name = "named_query", + Boost = 1.1, + Filters = new QueryContainer[] { + new MatchAllQuery() { Name = "query1" }, + new MatchAllQuery() { Name = "query2" }, + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "not": { + "_name": "named_query", + "boost": 1.1, + "filters": [ + { + "match_all": { + "_name": "query1" + } + }, + { + "match_all": { + "_name": "query2" + } + } + ] + } +} +---- + diff --git a/docs/asciidoc/query-dsl/compound/or/or-query-usage.asciidoc b/docs/asciidoc/query-dsl/compound/or/or-query-usage.asciidoc new file mode 100644 index 00000000000..202ede1811c --- /dev/null +++ b/docs/asciidoc/query-dsl/compound/or/or-query-usage.asciidoc @@ -0,0 +1,62 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[or-query-usage]] +== Or Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Or(c => c + .Name("named_query") + .Boost(1.1) + .Filters( + qq => qq.MatchAll(m => m.Name("query1")), + qq => qq.MatchAll(m => m.Name("query2")) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new OrQuery() +{ + Name = "named_query", + Boost = 1.1, + Filters = new QueryContainer[] { + new MatchAllQuery() { Name = "query1" }, + new MatchAllQuery() { Name = "query2" }, + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "or": { + "_name": "named_query", + "boost": 1.1, + "filters": [ + { + "match_all": { + "_name": "query1" + } + }, + { + "match_all": { + "_name": "query2" + } + } + ] + } +} +---- + diff --git a/docs/asciidoc/query-dsl/full-text/common-terms/common-terms-usage.asciidoc 
b/docs/asciidoc/query-dsl/full-text/common-terms/common-terms-usage.asciidoc new file mode 100644 index 00000000000..52f8c6fd1cb --- /dev/null +++ b/docs/asciidoc/query-dsl/full-text/common-terms/common-terms-usage.asciidoc @@ -0,0 +1,67 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[common-terms-usage]] +== Common Terms Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.CommonTerms(c => c + .Field(p => p.Description) + .Analyzer("standard") + .Boost(1.1) + .CutoffFrequency(0.001) + .DisableCoord() + .HighFrequencyOperator(Operator.And) + .LowFrequencyOperator(Operator.Or) + .MinimumShouldMatch(1) + .Name("named_query") + .Query("nelly the elephant not as a") +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new CommonTermsQuery() +{ + Field = Field(p => p.Description), + Analyzer = "standard", + Boost = 1.1, + CutoffFrequency = 0.001, + DisableCoord = true, + HighFrequencyOperator = Operator.And, + LowFrequencyOperator = Operator.Or, + MinimumShouldMatch = 1, + Name = "named_query", + Query = "nelly the elephant not as a" +} +---- + +[source,javascript] +.Example json output +---- +{ + "common": { + "description": { + "_name": "named_query", + "boost": 1.1, + "query": "nelly the elephant not as a", + "cutoff_frequency": 0.001, + "low_freq_operator": "or", + "high_freq_operator": "and", + "minimum_should_match": 1, + "analyzer": "standard", + "disable_coord": true + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/full-text/match/match-phrase-prefix-usage.asciidoc b/docs/asciidoc/query-dsl/full-text/match/match-phrase-prefix-usage.asciidoc new file mode 100644 index 00000000000..cadfeba1b29 --- /dev/null +++ b/docs/asciidoc/query-dsl/full-text/match/match-phrase-prefix-usage.asciidoc @@ -0,0 +1,83 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + 
+:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[match-phrase-prefix-usage]] +== Match Phrase Prefix Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.MatchPhrasePrefix(c => c + .Field(p => p.Description) + .Analyzer("standard") + .Boost(1.1) + .CutoffFrequency(0.001) + .Query("hello worl") + .Fuzziness(Fuzziness.Auto) + .Lenient() + .FuzzyTranspositions() + .MaxExpansions(2) + .MinimumShouldMatch(2) + .PrefixLength(2) + .Operator(Operator.Or) + .FuzzyRewrite(RewriteMultiTerm.ConstantScoreBoolean) + .Slop(2) + .Name("named_query") +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new MatchPhrasePrefixQuery +{ + Field = Field(p => p.Description), + Analyzer = "standard", + Boost = 1.1, + Name = "named_query", + CutoffFrequency = 0.001, + Query = "hello worl", + Fuzziness = Fuzziness.Auto, + FuzzyTranspositions = true, + MinimumShouldMatch = 2, + FuzzyRewrite = RewriteMultiTerm.ConstantScoreBoolean, + MaxExpansions = 2, + Slop = 2, + Lenient = true, + Operator = Operator.Or, + PrefixLength = 2 +} +---- + +[source,javascript] +.Example json output +---- +{ + "match": { + "description": { + "_name": "named_query", + "boost": 1.1, + "query": "hello worl", + "analyzer": "standard", + "fuzzy_rewrite": "constant_score_boolean", + "fuzziness": "AUTO", + "fuzzy_transpositions": true, + "cutoff_frequency": 0.001, + "prefix_length": 2, + "max_expansions": 2, + "slop": 2, + "lenient": true, + "minimum_should_match": 2, + "operator": "or", + "type": "phrase_prefix" + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/full-text/match/match-phrase-usage.asciidoc b/docs/asciidoc/query-dsl/full-text/match/match-phrase-usage.asciidoc new file mode 100644 index 00000000000..850077abb54 --- /dev/null +++ b/docs/asciidoc/query-dsl/full-text/match/match-phrase-usage.asciidoc @@ -0,0 +1,83 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: 
https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[match-phrase-usage]] +== Match Phrase Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.MatchPhrase(c => c + .Field(p => p.Description) + .Analyzer("standard") + .Boost(1.1) + .CutoffFrequency(0.001) + .Query("hello world") + .Fuzziness(Fuzziness.Auto) + .Lenient() + .FuzzyTranspositions() + .MaxExpansions(2) + .MinimumShouldMatch(2) + .PrefixLength(2) + .Operator(Operator.Or) + .FuzzyRewrite(RewriteMultiTerm.ConstantScoreBoolean) + .Slop(2) + .Name("named_query") +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new MatchPhraseQuery +{ + Field = Field(p=>p.Description), + Analyzer = "standard", + Boost = 1.1, + Name = "named_query", + CutoffFrequency = 0.001, + Query = "hello world", + Fuzziness = Fuzziness.Auto, + FuzzyTranspositions = true, + MinimumShouldMatch = 2, + FuzzyRewrite = RewriteMultiTerm.ConstantScoreBoolean, + MaxExpansions = 2, + Slop = 2, + Lenient = true, + Operator = Operator.Or, + PrefixLength = 2 +} +---- + +[source,javascript] +.Example json output +---- +{ + "match": { + "description": { + "_name": "named_query", + "boost": 1.1, + "query": "hello world", + "analyzer": "standard", + "fuzzy_rewrite": "constant_score_boolean", + "fuzziness": "AUTO", + "fuzzy_transpositions": true, + "cutoff_frequency": 0.001, + "prefix_length": 2, + "max_expansions": 2, + "slop": 2, + "lenient": true, + "minimum_should_match": 2, + "operator": "or", + "type": "phrase" + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/full-text/match/match-usage.asciidoc b/docs/asciidoc/query-dsl/full-text/match/match-usage.asciidoc new file mode 100644 index 00000000000..174b2c7f7dc --- /dev/null +++ b/docs/asciidoc/query-dsl/full-text/match/match-usage.asciidoc @@ -0,0 +1,82 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: 
https://www.nuget.org/packages + +[[match-usage]] +== Match Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Match(c => c + .Field(p => p.Description) + .Analyzer("standard") + .Boost(1.1) + .CutoffFrequency(0.001) + .Query("hello world") + .Fuzziness(Fuzziness.Auto) + .Lenient() + .FuzzyTranspositions() + .MaxExpansions(2) + .MinimumShouldMatch(2) + .PrefixLength(2) + .Operator(Operator.Or) + .FuzzyRewrite(RewriteMultiTerm.ConstantScoreBoolean) + .Slop(2) + .Name("named_query") +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new MatchQuery +{ + Field = Field(p=>p.Description), + Analyzer = "standard", + Boost = 1.1, + Name = "named_query", + CutoffFrequency = 0.001, + Query = "hello world", + Fuzziness = Fuzziness.Auto, + FuzzyTranspositions = true, + MinimumShouldMatch = 2, + FuzzyRewrite = RewriteMultiTerm.ConstantScoreBoolean, + MaxExpansions = 2, + Slop = 2, + Lenient = true, + Operator = Operator.Or, + PrefixLength = 2 +} +---- + +[source,javascript] +.Example json output +---- +{ + "match": { + "description": { + "_name": "named_query", + "boost": 1.1, + "query": "hello world", + "analyzer": "standard", + "fuzzy_rewrite": "constant_score_boolean", + "fuzziness": "AUTO", + "fuzzy_transpositions": true, + "cutoff_frequency": 0.001, + "prefix_length": 2, + "max_expansions": 2, + "slop": 2, + "lenient": true, + "minimum_should_match": 2, + "operator": "or" + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/full-text/multi-match/multi-match-usage.asciidoc b/docs/asciidoc/query-dsl/full-text/multi-match/multi-match-usage.asciidoc new file mode 100644 index 00000000000..af365d8a1a7 --- /dev/null +++ b/docs/asciidoc/query-dsl/full-text/multi-match/multi-match-usage.asciidoc @@ -0,0 +1,124 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[multi-match-usage]] +== Multi Match Usage + +=== 
Fluent DSL Example + +[source,csharp] +---- +q +.MultiMatch(c => c + .Fields(f => f.Field(p=>p.Description).Field("myOtherField")) + .Query("hello world") + .Analyzer("standard") + .Boost(1.1) + .Slop(2) + .Fuzziness(Fuzziness.Auto) + .PrefixLength(2) + .MaxExpansions(2) + .Operator(Operator.Or) + .MinimumShouldMatch(2) + .FuzzyRewrite(RewriteMultiTerm.ConstantScoreBoolean) + .TieBreaker(1.1) + .CutoffFrequency(0.001) + .Lenient() + .ZeroTermsQuery(ZeroTermsQuery.All) + .Name("named_query") +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new MultiMatchQuery +{ + Fields = Field(p=>p.Description).And("myOtherField"), + Query = "hello world", + Analyzer = "standard", + Boost = 1.1, + Slop = 2, + Fuzziness = Fuzziness.Auto, + PrefixLength = 2, + MaxExpansions = 2, + Operator = Operator.Or, + MinimumShouldMatch = 2, + FuzzyRewrite = RewriteMultiTerm.ConstantScoreBoolean, + TieBreaker = 1.1, + CutoffFrequency = 0.001, + Lenient = true, + ZeroTermsQuery = ZeroTermsQuery.All, + Name = "named_query", +} +---- + +[source,javascript] +.Example json output +---- +{ + "multi_match": { + "_name": "named_query", + "boost": 1.1, + "query": "hello world", + "analyzer": "standard", + "fuzzy_rewrite": "constant_score_boolean", + "fuzziness": "AUTO", + "cutoff_frequency": 0.001, + "prefix_length": 2, + "max_expansions": 2, + "slop": 2, + "lenient": true, + "tie_breaker": 1.1, + "minimum_should_match": 2, + "operator": "or", + "fields": [ + "description", + "myOtherField" + ], + "zero_terms_query": "all" + } +} +---- + +=== Fluent DSL Example + +[source,csharp] +---- +q +.MultiMatch(c => c + //.Fields(f => f.Field(p=>p.Description, 2.2).Field("myOtherField^0.3")) + .Fields(Field(p=>p.Description, 2.2).And("myOtherField^0.3")) + .Query("hello world") +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new MultiMatchQuery +{ + Fields = Field(p=>p.Description, 2.2).And("myOtherField^0.3"), + Query = "hello world", +} +---- + 
+[source,javascript] +.Example json output +---- +{ + "multi_match": { + "query": "hello world", + "fields": [ + "description^2.2", + "myOtherField^0.3" + ] + } +} +---- + diff --git a/docs/asciidoc/query-dsl/full-text/query-string/query-string-usage.asciidoc b/docs/asciidoc/query-dsl/full-text/query-string/query-string-usage.asciidoc new file mode 100644 index 00000000000..165e0db6a4d --- /dev/null +++ b/docs/asciidoc/query-dsl/full-text/query-string/query-string-usage.asciidoc @@ -0,0 +1,120 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[query-string-usage]] +== Query String Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.QueryString(c => c + .Name("named_query") + .Boost(1.1) + .Fields(f => f.Field(p=>p.Description).Field("myOtherField")) + .Query("hello world") + .DefaultField(p=>p.Description) + .DefaultOperator(Operator.Or) + .Analyzer("standard") + .QuoteAnalyzer("quote-an") + .AllowLeadingWildcard() + .AutoGeneratePhraseQueries() + .MaximumDeterminizedStates(2) + .LowercaseExpendedTerms() + .EnablePositionIncrements() + .Escape() + .UseDisMax() + .FuzzyPrefixLength(2) + .FuzzyMaxExpansions(3) + .FuzzyRewrite(RewriteMultiTerm.ConstantScore) + .Rewrite(RewriteMultiTerm.ConstantScore) + .Fuziness(Fuzziness.Auto) + .TieBreaker(1.2) + .AnalyzeWildcard() + .MinimumShouldMatch(2) + .QuoteFieldSuffix("'") + .Lenient() + .Locale("en_US") + .Timezone("root") +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new QueryStringQuery +{ + Fields = Field(p=>p.Description).And("myOtherField"), + Boost = 1.1, + Name = "named_query", + Query = "hello world", + DefaultField = Field(p=>p.Description), + DefaultOperator = Operator.Or, + Analyzer = "standard", + QuoteAnalyzer = "quote-an", + AllowLeadingWildcard = true, + AutoGeneratePhraseQueries = true, + MaximumDeterminizedStates = 2, + 
LowercaseExpendedTerms = true, + EnablePositionIncrements = true, + Escape = true, + UseDisMax = true, + FuzzyPrefixLength = 2, + FuzzyMaxExpansions = 3, + FuzzyRewrite = RewriteMultiTerm.ConstantScore, + Rewrite = RewriteMultiTerm.ConstantScore, + Fuzziness = Fuzziness.Auto, + TieBreaker = 1.2, + AnalyzeWildcard = true, + MinimumShouldMatch = 2, + QuoteFieldSuffix = "'", + Lenient = true, + Locale = "en_US", + Timezone = "root" +} +---- + +[source,javascript] +.Example json output +---- +{ + "query_string": { + "_name": "named_query", + "boost": 1.1, + "query": "hello world", + "default_field": "description", + "default_operator": "or", + "analyzer": "standard", + "quote_analyzer": "quote-an", + "allow_leading_wildcard": true, + "lowercase_expanded_terms": true, + "enable_position_increments": true, + "fuzzy_max_expansions": 3, + "fuziness": "AUTO", + "fuzzy_prefix_length": 2, + "analyze_wildcard": true, + "auto_generate_phrase_queries": true, + "max_determinized_states": 2, + "minimum_should_match": 2, + "lenient": true, + "locale": "en_US", + "time_zone": "root", + "fields": [ + "description", + "myOtherField" + ], + "use_dis_max": true, + "tie_breaker": 1.2, + "rewrite": "constant_score", + "fuzzy_rewrite": "constant_score", + "quote_field_suffix": "'", + "escape": true + } +} +---- + diff --git a/docs/asciidoc/query-dsl/full-text/simple-query-string/simple-query-string-usage.asciidoc b/docs/asciidoc/query-dsl/full-text/simple-query-string/simple-query-string-usage.asciidoc new file mode 100644 index 00000000000..b3236c9c12a --- /dev/null +++ b/docs/asciidoc/query-dsl/full-text/simple-query-string/simple-query-string-usage.asciidoc @@ -0,0 +1,75 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[simple-query-string-usage]] +== Simple Query String Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.SimpleQueryString(c 
=> c + .Name("named_query") + .Boost(1.1) + .Fields(f => f.Field(p=>p.Description).Field("myOtherField")) + .Query("hello world") + .Analyzer("standard") + .DefaultOperator(Operator.Or) + .Flags(SimpleQueryStringFlags.And|SimpleQueryStringFlags.Near) + .Locale("en_US") + .LowercaseExpendedTerms() + .Lenient() + .AnalyzeWildcard() + .MinimumShouldMatch("30%") +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SimpleQueryStringQuery +{ + Name = "named_query", + Boost = 1.1, + Fields = Field(p=>p.Description).And("myOtherField"), + Query = "hello world", + Analyzer = "standard", + DefaultOperator = Operator.Or, + Flags = SimpleQueryStringFlags.And|SimpleQueryStringFlags.Near, + Locale = "en_US", + LowercaseExpendedTerms = true, + Lenient = true, + AnalyzeWildcard = true, + MinimumShouldMatch = "30%" +} +---- + +[source,javascript] +.Example json output +---- +{ + "simple_query_string": { + "_name": "named_query", + "boost": 1.1, + "fields": [ + "description", + "myOtherField" + ], + "query": "hello world", + "analyzer": "standard", + "default_operator": "or", + "flags": "AND|NEAR", + "locale": "en_US", + "lowercase_expanded_terms": true, + "lenient": true, + "analyze_wildcard": true, + "minimum_should_match": "30%" + } +} +---- + diff --git a/docs/asciidoc/query-dsl/geo/bounding-box/geo-bounding-box-query-usage.asciidoc b/docs/asciidoc/query-dsl/geo/bounding-box/geo-bounding-box-query-usage.asciidoc new file mode 100644 index 00000000000..75eb66470bd --- /dev/null +++ b/docs/asciidoc/query-dsl/geo/bounding-box/geo-bounding-box-query-usage.asciidoc @@ -0,0 +1,75 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[geo-bounding-box-query-usage]] +== Geo Bounding Box Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.GeoBoundingBox(g=>g + .Boost(1.1) + .Name("named_query") + 
.Field(p=>p.Location) + .BoundingBox(b=>b + .TopLeft(34, -34) + .BottomRight(-34, 34) + ) + .Coerce() + .IgnoreMalformed() + .ValidationMethod(GeoValidationMethod.Strict) + .Type(GeoExecution.Indexed) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new GeoBoundingBoxQuery +{ + Boost = 1.1, + Name = "named_query", + Field = Infer.Field(p => p.Location), + BoundingBox = new Nest.BoundingBox + { + TopLeft = new GeoLocation(34,-34), + BottomRight = new GeoLocation(-34,34), + }, + Type = GeoExecution.Indexed, + Coerce = true, + IgnoreMalformed = true, + ValidationMethod = GeoValidationMethod.Strict +} +---- + +[source,javascript] +.Example json output +---- +{ + "geo_bounding_box": { + "type": "indexed", + "coerce": true, + "ignore_malformed": true, + "validation_method": "strict", + "_name": "named_query", + "boost": 1.1, + "location": { + "top_left": { + "lat": 34.0, + "lon": -34.0 + }, + "bottom_right": { + "lat": -34.0, + "lon": 34.0 + } + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/geo/distance-range/geo-distance-range-query-usage.asciidoc b/docs/asciidoc/query-dsl/geo/distance-range/geo-distance-range-query-usage.asciidoc new file mode 100644 index 00000000000..55f572f3208 --- /dev/null +++ b/docs/asciidoc/query-dsl/geo/distance-range/geo-distance-range-query-usage.asciidoc @@ -0,0 +1,77 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[geo-distance-range-query-usage]] +== Geo Distance Range Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.GeoDistanceRange(g=>g + .Boost(1.1) + .Name("named_query") + .Field(p=>p.Location) + .DistanceType(GeoDistanceType.Arc) + .Coerce() + .GreaterThanOrEqualTo(200, DistanceUnit.Kilometers) + .GreaterThan(200, DistanceUnit.Kilometers) + .IgnoreMalformed() + .Location(new GeoLocation(40, -70)) + .Optimize(GeoOptimizeBBox.Indexed) + 
.LessThanOrEqualTo(Nest.Distance.Miles(400)) + .LessThan(Nest.Distance.Miles(400)) + .ValidationMethod(GeoValidationMethod.Strict) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new GeoDistanceRangeQuery +{ + Boost = 1.1, + Name = "named_query", + Field = Infer.Field(p=>p.Location), + DistanceType = GeoDistanceType.Arc, + Coerce = true, + GreaterThanOrEqualTo = Nest.Distance.Kilometers(200), + IgnoreMalformed = true, + GreaterThan = Nest.Distance.Kilometers(200), + LessThan = Nest.Distance.Miles(400), + Location = new GeoLocation(40, -70), + OptimizeBoundingBox = GeoOptimizeBBox.Indexed, + LessThanOrEqualTo = Nest.Distance.Miles(400), + ValidationMethod = GeoValidationMethod.Strict +} +---- + +[source,javascript] +.Example json output +---- +{ + "geo_distance_range": { + "gt": "200.0km", + "gte": "200.0km", + "lt": "400.0mi", + "lte": "400.0mi", + "distance_type": "arc", + "optimize_bbox": "indexed", + "coerce": true, + "ignore_malformed": true, + "validation_method": "strict", + "_name": "named_query", + "boost": 1.1, + "location": { + "lat": 40.0, + "lon": -70.0 + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/geo/distance/geo-distance-query-usage.asciidoc b/docs/asciidoc/query-dsl/geo/distance/geo-distance-query-usage.asciidoc new file mode 100644 index 00000000000..d4cc505c60d --- /dev/null +++ b/docs/asciidoc/query-dsl/geo/distance/geo-distance-query-usage.asciidoc @@ -0,0 +1,68 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[geo-distance-query-usage]] +== Geo Distance Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.GeoDistance(g=>g + .Boost(1.1) + .Name("named_query") + .Field(p=>p.Location) + .DistanceType(GeoDistanceType.Arc) + .Coerce() + .Location(34, -34) + .Distance("200.0m") + .IgnoreMalformed() + .Optimize(GeoOptimizeBBox.Memory) + 
.ValidationMethod(GeoValidationMethod.Strict) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new GeoDistanceQuery +{ + Boost = 1.1, + Name = "named_query", + Field = Infer.Field(p => p.Location), + DistanceType = GeoDistanceType.Arc, + Coerce = true, + Location = new GeoLocation(34,-34), + Distance = "200.0m", + IgnoreMalformed = true, + OptimizeBoundingBox = GeoOptimizeBBox.Memory, + ValidationMethod = GeoValidationMethod.Strict +} +---- + +[source,javascript] +.Example json output +---- +{ + "geo_distance": { + "_name": "named_query", + "boost": 1.1, + "distance": "200.0m", + "optimize_bbox": "memory", + "distance_type": "arc", + "coerce": true, + "ignore_malformed": true, + "validation_method": "strict", + "location": { + "lat": 34.0, + "lon": -34.0 + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/geo/hash-cell/geo-hash-cell-query-usage.asciidoc b/docs/asciidoc/query-dsl/geo/hash-cell/geo-hash-cell-query-usage.asciidoc new file mode 100644 index 00000000000..9db82c4a816 --- /dev/null +++ b/docs/asciidoc/query-dsl/geo/hash-cell/geo-hash-cell-query-usage.asciidoc @@ -0,0 +1,56 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[geo-hash-cell-query-usage]] +== Geo Hash Cell Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.GeoHashCell(c => c + .Name("named_query") + .Boost(1.1) + .Field(p=>p.Location) + .Location(new GeoLocation(13.4080, 52.5186)) + .Neighbors() + .Precision(Nest.Distance.Meters(3)) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new GeoHashCellQuery +{ + Boost = 1.1, + Name = "named_query", + Field = Infer.Field(p=>p.Location), + Location = new GeoLocation(13.4080, 52.5186), + Neighbors = true, + Precision = Nest.Distance.Meters(3) +} +---- + +[source,javascript] +.Example json output +---- +{ + "geohash_cell": { + "_name": 
"named_query", + "boost": 1.1, + "precision": "3.0m", + "neighbors": true, + "location": { + "lat": 13.408, + "lon": 52.5186 + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/geo/polygon/geo-polygon-query-usage.asciidoc b/docs/asciidoc/query-dsl/geo/polygon/geo-polygon-query-usage.asciidoc new file mode 100644 index 00000000000..15f49d76c2d --- /dev/null +++ b/docs/asciidoc/query-dsl/geo/polygon/geo-polygon-query-usage.asciidoc @@ -0,0 +1,67 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[geo-polygon-query-usage]] +== Geo Polygon Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.GeoPolygon(c => c + .Name("named_query") + .Boost(1.1) + .Field(p=>p.Location) + .IgnoreMalformed() + .Coerce() + .ValidationMethod(GeoValidationMethod.Strict) + .Points( new GeoLocation(45,-45), new GeoLocation(-34,34)) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new GeoPolygonQuery +{ + Boost = 1.1, + Name = "named_query", + ValidationMethod = GeoValidationMethod.Strict, + Coerce = true, + IgnoreMalformed = true, + Points = new [] { new GeoLocation(45,-45), new GeoLocation(-34,34), }, + Field = Field(p=>p.Location) +} +---- + +[source,javascript] +.Example json output +---- +{ + "geo_polygon": { + "_name": "named_query", + "boost": 1.1, + "coerce": true, + "ignore_malformed": true, + "validation_method": "strict", + "location": { + "points": [ + { + "lat": 45.0, + "lon": -45.0 + }, + { + "lat": -34.0, + "lon": 34.0 + } + ] + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/geo/shape/circle/geo-shape-circle-usage.asciidoc b/docs/asciidoc/query-dsl/geo/shape/circle/geo-shape-circle-usage.asciidoc new file mode 100644 index 00000000000..a3185ee4299 --- /dev/null +++ b/docs/asciidoc/query-dsl/geo/shape/circle/geo-shape-circle-usage.asciidoc @@ -0,0 +1,36 @@ +:ref_current: 
https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[geo-shape-circle-usage]] +== Geo Shape Circle Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.GeoShapeCircle(c => c + .Name("named_query") + .Boost(1.1) + .Field(p=>p.Location) + .Coordinates(this._coordinates) + .Radius("100m") +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new GeoShapeCircleQuery +{ + Name = "named_query", + Boost = 1.1, + Field = Field(p=>p.Location), + Shape = new CircleGeoShape(this._coordinates) { Radius = "100m" } +} +---- + diff --git a/docs/asciidoc/query-dsl/geo/shape/envelope/geo-envelope-usage.asciidoc b/docs/asciidoc/query-dsl/geo/shape/envelope/geo-envelope-usage.asciidoc new file mode 100644 index 00000000000..67a68dca84f --- /dev/null +++ b/docs/asciidoc/query-dsl/geo/shape/envelope/geo-envelope-usage.asciidoc @@ -0,0 +1,35 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[geo-envelope-usage]] +== Geo Envelope Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.GeoShapeEnvelope(c => c + .Name("named_query") + .Boost(1.1) + .Field(p=>p.Location) + .Coordinates(this._coordinates) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new GeoShapeEnvelopeQuery +{ + Name = "named_query", + Boost = 1.1, + Field = Field(p=>p.Location), + Shape = new EnvelopeGeoShape(this._coordinates) +} +---- + diff --git a/docs/asciidoc/query-dsl/geo/shape/indexed-shape/geo-indexed-shape-usage.asciidoc b/docs/asciidoc/query-dsl/geo/shape/indexed-shape/geo-indexed-shape-usage.asciidoc new file mode 100644 index 00000000000..935d4062bc7 --- /dev/null +++ b/docs/asciidoc/query-dsl/geo/shape/indexed-shape/geo-indexed-shape-usage.asciidoc @@ -0,0 +1,63 @@ +:ref_current: 
https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[geo-indexed-shape-usage]] +== Geo Indexed Shape Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.GeoIndexedShape(c => c + .Name("named_query") + .Boost(1.1) + .Field(p=>p.Location) + .IndexedShape(p=>p + .Id(2) + .Path(pp=>pp.Location) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new GeoIndexedShapeQuery +{ + Name = "named_query", + Boost = 1.1, + Field = Field(p=>p.Location), + IndexedShape = new FieldLookup + { + Id = 2, + Index = Index(), + Type = Type(), + Path = Field(p=>p.Location) + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "geo_shape": { + "location": { + "_name": "named_query", + "boost": 1.1, + "indexed_shape": { + "id": 2, + "type": "project", + "index": "project", + "path": "location" + } + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/geo/shape/line-string/geo-line-string-usage.asciidoc b/docs/asciidoc/query-dsl/geo/shape/line-string/geo-line-string-usage.asciidoc new file mode 100644 index 00000000000..1d47da60b4a --- /dev/null +++ b/docs/asciidoc/query-dsl/geo/shape/line-string/geo-line-string-usage.asciidoc @@ -0,0 +1,35 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[geo-line-string-usage]] +== Geo Line String Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.GeoShapeLineString(c => c + .Name("named_query") + .Boost(1.1) + .Field(p=>p.Location) + .Coordinates(this._coordinates) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new GeoShapeLineStringQuery +{ + Name = "named_query", + Boost = 1.1, + Field = Field(p=>p.Location), + Shape = new LineStringGeoShape(this._coordinates) +} +---- + diff --git 
a/docs/asciidoc/query-dsl/geo/shape/multi-line-string/geo-multi-line-string-usage.asciidoc b/docs/asciidoc/query-dsl/geo/shape/multi-line-string/geo-multi-line-string-usage.asciidoc new file mode 100644 index 00000000000..d1844b32a90 --- /dev/null +++ b/docs/asciidoc/query-dsl/geo/shape/multi-line-string/geo-multi-line-string-usage.asciidoc @@ -0,0 +1,35 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[geo-multi-line-string-usage]] +== Geo Multi Line String Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.GeoShapeMultiLineString(c => c + .Name("named_query") + .Boost(1.1) + .Field(p=>p.Location) + .Coordinates(this._coordinates) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new GeoShapeMultiLineStringQuery +{ + Name = "named_query", + Boost = 1.1, + Field = Field(p=>p.Location), + Shape = new MultiLineStringGeoShape(this._coordinates) +} +---- + diff --git a/docs/asciidoc/query-dsl/geo/shape/multi-point/geo-multi-point-usage.asciidoc b/docs/asciidoc/query-dsl/geo/shape/multi-point/geo-multi-point-usage.asciidoc new file mode 100644 index 00000000000..2ec90304aed --- /dev/null +++ b/docs/asciidoc/query-dsl/geo/shape/multi-point/geo-multi-point-usage.asciidoc @@ -0,0 +1,35 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[geo-multi-point-usage]] +== Geo Multi Point Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.GeoShapeMultiPoint(c => c + .Name("named_query") + .Boost(1.1) + .Field(p=>p.Location) + .Coordinates(this._coordinates) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new GeoShapeMultiPointQuery +{ + Name = "named_query", + Boost = 1.1, + Field = Field(p=>p.Location), + Shape = new 
MultiPointGeoShape(this._coordinates) +} +---- + diff --git a/docs/asciidoc/query-dsl/geo/shape/point/geo-point-usage.asciidoc b/docs/asciidoc/query-dsl/geo/shape/point/geo-point-usage.asciidoc new file mode 100644 index 00000000000..11c185b017f --- /dev/null +++ b/docs/asciidoc/query-dsl/geo/shape/point/geo-point-usage.asciidoc @@ -0,0 +1,35 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[geo-point-usage]] +== Geo Point Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.GeoShapePoint(c => c + .Name("named_query") + .Boost(1.1) + .Field(p=>p.Location) + .Coordinates(this._coordinates) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new GeoShapePointQuery +{ + Name = "named_query", + Boost = 1.1, + Field = Field(p=>p.Location), + Shape = new PointGeoShape(this._coordinates) +} +---- + diff --git a/docs/asciidoc/query-dsl/geo/shape/polygon/geo-polygon-usage.asciidoc b/docs/asciidoc/query-dsl/geo/shape/polygon/geo-polygon-usage.asciidoc new file mode 100644 index 00000000000..9b0a72c8f31 --- /dev/null +++ b/docs/asciidoc/query-dsl/geo/shape/polygon/geo-polygon-usage.asciidoc @@ -0,0 +1,35 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[geo-polygon-usage]] +== Geo Polygon Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.GeoShapePolygon(c => c + .Name("named_query") + .Boost(1.1) + .Field(p => p.Location) + .Coordinates(this._coordinates) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new GeoShapePolygonQuery +{ + Name = "named_query", + Boost = 1.1, + Field = Field(p => p.Location), + Shape = new PolygonGeoShape(this._coordinates) { } +} +---- + diff --git 
a/docs/asciidoc/query-dsl/joining/has-child/has-child-query-usage.asciidoc b/docs/asciidoc/query-dsl/joining/has-child/has-child-query-usage.asciidoc new file mode 100644 index 00000000000..3e5083af76e --- /dev/null +++ b/docs/asciidoc/query-dsl/joining/has-child/has-child-query-usage.asciidoc @@ -0,0 +1,63 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[has-child-query-usage]] +== Has Child Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.HasChild(c => c + .Name("named_query") + .Boost(1.1) + .InnerHits(i=>i.Explain()) + .MaxChildren(5) + .MinChildren(1) + .ScoreMode(ChildScoreMode.Average) + .Query(qq=>qq.MatchAll()) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new HasChildQuery +{ + Name = "named_query", + Boost = 1.1, + Type = Infer.Type(), + InnerHits = new InnerHits { Explain = true }, + MaxChildren = 5, + MinChildren = 1, + Query = new MatchAllQuery(), + ScoreMode = ChildScoreMode.Average +} +---- + +[source,javascript] +.Example json output +---- +{ + "has_child": { + "_name": "named_query", + "boost": 1.1, + "type": "developer", + "score_mode": "avg", + "min_children": 1, + "max_children": 5, + "query": { + "match_all": {} + }, + "inner_hits": { + "explain": true + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/joining/has-parent/has-parent-query-usage.asciidoc b/docs/asciidoc/query-dsl/joining/has-parent/has-parent-query-usage.asciidoc new file mode 100644 index 00000000000..30488fd07a7 --- /dev/null +++ b/docs/asciidoc/query-dsl/joining/has-parent/has-parent-query-usage.asciidoc @@ -0,0 +1,58 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[has-parent-query-usage]] +== Has Parent Query Usage + +=== Fluent DSL Example + 
+[source,csharp] +---- +q +.HasParent(c => c + .Name("named_query") + .Boost(1.1) + .InnerHits(i=>i.Explain()) + .ScoreMode(ParentScoreMode.Score) + .Query(qq=>qq.MatchAll()) + +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new HasParentQuery +{ + Name = "named_query", + Boost = 1.1, + Type = Infer.Type(), + InnerHits = new InnerHits { Explain = true }, + Query = new MatchAllQuery(), + ScoreMode = ParentScoreMode.Score +} +---- + +[source,javascript] +.Example json output +---- +{ + "has_parent": { + "_name": "named_query", + "boost": 1.1, + "type": "developer", + "score_mode": "score", + "query": { + "match_all": {} + }, + "inner_hits": { + "explain": true + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/joining/nested/nested-query-usage.asciidoc b/docs/asciidoc/query-dsl/joining/nested/nested-query-usage.asciidoc new file mode 100644 index 00000000000..7cac48a1388 --- /dev/null +++ b/docs/asciidoc/query-dsl/joining/nested/nested-query-usage.asciidoc @@ -0,0 +1,55 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[nested-query-usage]] +== Nested Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Nested(c => c + .Name("named_query") + .Boost(1.1) + .InnerHits(i=>i.Explain()) + .Query(qq=>qq.MatchAll()) + .Path(p=>p.CuratedTags) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new NestedQuery +{ + Name = "named_query", + Boost = 1.1, + InnerHits = new InnerHits { Explain = true }, + Query = new MatchAllQuery(), + Path = Field(p=>p.CuratedTags) +} +---- + +[source,javascript] +.Example json output +---- +{ + "nested": { + "_name": "named_query", + "boost": 1.1, + "query": { + "match_all": {} + }, + "path": "curatedTags", + "inner_hits": { + "explain": true + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/nest-specific/raw/raw-combine-usage.asciidoc 
b/docs/asciidoc/query-dsl/nest-specific/raw/raw-combine-usage.asciidoc new file mode 100644 index 00000000000..5250786a5b3 --- /dev/null +++ b/docs/asciidoc/query-dsl/nest-specific/raw/raw-combine-usage.asciidoc @@ -0,0 +1,47 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[raw-combine-usage]] +== Raw Combine Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q.Raw(RawTermQuery) && q.Term("x", "y") +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new RawQuery(RawTermQuery) +&& new TermQuery { Field = "x", Value = "y" } +---- + +[source,javascript] +.Example json output +---- +{ + "bool": { + "must": [ + { + "term": { + "fieldname": "value" + } + }, + { + "term": { + "x": { + "value": "y" + } + } + } + ] + } +} +---- + diff --git a/docs/asciidoc/query-dsl/nest-specific/raw/raw-query-usage.asciidoc b/docs/asciidoc/query-dsl/nest-specific/raw/raw-query-usage.asciidoc new file mode 100644 index 00000000000..8a981e5c065 --- /dev/null +++ b/docs/asciidoc/query-dsl/nest-specific/raw/raw-query-usage.asciidoc @@ -0,0 +1,34 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[raw-query-usage]] +== Raw Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Raw(RawTermQuery) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new RawQuery(RawTermQuery) +---- + +[source,javascript] +.Example json output +---- +{ + "term": { + "fieldname": "value" + } +} +---- + diff --git a/docs/asciidoc/query-dsl/span/container/span-containing-query-usage.asciidoc b/docs/asciidoc/query-dsl/span/container/span-containing-query-usage.asciidoc new file mode 100644 index 00000000000..7b7118358e7 --- /dev/null +++ 
b/docs/asciidoc/query-dsl/span/container/span-containing-query-usage.asciidoc @@ -0,0 +1,64 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[span-containing-query-usage]] +== Span Containing Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.SpanContaining(sn => sn + .Name("named_query") + .Boost(1.1) + .Little(i=>i + .SpanTerm(st=>st.Field("field1").Value("hoya")) + ) + .Big(e=>e + .SpanTerm(st=>st.Field("field1").Value("hoya2")) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SpanContainingQuery +{ + Name = "named_query", + Boost = 1.1, + Little = new SpanQuery { SpanTerm = new SpanTermQuery { Field = "field1", Value = "hoya"} }, + Big = new SpanQuery { SpanTerm = new SpanTermQuery { Field = "field1", Value = "hoya2"} }, +} +---- + +[source,javascript] +.Example json output +---- +{ + "span_containing": { + "_name": "named_query", + "boost": 1.1, + "little": { + "span_term": { + "field1": { + "value": "hoya" + } + } + }, + "big": { + "span_term": { + "field1": { + "value": "hoya2" + } + } + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/span/first/span-first-query-usage.asciidoc b/docs/asciidoc/query-dsl/span/first/span-first-query-usage.asciidoc new file mode 100644 index 00000000000..73b2725f3b7 --- /dev/null +++ b/docs/asciidoc/query-dsl/span/first/span-first-query-usage.asciidoc @@ -0,0 +1,59 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[span-first-query-usage]] +== Span First Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.SpanFirst(c => c + .Name("named_query") + .Boost(1.1) + .Match(sq=>sq + .SpanTerm(st=>st.Field(p=>p.Name).Value("value")) + ) + .End(3) +) +---- + +=== Object Initializer Syntax Example + 
+[source,csharp] +---- +new SpanFirstQuery +{ + Name = "named_query", + Boost = 1.1, + End = 3, + Match = new SpanQuery + { + SpanTerm = new SpanTermQuery { Field = "name", Value = "value" } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "span_first": { + "_name": "named_query", + "boost": 1.1, + "match": { + "span_term": { + "name": { + "value": "value" + } + } + }, + "end": 3 + } +} +---- + diff --git a/docs/asciidoc/query-dsl/span/multi-term/span-multi-term-query-usage.asciidoc b/docs/asciidoc/query-dsl/span/multi-term/span-multi-term-query-usage.asciidoc new file mode 100644 index 00000000000..8e0db66aebe --- /dev/null +++ b/docs/asciidoc/query-dsl/span/multi-term/span-multi-term-query-usage.asciidoc @@ -0,0 +1,53 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[span-multi-term-query-usage]] +== Span Multi Term Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.SpanMultiTerm(c => c + .Name("named_query") + .Boost(1.1) + .Match(sq=>sq + .Prefix(pr=>pr.Field(p=>p.Name).Value("pre-*")) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SpanMultiTermQuery +{ + Name = "named_query", + Boost = 1.1, + Match = new PrefixQuery { Field = "name", Value = "pre-*" } +} +---- + +[source,javascript] +.Example json output +---- +{ + "span_multi": { + "_name": "named_query", + "boost": 1.1, + "match": { + "prefix": { + "name": { + "value": "pre-*" + } + } + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/span/near/span-near-query-usage.asciidoc b/docs/asciidoc/query-dsl/span/near/span-near-query-usage.asciidoc new file mode 100644 index 00000000000..aaac1e9a051 --- /dev/null +++ b/docs/asciidoc/query-dsl/span/near/span-near-query-usage.asciidoc @@ -0,0 +1,85 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: 
https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[span-near-query-usage]] +== Span Near Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.SpanNear(sn => sn + .Name("named_query") + .Boost(1.1) + .Clauses( + c=>c.SpanTerm(st=>st.Field("field").Value("value1")), + c=>c.SpanTerm(st=>st.Field("field").Value("value2")), + c=>c.SpanTerm(st=>st.Field("field").Value("value3")) + ) + .Slop(12) + .InOrder(false) + .CollectPayloads(false) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SpanNearQuery +{ + Name = "named_query", + Boost = 1.1, + Clauses = new List + { + new SpanQuery { SpanTerm = new SpanTermQuery { Field = "field", Value = "value1" } }, + new SpanQuery { SpanTerm = new SpanTermQuery { Field = "field", Value = "value2" } }, + new SpanQuery { SpanTerm = new SpanTermQuery { Field = "field", Value = "value3" } } + }, + Slop = 12, + InOrder = false, + CollectPayloads = false +} +---- + +[source,javascript] +.Example json output +---- +{ + "span_near": { + "clauses": [ + { + "span_term": { + "field": { + "value": "value1" + } + } + }, + { + "span_term": { + "field": { + "value": "value2" + } + } + }, + { + "span_term": { + "field": { + "value": "value3" + } + } + } + ], + "slop": 12, + "in_order": false, + "collect_payloads": false, + "_name": "named_query", + "boost": 1.1 + } +} +---- + diff --git a/docs/asciidoc/query-dsl/span/not/span-not-query-usage.asciidoc b/docs/asciidoc/query-dsl/span/not/span-not-query-usage.asciidoc new file mode 100644 index 00000000000..6ef96d2e2a0 --- /dev/null +++ b/docs/asciidoc/query-dsl/span/not/span-not-query-usage.asciidoc @@ -0,0 +1,85 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[span-not-query-usage]] +== Span Not Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.SpanNot(sn => sn + 
.Name("named_query") + .Boost(1.1) + .Dist(12) + .Post(13) + .Pre(14) + .Include(i => i + .SpanTerm(st => st.Field("field1").Value("hoya")) + ) + .Exclude(e => e + .SpanTerm(st => st.Field("field1").Value("hoya2")) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SpanNotQuery +{ + Name = "named_query", + Boost = 1.1, + Dist = 12, + Post = 13, + Pre = 14, + Include = new SpanQuery + { + SpanTerm = new SpanTermQuery + { + Field = "field1", Value = "hoya" + } + }, + Exclude = new SpanQuery + { + SpanTerm = new SpanTermQuery + { + Field = "field1", Value = "hoya2" + } + }, +} +---- + +[source,javascript] +.Example json output +---- +{ + "span_not": { + "_name": "named_query", + "boost": 1.1, + "include": { + "span_term": { + "field1": { + "value": "hoya" + } + } + }, + "exclude": { + "span_term": { + "field1": { + "value": "hoya2" + } + } + }, + "pre": 14, + "post": 13, + "dist": 12 + } +} +---- + diff --git a/docs/asciidoc/query-dsl/span/or/span-or-query-usage.asciidoc b/docs/asciidoc/query-dsl/span/or/span-or-query-usage.asciidoc new file mode 100644 index 00000000000..83640752cd1 --- /dev/null +++ b/docs/asciidoc/query-dsl/span/or/span-or-query-usage.asciidoc @@ -0,0 +1,76 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[span-or-query-usage]] +== Span Or Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.SpanOr(sn => sn + .Name("named_query") + .Boost(1.1) + .Clauses( + c => c.SpanTerm(st => st.Field("field").Value("value1")), + c => c.SpanTerm(st => st.Field("field").Value("value2")), + c => c.SpanTerm(st => st.Field("field").Value("value3")) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SpanOrQuery +{ + Name = "named_query", + Boost = 1.1, + Clauses = new List + { + new SpanQuery { SpanTerm = new SpanTermQuery { Field = "field", Value = 
"value1" } }, + new SpanQuery { SpanTerm = new SpanTermQuery { Field = "field", Value = "value2" } }, + new SpanQuery { SpanTerm = new SpanTermQuery { Field = "field", Value = "value3" } } + }, +} +---- + +[source,javascript] +.Example json output +---- +{ + "span_or": { + "_name": "named_query", + "boost": 1.1, + "clauses": [ + { + "span_term": { + "field": { + "value": "value1" + } + } + }, + { + "span_term": { + "field": { + "value": "value2" + } + } + }, + { + "span_term": { + "field": { + "value": "value3" + } + } + } + ] + } +} +---- + diff --git a/docs/asciidoc/query-dsl/span/term/span-term-query-usage.asciidoc b/docs/asciidoc/query-dsl/span/term/span-term-query-usage.asciidoc new file mode 100644 index 00000000000..3465be21e88 --- /dev/null +++ b/docs/asciidoc/query-dsl/span/term/span-term-query-usage.asciidoc @@ -0,0 +1,49 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[span-term-query-usage]] +== Span Term Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.SpanTerm(c => c + .Name("named_query") + .Boost(1.1) + .Field("user") + .Value("kimchy") +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SpanTermQuery +{ + Name = "named_query", + Boost = 1.1, + Value = "kimchy", + Field = "user" +} +---- + +[source,javascript] +.Example json output +---- +{ + "span_term": { + "user": { + "_name": "named_query", + "boost": 1.1, + "value": "kimchy" + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/span/within/span-within-query-usage.asciidoc b/docs/asciidoc/query-dsl/span/within/span-within-query-usage.asciidoc new file mode 100644 index 00000000000..2aaddef7c6a --- /dev/null +++ b/docs/asciidoc/query-dsl/span/within/span-within-query-usage.asciidoc @@ -0,0 +1,64 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: 
https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[span-within-query-usage]] +== Span Within Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.SpanWithin(sn => sn + .Name("named_query") + .Boost(1.1) + .Little(i=>i + .SpanTerm(st=>st.Field("field1").Value("hoya")) + ) + .Big(e=>e + .SpanTerm(st=>st.Field("field1").Value("hoya2")) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SpanWithinQuery +{ + Name = "named_query", + Boost = 1.1, + Little = new SpanQuery { SpanTerm = new SpanTermQuery { Field = "field1", Value = "hoya"} }, + Big = new SpanQuery { SpanTerm = new SpanTermQuery { Field = "field1", Value = "hoya2"} }, +} +---- + +[source,javascript] +.Example json output +---- +{ + "span_within": { + "_name": "named_query", + "boost": 1.1, + "little": { + "span_term": { + "field1": { + "value": "hoya" + } + } + }, + "big": { + "span_term": { + "field1": { + "value": "hoya2" + } + } + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/specialized/more-like-this/more-like-this-query-usage.asciidoc b/docs/asciidoc/query-dsl/specialized/more-like-this/more-like-this-query-usage.asciidoc new file mode 100644 index 00000000000..bbb1ae7e330 --- /dev/null +++ b/docs/asciidoc/query-dsl/specialized/more-like-this/more-like-this-query-usage.asciidoc @@ -0,0 +1,110 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[more-like-this-query-usage]] +== More Like This Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.MoreLikeThis(sn => sn + .Name("named_query") + .Boost(1.1) + .Like(l=>l + .Document(d=>d .Id(Project.Instance.Name)) + .Text("some long text") + ) + .Analyzer("some_analyzer") + .BoostTerms(1.1) + .Include() + .MaxDocumentFrequency(12) + .MaxQueryTerms(12) + .MaxWordLength(300) + .MinDocumentFrequency(1) + 
.MinTermFrequency(1) + .MinWordLength(10) + .StopWords("and", "the") + .MinimumShouldMatch(1) + .Fields(f=>f.Field(p=>p.Name)) + .Unlike(l=>l + .Text("not like this text") + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new MoreLikeThisQuery +{ + Name = "named_query", + Boost = 1.1, + Fields = Fields(p=>p.Name), + Like = new List + { + new LikeDocument(Project.Instance.Name), + "some long text" + }, + Analyzer = "some_analyzer", + BoostTerms = 1.1, + Include = true, + MaxDocumentFrequency = 12, + MaxQueryTerms = 12, + MaxWordLength = 300, + MinDocumentFrequency = 1, + MinTermFrequency = 1, + MinWordLength = 10, + MinimumShouldMatch = 1, + StopWords = new [] { "and", "the"}, + Unlike = new List + { + "not like this text" + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "mlt": { + "fields": [ + "name" + ], + "minimum_should_match": 1, + "stop_words": [ + "and", + "the" + ], + "min_term_freq": 1, + "max_query_terms": 12, + "min_doc_freq": 1, + "max_doc_freq": 12, + "min_word_len": 10, + "max_word_len": 300, + "boost_terms": 1.1, + "analyzer": "some_analyzer", + "include": true, + "like": [ + { + "_index": "project", + "_type": "project", + "_id": "Durgan LLC" + }, + "some long text" + ], + "unlike": [ + "not like this text" + ], + "_name": "named_query", + "boost": 1.1 + } +} +---- + diff --git a/docs/asciidoc/query-dsl/specialized/script/script-query-usage.asciidoc b/docs/asciidoc/query-dsl/specialized/script/script-query-usage.asciidoc new file mode 100644 index 00000000000..c669ac13d84 --- /dev/null +++ b/docs/asciidoc/query-dsl/specialized/script/script-query-usage.asciidoc @@ -0,0 +1,55 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[script-query-usage]] +== Script Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Script(sn => sn + .Name("named_query") + 
.Boost(1.1) + .Inline(_templateString) + .Params(p=>p.Add("param1", 1)) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new ScriptQuery +{ + Name = "named_query", + Boost = 1.1, + Inline = _templateString, + Params = new Dictionary + { + { "param1", 1 } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "script": { + "_name": "named_query", + "boost": 1.1, + "script": { + "inline": "doc['num1'].value > param1", + "params": { + "param1": 1 + } + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/specialized/template/template-query-usage.asciidoc b/docs/asciidoc/query-dsl/specialized/template/template-query-usage.asciidoc new file mode 100644 index 00000000000..13361285b06 --- /dev/null +++ b/docs/asciidoc/query-dsl/specialized/template/template-query-usage.asciidoc @@ -0,0 +1,53 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[template-query-usage]] +== Template Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Template(sn => sn + .Name("named_query") + .Boost(1.1) + .Inline(_templateString) + .Params(p=>p.Add("query_string", "all about search")) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new TemplateQuery +{ + Name = "named_query", + Boost = 1.1, + Inline = _templateString, + Params = new Dictionary + { + { "query_string", "all about search" } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "template": { + "_name": "named_query", + "boost": 1.1, + "inline": "{ \"match\": { \"text\": \"{{query_string}}\" } }", + "params": { + "query_string": "all about search" + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/term-level/exists/exists-query-usage.asciidoc b/docs/asciidoc/query-dsl/term-level/exists/exists-query-usage.asciidoc new file mode 100644 index 00000000000..974d4b6c7ac --- /dev/null +++ 
b/docs/asciidoc/query-dsl/term-level/exists/exists-query-usage.asciidoc @@ -0,0 +1,45 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[exists-query-usage]] +== Exists Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Exists(c => c + .Name("named_query") + .Boost(1.1) + .Field(p => p.Description) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new ExistsQuery +{ + Name = "named_query", + Boost = 1.1, + Field = "description", +} +---- + +[source,javascript] +.Example json output +---- +{ + "exists": { + "_name": "named_query", + "boost": 1.1, + "field": "description" + } +} +---- + diff --git a/docs/asciidoc/query-dsl/term-level/fuzzy/fuzzy-date-query-usage.asciidoc b/docs/asciidoc/query-dsl/term-level/fuzzy/fuzzy-date-query-usage.asciidoc new file mode 100644 index 00000000000..bee0aeb2619 --- /dev/null +++ b/docs/asciidoc/query-dsl/term-level/fuzzy/fuzzy-date-query-usage.asciidoc @@ -0,0 +1,64 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[fuzzy-date-query-usage]] +== Fuzzy Date Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.FuzzyDate(c => c + .Name("named_query") + .Boost(1.1) + .Field(p => p.Description) + .Fuzziness(TimeSpan.FromDays(2)) + .Value(Project.Instance.StartedOn) + .MaxExpansions(100) + .PrefixLength(3) + .Rewrite(RewriteMultiTerm.ConstantScore) + .Transpositions() +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new FuzzyDateQuery +{ + Name = "named_query", + Boost = 1.1, + Field = "description", + Fuzziness = TimeSpan.FromDays(2), + Value = Project.Instance.StartedOn, + MaxExpansions = 100, + PrefixLength = 3, + Rewrite = RewriteMultiTerm.ConstantScore, + Transpositions = true +} +---- 
+ +[source,javascript] +.Example json output +---- +{ + "fuzzy": { + "description": { + "_name": "named_query", + "boost": 1.1, + "fuzziness": "2d", + "max_expansions": 100, + "prefix_length": 3, + "rewrite": "constant_score", + "transpositions": true, + "value": "2015-01-01T00:00:00" + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/term-level/fuzzy/fuzzy-numeric-query-usage.asciidoc b/docs/asciidoc/query-dsl/term-level/fuzzy/fuzzy-numeric-query-usage.asciidoc new file mode 100644 index 00000000000..bc8810eafe2 --- /dev/null +++ b/docs/asciidoc/query-dsl/term-level/fuzzy/fuzzy-numeric-query-usage.asciidoc @@ -0,0 +1,64 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[fuzzy-numeric-query-usage]] +== Fuzzy Numeric Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.FuzzyNumeric(c => c + .Name("named_query") + .Boost(1.1) + .Field(p => p.Description) + .Fuzziness(2) + .Value(12) + .MaxExpansions(100) + .PrefixLength(3) + .Rewrite(RewriteMultiTerm.ConstantScore) + .Transpositions() +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new FuzzyNumericQuery +{ + Name = "named_query", + Boost = 1.1, + Field = "description", + Fuzziness = 2, + Value = 12, + MaxExpansions = 100, + PrefixLength = 3, + Rewrite = RewriteMultiTerm.ConstantScore, + Transpositions = true +} +---- + +[source,javascript] +.Example json output +---- +{ + "fuzzy": { + "description": { + "_name": "named_query", + "boost": 1.1, + "fuzziness": 2.0, + "max_expansions": 100, + "prefix_length": 3, + "rewrite": "constant_score", + "transpositions": true, + "value": 12.0 + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/term-level/fuzzy/fuzzy-query-usage.asciidoc b/docs/asciidoc/query-dsl/term-level/fuzzy/fuzzy-query-usage.asciidoc new file mode 100644 index 00000000000..1a0d703c63d --- /dev/null +++ 
b/docs/asciidoc/query-dsl/term-level/fuzzy/fuzzy-query-usage.asciidoc @@ -0,0 +1,64 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[fuzzy-query-usage]] +== Fuzzy Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Fuzzy(c => c + .Name("named_query") + .Boost(1.1) + .Field(p => p.Description) + .Fuzziness(Fuzziness.Auto) + .Value("ki") + .MaxExpansions(100) + .PrefixLength(3) + .Rewrite(RewriteMultiTerm.ConstantScore) + .Transpositions() +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new FuzzyQuery +{ + Name = "named_query", + Boost = 1.1, + Field = "description", + Fuzziness = Fuzziness.Auto, + Value = "ki", + MaxExpansions = 100, + PrefixLength = 3, + Rewrite = RewriteMultiTerm.ConstantScore, + Transpositions = true +} +---- + +[source,javascript] +.Example json output +---- +{ + "fuzzy": { + "description": { + "_name": "named_query", + "boost": 1.1, + "fuzziness": "AUTO", + "max_expansions": 100, + "prefix_length": 3, + "rewrite": "constant_score", + "transpositions": true, + "value": "ki" + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/term-level/ids/ids-query-usage.asciidoc b/docs/asciidoc/query-dsl/term-level/ids/ids-query-usage.asciidoc new file mode 100644 index 00000000000..19cd0489925 --- /dev/null +++ b/docs/asciidoc/query-dsl/term-level/ids/ids-query-usage.asciidoc @@ -0,0 +1,56 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[ids-query-usage]] +== Ids Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Ids(c => c + .Name("named_query") + .Boost(1.1) + .Values(1, 2, 3, 4) + .Types(typeof(Project), typeof(Developer)) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new IdsQuery +{ + Name = 
"named_query", + Boost = 1.1, + Values = new List { 1, 2,3,4 }, + Types = Type().And() +} +---- + +[source,javascript] +.Example json output +---- +{ + "ids": { + "_name": "named_query", + "boost": 1.1, + "types": [ + "project", + "developer" + ], + "values": [ + 1, + 2, + 3, + 4 + ] + } +} +---- + diff --git a/docs/asciidoc/query-dsl/term-level/missing/missing-query-usage.asciidoc b/docs/asciidoc/query-dsl/term-level/missing/missing-query-usage.asciidoc new file mode 100644 index 00000000000..01a6f3ef835 --- /dev/null +++ b/docs/asciidoc/query-dsl/term-level/missing/missing-query-usage.asciidoc @@ -0,0 +1,51 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[missing-query-usage]] +== Missing Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Missing(c => c + .Name("named_query") + .Boost(1.1) + .Field(p => p.Description) + .NullValue() + .Existence() +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new MissingQuery +{ + Name = "named_query", + Boost = 1.1, + Field = "description", + NullValue = true, + Existence = true +} +---- + +[source,javascript] +.Example json output +---- +{ + "missing": { + "_name": "named_query", + "boost": 1.1, + "existence": true, + "field": "description", + "null_value": true + } +} +---- + diff --git a/docs/asciidoc/query-dsl/term-level/prefix/prefix-query-usage.asciidoc b/docs/asciidoc/query-dsl/term-level/prefix/prefix-query-usage.asciidoc new file mode 100644 index 00000000000..719986ba859 --- /dev/null +++ b/docs/asciidoc/query-dsl/term-level/prefix/prefix-query-usage.asciidoc @@ -0,0 +1,52 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[prefix-query-usage]] +== Prefix Query Usage + +=== Fluent DSL Example + 
+[source,csharp] +---- +q +.Prefix(c => c + .Name("named_query") + .Boost(1.1) + .Field(p => p.Description) + .Value("proj") + .Rewrite(RewriteMultiTerm.TopTermsBoostN) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new PrefixQuery +{ + Name = "named_query", + Boost = 1.1, + Field = "description", + Value = "proj", + Rewrite = RewriteMultiTerm.TopTermsBoostN +} +---- + +[source,javascript] +.Example json output +---- +{ + "prefix": { + "description": { + "_name": "named_query", + "boost": 1.1, + "rewrite": "top_terms_boost_N", + "value": "proj" + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/term-level/range/date-range-query-usage.asciidoc b/docs/asciidoc/query-dsl/term-level/range/date-range-query-usage.asciidoc new file mode 100644 index 00000000000..eb60fdb35c6 --- /dev/null +++ b/docs/asciidoc/query-dsl/term-level/range/date-range-query-usage.asciidoc @@ -0,0 +1,64 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[date-range-query-usage]] +== Date Range Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.DateRange(c => c + .Name("named_query") + .Boost(1.1) + .Field(p => p.Description) + .GreaterThan(FixedDate) + .GreaterThanOrEquals(DateMath.Anchored(FixedDate).RoundTo(TimeUnit.Month)) + .LessThan("01/01/2012") + .LessThanOrEquals(DateMath.Now) + .Format("dd/MM/yyyy||yyyy") + .TimeZone("+01:00") +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new DateRangeQuery +{ + Name = "named_query", + Boost = 1.1, + Field = "description", + GreaterThan = FixedDate, + GreaterThanOrEqualTo = DateMath.Anchored(FixedDate).RoundTo(TimeUnit.Month), + LessThan = "01/01/2012", + LessThanOrEqualTo = DateMath.Now, + TimeZone = "+01:00", + Format = "dd/MM/yyyy||yyyy" +} +---- + +[source,javascript] +.Example json output +---- +{ + "range": { + "description": { + 
"_name": "named_query", + "boost": 1.1, + "format": "dd/MM/yyyy||yyyy", + "gt": "2015-06-06T12:01:02.123", + "gte": "2015-06-06T12:01:02.123||/M", + "lt": "01/01/2012", + "lte": "now", + "time_zone": "+01:00" + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/term-level/range/numeric-range-query-usage.asciidoc b/docs/asciidoc/query-dsl/term-level/range/numeric-range-query-usage.asciidoc new file mode 100644 index 00000000000..f4c4e64f443 --- /dev/null +++ b/docs/asciidoc/query-dsl/term-level/range/numeric-range-query-usage.asciidoc @@ -0,0 +1,58 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[numeric-range-query-usage]] +== Numeric Range Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Range(c => c + .Name("named_query") + .Boost(1.1) + .Field(p => p.Description) + .GreaterThan(1.0) + .GreaterThanOrEquals(1.1) + .LessThan(2.1) + .LessThanOrEquals(2.0) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new NumericRangeQuery +{ + Name = "named_query", + Boost = 1.1, + Field = "description", + GreaterThan = 1.0, + GreaterThanOrEqualTo = 1.1, + LessThan = 2.1, + LessThanOrEqualTo = 2.0 +} +---- + +[source,javascript] +.Example json output +---- +{ + "range": { + "description": { + "_name": "named_query", + "boost": 1.1, + "gt": 1.0, + "gte": 1.1, + "lt": 2.1, + "lte": 2.0 + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/term-level/range/term-range-query-usage.asciidoc b/docs/asciidoc/query-dsl/term-level/range/term-range-query-usage.asciidoc new file mode 100644 index 00000000000..ee93de92f42 --- /dev/null +++ b/docs/asciidoc/query-dsl/term-level/range/term-range-query-usage.asciidoc @@ -0,0 +1,58 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + 
+[[term-range-query-usage]] +== Term Range Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.TermRange(c => c + .Name("named_query") + .Boost(1.1) + .Field(p => p.Description) + .GreaterThan("foo") + .GreaterThanOrEquals("foof") + .LessThan("bar") + .LessThanOrEquals("barb") +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new TermRangeQuery +{ + Name = "named_query", + Boost = 1.1, + Field = "description", + GreaterThan = "foo", + GreaterThanOrEqualTo = "foof", + LessThan = "bar", + LessThanOrEqualTo = "barb" +} +---- + +[source,javascript] +.Example json output +---- +{ + "range": { + "description": { + "_name": "named_query", + "boost": 1.1, + "gt": "foo", + "gte": "foof", + "lt": "bar", + "lte": "barb" + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/term-level/regexp/regexp-query-usage.asciidoc b/docs/asciidoc/query-dsl/term-level/regexp/regexp-query-usage.asciidoc new file mode 100644 index 00000000000..5d99b771e17 --- /dev/null +++ b/docs/asciidoc/query-dsl/term-level/regexp/regexp-query-usage.asciidoc @@ -0,0 +1,55 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[regexp-query-usage]] +== Regexp Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Regexp(c => c + .Name("named_query") + .Boost(1.1) + .Field(p => p.Description) + .Value("s.*y") + .Flags("INTERSECTION|COMPLEMENT|EMPTY") + .MaximumDeterminizedStates(20000) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new RegexpQuery +{ + Name = "named_query", + Boost = 1.1, + Field = "description", + Value = "s.*y", + Flags = "INTERSECTION|COMPLEMENT|EMPTY", + MaximumDeterminizedStates = 20000 +} +---- + +[source,javascript] +.Example json output +---- +{ + "regexp": { + "description": { + "_name": "named_query", + "boost": 1.1, + "flags": "INTERSECTION|COMPLEMENT|EMPTY", + 
"max_determinized_states": 20000, + "value": "s.*y" + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/term-level/term/term-query-usage.asciidoc b/docs/asciidoc/query-dsl/term-level/term/term-query-usage.asciidoc new file mode 100644 index 00000000000..56bfff4117c --- /dev/null +++ b/docs/asciidoc/query-dsl/term-level/term/term-query-usage.asciidoc @@ -0,0 +1,49 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[term-query-usage]] +== Term Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Term(c => c + .Name("named_query") + .Boost(1.1) + .Field(p => p.Description) + .Value("project description") +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new TermQuery +{ + Name = "named_query", + Boost = 1.1, + Field = "description", + Value = "project description" +} +---- + +[source,javascript] +.Example json output +---- +{ + "term": { + "description": { + "_name": "named_query", + "boost": 1.1, + "value": "project description" + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/term-level/terms/terms-list-query-usage.asciidoc b/docs/asciidoc/query-dsl/term-level/terms/terms-list-query-usage.asciidoc new file mode 100644 index 00000000000..41549d983dc --- /dev/null +++ b/docs/asciidoc/query-dsl/term-level/terms/terms-list-query-usage.asciidoc @@ -0,0 +1,148 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[terms-list-query-usage]] +== Terms List Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Terms(c => c + .Name("named_query") + .Boost(1.1) + .Field(p => p.Description) + .DisableCoord() + .MinimumShouldMatch(MinimumShouldMatch.Fixed(2)) + .Terms(new List { "term1", "term2" }) +) +---- + +=== Object Initializer Syntax Example + 
+[source,csharp] +---- +new TermsQuery +{ + Name = "named_query", + Boost = 1.1, + Field = "description", + Terms = new List { "term1", "term2" }, + DisableCoord = true, + MinimumShouldMatch = 2 +} +---- + +[source,javascript] +.Example json output +---- +{ + "terms": { + "_name": "named_query", + "boost": 1.1, + "description": [ + "term1", + "term2" + ], + "disable_coord": true, + "minimum_should_match": 2 + } +} +---- + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Terms(c => c + .Name("named_query") + .Boost(1.1) + .Field(p => p.Description) + .DisableCoord() + .Terms(_terms) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new TermsQuery +{ + Name = "named_query", + Boost = 1.1, + Field = "description", + Terms = _terms, + DisableCoord = true, +} +---- + +[source,javascript] +.Example json output +---- +{ + "terms": { + "_name": "named_query", + "boost": 1.1, + "description": [ + [ + "term1", + "term2" + ] + ], + "disable_coord": true + } +} +---- + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Terms(c => c + .Name("named_query") + .Boost(1.1) + .Field(p => p.NumberOfCommits) + .DisableCoord() + .Terms(_terms) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new TermsQuery +{ + Name = "named_query", + Boost = 1.1, + Field = "numberOfCommits", + Terms = _terms, + DisableCoord = true, +} +---- + +[source,javascript] +.Example json output +---- +{ + "terms": { + "_name": "named_query", + "boost": 1.1, + "numberOfCommits": [ + [ + "term1", + "term2" + ] + ], + "disable_coord": true + } +} +---- + diff --git a/docs/asciidoc/query-dsl/term-level/terms/terms-lookup-query-usage.asciidoc b/docs/asciidoc/query-dsl/term-level/terms/terms-lookup-query-usage.asciidoc new file mode 100644 index 00000000000..95f1f7ad444 --- /dev/null +++ b/docs/asciidoc/query-dsl/term-level/terms/terms-lookup-query-usage.asciidoc @@ -0,0 +1,58 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current 
+ +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[terms-lookup-query-usage]] +== Terms Lookup Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Terms(c => c + .Name("named_query") + .Boost(1.1) + .Field(p => p.Description) + .TermsLookup(e=>e.Path(p=>p.LastName).Id(12)) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new TermsQuery +{ + Name = "named_query", + Boost = 1.1, + Field = "description", + TermsLookup = new FieldLookup + { + Id = 12, + Index = Index(), + Type = Type(), + Path = Field(p=>p.LastName) + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "terms": { + "_name": "named_query", + "boost": 1.1, + "description": { + "id": 12, + "index": "devs", + "path": "lastName", + "type": "developer" + } + } +} +---- + diff --git a/docs/asciidoc/query-dsl/term-level/terms/terms-query-usage.asciidoc b/docs/asciidoc/query-dsl/term-level/terms/terms-query-usage.asciidoc new file mode 100644 index 00000000000..84e2dcd2185 --- /dev/null +++ b/docs/asciidoc/query-dsl/term-level/terms/terms-query-usage.asciidoc @@ -0,0 +1,79 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[terms-query-usage]] +== Terms Query Usage + +Filters documents that have fields that match any of the provided terms (not analyzed). + +Be sure to read the Elasticsearch documentation on {ref_current}/query-dsl-terms-query.html[Terms query] for more information. 
+ +=== Fluent DSL Example + +[source,csharp] +---- +q +.Terms(c => c + .Name("named_query") + .Boost(1.1) + .Field(p => p.Description) + .DisableCoord() + .MinimumShouldMatch(MinimumShouldMatch.Fixed(2)) + .Terms("term1", "term2") +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new TermsQuery +{ + Name = "named_query", + Boost = 1.1, + Field = "description", + Terms = ExpectedTerms, + DisableCoord = true, + MinimumShouldMatch = 2 +} +---- + +[source,javascript] +.Example json output +---- +{ + "terms": { + "_name": "named_query", + "boost": 1.1, + "description": [ + "term1", + "term2" + ], + "disable_coord": true, + "minimum_should_match": 2 + } +} +---- + +[[single-term-terms-query]] +[float] +== Single term Terms Query + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Terms(c => c + .Name("named_query") + .Boost(1.1) + .Field(p => p.Description) + .DisableCoord() + .MinimumShouldMatch(MinimumShouldMatch.Fixed(2)) + .Terms("term1") +) +---- + diff --git a/docs/asciidoc/query-dsl/term-level/type/type-query-usage.asciidoc b/docs/asciidoc/query-dsl/term-level/type/type-query-usage.asciidoc new file mode 100644 index 00000000000..2bff4a45c6f --- /dev/null +++ b/docs/asciidoc/query-dsl/term-level/type/type-query-usage.asciidoc @@ -0,0 +1,45 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[type-query-usage]] +== Type Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Type(c => c + .Name("named_query") + .Boost(1.1) + .Value() +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new TypeQuery +{ + Name = "named_query", + Boost = 1.1, + Value = Type() +} +---- + +[source,javascript] +.Example json output +---- +{ + "type": { + "_name": "named_query", + "boost": 1.1, + "value": "developer" + } +} +---- + diff --git 
a/docs/asciidoc/query-dsl/term-level/wildcard/wildcard-query-usage.asciidoc b/docs/asciidoc/query-dsl/term-level/wildcard/wildcard-query-usage.asciidoc new file mode 100644 index 00000000000..8e23c6dcab2 --- /dev/null +++ b/docs/asciidoc/query-dsl/term-level/wildcard/wildcard-query-usage.asciidoc @@ -0,0 +1,52 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[wildcard-query-usage]] +== Wildcard Query Usage + +=== Fluent DSL Example + +[source,csharp] +---- +q +.Wildcard(c => c + .Name("named_query") + .Boost(1.1) + .Field(p => p.Description) + .Value("p*oj") + .Rewrite(RewriteMultiTerm.TopTermsBoostN) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new WildcardQuery +{ + Name = "named_query", + Boost = 1.1, + Field = "description", + Value = "p*oj", + Rewrite = RewriteMultiTerm.TopTermsBoostN +} +---- + +[source,javascript] +.Example json output +---- +{ + "wildcard": { + "description": { + "_name": "named_query", + "boost": 1.1, + "rewrite": "top_terms_boost_N", + "value": "p*oj" + } + } +} +---- + diff --git a/docs/asciidoc/search/request/explain-usage.asciidoc b/docs/asciidoc/search/request/explain-usage.asciidoc new file mode 100644 index 00000000000..fb132480708 --- /dev/null +++ b/docs/asciidoc/search/request/explain-usage.asciidoc @@ -0,0 +1,34 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[explain-usage]] +== Explain Usage + +Enables explanation for each hit on how its score was computed. 
+ +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Explain() +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest { Explain = true } +---- + +[source,javascript] +.Example json output +---- +{ + "explain": true +} +---- + diff --git a/docs/asciidoc/search/request/fielddata-fields-usage.asciidoc b/docs/asciidoc/search/request/fielddata-fields-usage.asciidoc new file mode 100644 index 00000000000..8b266c42f30 --- /dev/null +++ b/docs/asciidoc/search/request/fielddata-fields-usage.asciidoc @@ -0,0 +1,43 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[fielddata-fields-usage]] +== Fielddata Fields Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.FielddataFields(fs => fs + .Field(p => p.Name) + .Field(p => p.LeadDeveloper) + .Field(p => p.StartedOn) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + FielddataFields = new string [] { "name", "leadDeveloper", "startedOn" } +} +---- + +[source,javascript] +.Example json output +---- +{ + "fielddata_fields": [ + "name", + "leadDeveloper", + "startedOn" + ] +} +---- + diff --git a/docs/asciidoc/search/request/fields-usage.asciidoc b/docs/asciidoc/search/request/fields-usage.asciidoc new file mode 100644 index 00000000000..bef2edea17a --- /dev/null +++ b/docs/asciidoc/search/request/fields-usage.asciidoc @@ -0,0 +1,41 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[fields-usage]] +== Fields Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Fields(fs => fs + .Field(p => p.Name) + .Field(p => p.StartedOn) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Fields = Fields(p => 
p.Name, p => p.StartedOn) +} +---- + +[source,javascript] +.Example json output +---- +{ + "fields": [ + "name", + "startedOn" + ] +} +---- + diff --git a/docs/asciidoc/search/request/from-and-size-usage.asciidoc b/docs/asciidoc/search/request/from-and-size-usage.asciidoc new file mode 100644 index 00000000000..c9569d7154f --- /dev/null +++ b/docs/asciidoc/search/request/from-and-size-usage.asciidoc @@ -0,0 +1,38 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[from-and-size-usage]] +== From And Size Usage + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest() +{ + From = 10, + Size = 12 +} +---- + +[source,javascript] +.Example json output +---- +{ + "from": 10, + "size": 12 +} +---- + +=== Fluent DSL Example + +[source,csharp] +---- +s => s + .From(10) + .Size(12) +---- + diff --git a/docs/asciidoc/search/request/highlighting-usage.asciidoc b/docs/asciidoc/search/request/highlighting-usage.asciidoc new file mode 100644 index 00000000000..d6d248856ea --- /dev/null +++ b/docs/asciidoc/search/request/highlighting-usage.asciidoc @@ -0,0 +1,193 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[highlighting-usage]] +== Highlighting Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Query(q => q + .Match(m => m + .Field(f => f.Name.Suffix("standard")) + .Query("Upton Sons Shield Rice Rowe Roberts") + ) +) +.Highlight(h => h + .PreTags("") + .PostTags("") + .Fields( + fs => fs + .Field(p => p.Name.Suffix("standard")) + .Type(HighlighterType.Plain) + .ForceSource() + .FragmentSize(150) + .NumberOfFragments(3) + .NoMatchSize(150), + fs => fs + .Field(p => p.LeadDeveloper.FirstName) + .Type(HighlighterType.Fvh) + .PreTags("") + .PostTags("") + 
.HighlightQuery(q => q + .Match(m => m + .Field(p => p.LeadDeveloper.FirstName) + .Query("Kurt Edgardo Naomi Dariana Justice Felton") + ) + ), + fs => fs + .Field(p => p.State.Suffix("offsets")) + .Type(HighlighterType.Postings) + .PreTags("") + .PostTags("") + .HighlightQuery(q => q + .Terms(t => t + .Field(f => f.State.Suffix("offsets")) + .Terms( + StateOfBeing.Stable.ToString().ToLowerInvariant(), + StateOfBeing.BellyUp.ToString().ToLowerInvariant() + ) + ) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Query = new MatchQuery + { + Query = "Upton Sons Shield Rice Rowe Roberts", + Field = "name.standard" + }, + Highlight = new Highlight + { + PreTags = new[] { "" }, + PostTags = new[] { "" }, + Fields = new Dictionary + { + { "name.standard", new HighlightField + { + Type = HighlighterType.Plain, + ForceSource = true, + FragmentSize = 150, + NumberOfFragments = 3, + NoMatchSize = 150 + } + }, + { "leadDeveloper.firstName", new HighlightField + { + Type = HighlighterType.Fvh, + PreTags = new[] { ""}, + PostTags = new[] { ""}, + HighlightQuery = new MatchQuery + { + Field = "leadDeveloper.firstName", + Query = "Kurt Edgardo Naomi Dariana Justice Felton" + } + } + }, + { "state.offsets", new HighlightField + { + Type = HighlighterType.Postings, + PreTags = new[] { ""}, + PostTags = new[] { ""}, + HighlightQuery = new TermsQuery + { + Field = "state.offsets", + Terms = new [] { "stable", "bellyup" } + } + } + } + } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "query": { + "match": { + "name.standard": { + "query": "Upton Sons Shield Rice Rowe Roberts" + } + } + }, + "highlight": { + "pre_tags": [ + "" + ], + "post_tags": [ + "" + ], + "fields": { + "name.standard": { + "type": "plain", + "force_source": true, + "fragment_size": 150, + "number_of_fragments": 3, + "no_match_size": 150 + }, + "leadDeveloper.firstName": { + "type": "fvh", + "pre_tags": [ + "" + ], + "post_tags": [ + "" + 
], + "highlight_query": { + "match": { + "leadDeveloper.firstName": { + "query": "Kurt Edgardo Naomi Dariana Justice Felton" + } + } + } + }, + "state.offsets": { + "type": "postings", + "pre_tags": [ + "" + ], + "post_tags": [ + "" + ], + "highlight_query": { + "terms": { + "state.offsets": [ + "stable", + "bellyup" + ] + } + } + } + } + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +highlight.Should().Contain(""); +highlight.Should().Contain(""); +highlight.Should().Contain(""); +highlight.Should().Contain(""); +highlight.Should().Contain(""); +highlight.Should().Contain(""); +Assert.True(false, $"highlights contains unexpected key {highlightHit.Key}"); +---- + diff --git a/docs/asciidoc/search/request/index-boost-usage.asciidoc b/docs/asciidoc/search/request/index-boost-usage.asciidoc new file mode 100644 index 00000000000..f872e339457 --- /dev/null +++ b/docs/asciidoc/search/request/index-boost-usage.asciidoc @@ -0,0 +1,45 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[index-boost-usage]] +== Index Boost Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.IndicesBoost(b => b + .Add("index1", 1.4) + .Add("index2", 1.3) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + IndicesBoost = new Dictionary + { + { "index1", 1.4 }, + { "index2", 1.3 } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "indices_boost": { + "index1": 1.4, + "index2": 1.3 + } +} +---- + diff --git a/docs/asciidoc/search/request/inner-hits-usage.asciidoc b/docs/asciidoc/search/request/inner-hits-usage.asciidoc new file mode 100644 index 00000000000..e5fc2e80b7c --- /dev/null +++ b/docs/asciidoc/search/request/inner-hits-usage.asciidoc @@ -0,0 +1,211 @@ +:ref_current: 
https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[inner-hits-usage]] +== Inner Hits Usage + +[source,csharp] +---- +public interface IRoyal +{ + string Name { get; set; } +} +---- + +[source,csharp] +---- +var current = create(); + +var royals = current.ToList(); + +var royal1 = royal; + +bulk.Index(i => i.Document(royal1).Index(this._index).Parent(parent)); + +indexChildren(royal); +---- + +[source,csharp] +---- +var create = this._client.CreateIndex(this._index, c => c + .Settings(s => s + .NumberOfReplicas(0) + .NumberOfShards(1) + ) + .Mappings(map => map + .Map(m => m.AutoMap() + .Properties(props => + RoyalProps(props) + .Nested(n => n.Name(p => p.Foes).AutoMap()) + ) + ) + .Map(m => m.AutoMap().Properties(RoyalProps).Parent()) + .Map(m => m.AutoMap().Properties(RoyalProps).Parent()) + .Map(m => m.AutoMap().Properties(RoyalProps).Parent()) + .Map(m => m.AutoMap().Properties(RoyalProps).Parent()) + ) +); +var kings = King.Generator.Generate(2) + .Select(k => + { + k.Foes = King.Generator.Generate(2).ToList(); + return k; + }); +var bulk = new BulkDescriptor(); +IndexAll(bulk, () => kings, indexChildren: king => + IndexAll(bulk, () => Prince.Generator.Generate(2), king.Name, prince => + IndexAll(bulk, () => Duke.Generator.Generate(3), prince.Name, duke => + IndexAll(bulk, () => Earl.Generator.Generate(5), duke.Name, earl => + IndexAll(bulk, () => Baron.Generator.Generate(1), earl.Name) + ) + ) + ) +); +this._client.Bulk(bulk); +this._client.Refresh(this._index); +---- + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Index(Index) +.InnerHits(ih => ih + .Type("earls", g => g + .Size(5) + .InnerHits(iih => iih + .Type("barons") + ) + .FielddataFields(p => p.Name) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest(Index, typeof(Duke)) +{ + InnerHits = new NamedInnerHits + { + { "earls", new 
InnerHitsContainer + { + Type = new TypeInnerHit + { + InnerHit = new GlobalInnerHit + { + Size = 5, + FielddataFields = new Field[]{ "name" }, + InnerHits = new NamedInnerHits + { + { "barons", new TypeInnerHit() } + } + } + } + } } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "inner_hits": { + "earls": { + "type": { + "earl": { + "fielddata_fields": [ + "name" + ], + "inner_hits": { + "barons": { + "type": { + "baron": {} + } + } + }, + "size": 5 + } + } + } + } +} +---- + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Index(Index) +.Query(q => + q.HasChild(hc => hc + .Query(hcq => hcq.MatchAll()) + .InnerHits(ih => ih.Name("princes")) + ) || q.Nested(n => n + .Path(p => p.Foes) + .Query(nq => nq.MatchAll()) + .InnerHits() + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest(Index, typeof(King)) +{ + Query = new HasChildQuery + { + Type = typeof(Prince), + Query = new MatchAllQuery(), + InnerHits = new InnerHits { Name = "princes" } + } || new NestedQuery + { + Path = Field(p => p.Foes), + Query = new MatchAllQuery(), + InnerHits = new InnerHits() + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "query": { + "bool": { + "should": [ + { + "has_child": { + "type": "prince", + "query": { + "match_all": {} + }, + "inner_hits": { + "name": "princes" + } + } + }, + { + "nested": { + "query": { + "match_all": {} + }, + "path": "foes", + "inner_hits": {} + } + } + ] + } + } +} +---- + diff --git a/docs/asciidoc/search/request/min-score-usage.asciidoc b/docs/asciidoc/search/request/min-score-usage.asciidoc new file mode 100644 index 00000000000..7be2b54260d --- /dev/null +++ b/docs/asciidoc/search/request/min-score-usage.asciidoc @@ -0,0 +1,50 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[min-score-usage]] +== Min Score Usage + +=== 
Fluent DSL Example + +[source,csharp] +---- +s => s +.MinScore(0.5) +.Query(q => q + .Term(p => p.Name, "elasticsearch") +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + MinScore = 0.5, + Query = new TermQuery + { + Field = "name", + Value = "elasticsearch" + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "min_score": 0.5, + "query": { + "term": { + "name": { + "value": "elasticsearch" + } + } + } +} +---- + diff --git a/docs/asciidoc/search/request/post-filter-usage.asciidoc b/docs/asciidoc/search/request/post-filter-usage.asciidoc new file mode 100644 index 00000000000..66a0dee434b --- /dev/null +++ b/docs/asciidoc/search/request/post-filter-usage.asciidoc @@ -0,0 +1,37 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[post-filter-usage]] +== Post Filter Usage + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest() +{ + PostFilter = new QueryContainer(new MatchAllQuery()) +} +---- + +[source,javascript] +.Example json output +---- +{ + "post_filter": { + "match_all": {} + } +} +---- + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.PostFilter(f => f.MatchAll()) +---- + diff --git a/docs/asciidoc/search/request/profile-usage.asciidoc b/docs/asciidoc/search/request/profile-usage.asciidoc new file mode 100644 index 00000000000..505d44c60c2 --- /dev/null +++ b/docs/asciidoc/search/request/profile-usage.asciidoc @@ -0,0 +1,50 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[profile-usage]] +== Profile Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Profile() +.Query(q => q + .Term(p => p.Name, "elasticsearch") +) +---- + +=== Object Initializer Syntax Example + 
+[source,csharp] +---- +new SearchRequest +{ + Profile = true, + Query = new TermQuery + { + Field = "name", + Value = "elasticsearch" + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "profile": true, + "query": { + "term": { + "name": { + "value": "elasticsearch" + } + } + } +} +---- + diff --git a/docs/asciidoc/search/request/query-usage.asciidoc b/docs/asciidoc/search/request/query-usage.asciidoc new file mode 100644 index 00000000000..4b507e99e59 --- /dev/null +++ b/docs/asciidoc/search/request/query-usage.asciidoc @@ -0,0 +1,49 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[query-usage]] +== Query Usage + +The query element within the search request body allows to define a query using the Query DSL. + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Query(q => q + .Term(p => p.Name, "elasticsearch") +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Query = new TermQuery + { + Field = "name", + Value = "elasticsearch" + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "query": { + "term": { + "name": { + "value": "elasticsearch" + } + } + } +} +---- + diff --git a/docs/asciidoc/search/request/script-fields-usage.asciidoc b/docs/asciidoc/search/request/script-fields-usage.asciidoc new file mode 100644 index 00000000000..9f38f7add9a --- /dev/null +++ b/docs/asciidoc/search/request/script-fields-usage.asciidoc @@ -0,0 +1,72 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[script-fields-usage]] +== Script Fields Usage + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.ScriptFields(sf=>sf + .ScriptField("test1", sc=>sc + .Inline("doc['my_field_name'].value * 2") + ) + .ScriptField("test2", 
sc=>sc + .Inline("doc['my_field_name'].value * factor") + .Params(p=>p + .Add("factor", 2.0) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + ScriptFields = new ScriptFields + { + { "test1", new ScriptField + { + Script = new InlineScript("doc['my_field_name'].value * 2") + } }, + { "test2", new InlineScript("doc['my_field_name'].value * factor") + { + Params = new FluentDictionary + { + { "factor", 2.0 } + } + } } + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "script_fields": { + "test1": { + "script": { + "inline": "doc['my_field_name'].value * 2" + } + }, + "test2": { + "script": { + "inline": "doc['my_field_name'].value * factor", + "params": { + "factor": 2.0 + } + } + } + } +} +---- + diff --git a/docs/asciidoc/search/request/sort-usage.asciidoc b/docs/asciidoc/search/request/sort-usage.asciidoc new file mode 100644 index 00000000000..15592c3f987 --- /dev/null +++ b/docs/asciidoc/search/request/sort-usage.asciidoc @@ -0,0 +1,167 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[sort-usage]] +== Sort Usage + +Allows to add one or more sort on specific fields. Each sort can be reversed as well. +The sort is defined on a per field level, with special field name for _score to sort by score. 
+ +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Sort(ss => ss + .Ascending(p => p.StartedOn) + .Descending(p => p.Name) + .Descending(SortSpecialField.Score) + .Ascending(SortSpecialField.DocumentIndexOrder) + .Field(f => f + .Field(p => p.LastActivity) + .Order(SortOrder.Descending) + .MissingLast() + .UnmappedType(FieldType.Date) + .Mode(SortMode.Average) + .NestedPath(p => p.Tags) + .NestedFilter(q => q.MatchAll()) + ) + .GeoDistance(g => g + .Field(p => p.Location) + .DistanceType(GeoDistanceType.Arc) + .Order(SortOrder.Ascending) + .Unit(DistanceUnit.Centimeters) + .Mode(SortMode.Min) + .PinTo(new GeoLocation(70, -70), new GeoLocation(-12, 12)) + ) + .Script(sc => sc + .Type("number") + .Ascending() + .Script(script => script + .Inline("doc['numberOfCommits'].value * factor") + .Params(p => p.Add("factor", 1.1)) + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Sort = new List + { + new SortField { Field = "startedOn", Order = SortOrder.Ascending }, + new SortField { Field = "name", Order = SortOrder.Descending }, + new SortField { Field = "_score", Order = SortOrder.Descending }, + new SortField { Field = "_doc", Order = SortOrder.Ascending }, + new SortField { + Field = Field(p=>p.LastActivity), + Order = SortOrder.Descending, + Missing = "_last", + UnmappedType = FieldType.Date, + Mode = SortMode.Average, + NestedPath = Field(p=>p.Tags), + NestedFilter = new MatchAllQuery(), + }, + new GeoDistanceSort + { + Field = "location", + Order = SortOrder.Ascending, + DistanceType = GeoDistanceType.Arc, + GeoUnit = DistanceUnit.Centimeters, + Mode = SortMode.Min, + Points = new [] {new GeoLocation(70, -70), new GeoLocation(-12, 12) } + }, + new ScriptSort + { + Type = "number", + Order = SortOrder.Ascending, + Script = new InlineScript("doc['numberOfCommits'].value * factor") + { + Params = new Dictionary + { + { "factor", 1.1 } + } + } + } + } +} +---- + +[source,javascript] +.Example json output 
+---- +{ + "sort": [ + { + "startedOn": { + "order": "asc" + } + }, + { + "name": { + "order": "desc" + } + }, + { + "_score": { + "order": "desc" + } + }, + { + "_doc": { + "order": "asc" + } + }, + { + "lastActivity": { + "missing": "_last", + "order": "desc", + "mode": "avg", + "nested_filter": { + "match_all": {} + }, + "nested_path": "tags", + "unmapped_type": "date" + } + }, + { + "_geo_distance": { + "location": [ + { + "lat": 70.0, + "lon": -70.0 + }, + { + "lat": -12.0, + "lon": 12.0 + } + ], + "order": "asc", + "mode": "min", + "distance_type": "arc", + "unit": "cm" + } + }, + { + "_script": { + "order": "asc", + "type": "number", + "script": { + "params": { + "factor": 1.1 + }, + "inline": "doc['numberOfCommits'].value * factor" + } + } + } + ] +} +---- + diff --git a/docs/asciidoc/search/request/source-filtering-usage.asciidoc b/docs/asciidoc/search/request/source-filtering-usage.asciidoc new file mode 100644 index 00000000000..dc8d099b34d --- /dev/null +++ b/docs/asciidoc/search/request/source-filtering-usage.asciidoc @@ -0,0 +1,119 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[source-filtering-usage]] +== Source Filtering Usage + +Allows to control how the _source field is returned with every hit. +By default operations return the contents of the _source field unless + you have used the fields parameter or if the _source field is disabled. 
+ +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Source(so => so + .Include(f => f + .Fields( + p => p.Name, + p => p.StartedOn + ) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Source = new SourceFilter + { + Include = Fields(p => p.Name, prop => prop.StartedOn) + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "_source": { + "include": [ + "name", + "startedOn" + ] + } +} +---- + +=== Handling Responses + +[source,csharp] +---- +response.IsValid.Should().BeTrue(); +document.Name.Should().NotBeNull(); +document.StartedOn.Should().NotBe(default(DateTime)); +document.Description.Should().BeNull(); +---- + +[source,csharp] +---- +internal class WithSourceFilterProperty +{ + [JsonProperty("_source")] + public ISourceFilter SourceFilter { get; set; } +} +---- + +[source,csharp] +---- +var o = base.Deserialize("{ \"_source\": false }"); + +o.Should().NotBeNull(); + +o.SourceFilter.Should().NotBeNull(); + +o.SourceFilter.Exclude.Should().Contain("*"); +---- + +[source,csharp] +---- +var o = base.Deserialize("{ \"_source\": [\"obj.*\"] }"); + +o.Should().NotBeNull(); + +o.SourceFilter.Should().NotBeNull(); + +o.SourceFilter.Include.Should().Contain("obj.*"); +---- + +[source,csharp] +---- +var o = base.Deserialize("{ \"_source\": \"obj.*\" }"); + +o.Should().NotBeNull(); + +o.SourceFilter.Should().NotBeNull(); + +o.SourceFilter.Include.Should().Contain("obj.*"); +---- + +[source,csharp] +---- +var o = base.Deserialize("{ \"_source\": { \"include\": [\"obj.*\"], \"exclude\": [\"foo.*\"] } }"); + +o.Should().NotBeNull(); + +o.SourceFilter.Should().NotBeNull(); + +o.SourceFilter.Include.Should().Contain("obj.*"); + +o.SourceFilter.Exclude.Should().Contain("foo.*"); +---- + diff --git a/docs/asciidoc/search/request/suggest-usage.asciidoc b/docs/asciidoc/search/request/suggest-usage.asciidoc new file mode 100644 index 00000000000..0519601c49c --- /dev/null +++ 
b/docs/asciidoc/search/request/suggest-usage.asciidoc @@ -0,0 +1,230 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[suggest-usage]] +== Suggest Usage + +The suggest feature suggests similar looking terms based on a provided text by using a suggester. +One or more suggesters, such as the term, phrase and completion suggesters, can be specified as part of the search request. + +=== Handling Responses + +[source,csharp] +---- +var myCompletionSuggest = response.Suggest["my-completion-suggest"]; +myCompletionSuggest.Should().NotBeNull(); +var suggest = myCompletionSuggest.First(); +suggest.Text.Should().Be(Project.Instance.Name); +suggest.Length.Should().BeGreaterThan(0); +var option = suggest.Options.First(); +option.Text.Should().NotBeNullOrEmpty(); +option.Score.Should().BeGreaterThan(0); +var payload = option.Payload(); +payload.Should().NotBeNull(); +payload.Name.Should().Be(Project.Instance.Name); +payload.State.Should().NotBeNull(); +---- + +=== Fluent DSL Example + +[source,csharp] +---- +s => s +.Suggest(ss => ss + .Term("my-term-suggest", t => t + .MaxEdits(1) + .MaxInspections(2) + .MaxTermFrequency(3) + .MinDocFrequency(4) + .MinWordLength(5) + .PrefixLength(6) + .SuggestMode(SuggestMode.Always) + .Analyzer("standard") + .Field(p => p.Name) + .ShardSize(7) + .Size(8) + .Text("hello world") + ) + .Completion("my-completion-suggest", c => c + .Context(ctx => ctx + .Add("color", Project.Projects.First().Suggest.Context.Values.SelectMany(v => v).First()) + ) + .Fuzzy(f => f + .Fuzziness(Fuzziness.Auto) + .MinLength(1) + .PrefixLength(2) + .Transpositions() + .UnicodeAware(false) + ) + .Analyzer("simple") + .Field(p => p.Suggest) + .ShardSize(7) + .Size(8) + .Text(Project.Instance.Name) + ) + .Phrase("my-phrase-suggest", ph => ph + .Collate(c => c + .Query(q => q + .Inline("{ \"match\": { \"{{field_name}}\": \"{{suggestion}}\" }}") + .Params(p => p.Add("field_name", 
"title")) + ) + .Prune() + ) + .Confidence(10.1) + .DirectGenerator(d => d + .Field(p => p.Description) + ) + .GramSize(1) + .Field(p => p.Name) + .Text("hello world") + .RealWordErrorLikelihood(0.5) + ) +) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SearchRequest +{ + Suggest = new SuggestContainer + { + { "my-term-suggest", new SuggestBucket + { + Text = "hello world", + Term = new TermSuggester + { + MaxEdits = 1, + MaxInspections = 2, + MaxTermFrequency = 3, + MinDocFrequency = 4, + MinWordLen = 5, + PrefixLen = 6, + SuggestMode = SuggestMode.Always, + Analyzer = "standard", + Field = Field(p=>p.Name), + ShardSize = 7, + Size = 8 + } + } }, + { "my-completion-suggest", new SuggestBucket + { + Text = Project.Instance.Name, + Completion = new CompletionSuggester + { + Context = new Dictionary { { "color", Project.Projects.First().Suggest.Context.Values.SelectMany(v => v).First() } }, + Fuzzy = new FuzzySuggester + { + Fuzziness = Fuzziness.Auto, + MinLength = 1, + PrefixLength = 2, + Transpositions = true, + UnicodeAware = false + }, + Analyzer = "simple", + Field = Field(p=>p.Suggest), + ShardSize = 7, + Size = 8 + } + } }, + { "my-phrase-suggest", new SuggestBucket + { + Text = "hello world", + Phrase = new PhraseSuggester + { + Collate = new PhraseSuggestCollate + { + Query = new InlineScript("{ \"match\": { \"{{field_name}}\": \"{{suggestion}}\" }}") + { + Params = new Dictionary + { + { "field_name", "title" } + } + }, + Prune = true + }, + Confidence = 10.1, + DirectGenerator = new List + { + new DirectGenerator { Field = "description" } + }, + GramSize = 1, + Field = "name", + RealWordErrorLikelihood = 0.5 + } + } }, + } +} +---- + +[source,javascript] +.Example json output +---- +{ + "suggest": { + "my-completion-suggest": { + "completion": { + "analyzer": "simple", + "context": { + "color": "red" + }, + "field": "suggest", + "fuzzy": { + "fuzziness": "AUTO", + "min_length": 1, + "prefix_length": 2, + "transpositions": 
true, + "unicode_aware": false + }, + "shard_size": 7, + "size": 8 + }, + "text": "Durgan LLC" + }, + "my-phrase-suggest": { + "phrase": { + "collate": { + "query": { + "inline": "{ \"match\": { \"{{field_name}}\": \"{{suggestion}}\" }}", + "params": { + "field_name": "title" + } + }, + "prune": true + }, + "confidence": 10.1, + "direct_generator": [ + { + "field": "description" + } + ], + "field": "name", + "gram_size": 1, + "real_word_error_likelihood": 0.5 + }, + "text": "hello world" + }, + "my-term-suggest": { + "term": { + "analyzer": "standard", + "field": "name", + "max_edits": 1, + "max_inspections": 2, + "max_term_freq": 3.0, + "min_doc_freq": 4.0, + "min_word_len": 5, + "prefix_len": 6, + "shard_size": 7, + "size": 8, + "suggest_mode": "always" + }, + "text": "hello world" + } + } +} +---- + diff --git a/docs/asciidoc/search/suggesters/suggest-api.asciidoc b/docs/asciidoc/search/suggesters/suggest-api.asciidoc new file mode 100644 index 00000000000..d1832c9443c --- /dev/null +++ b/docs/asciidoc/search/suggesters/suggest-api.asciidoc @@ -0,0 +1,164 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/current + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +[[suggest-api]] +== Suggest API + +=== Fluent DSL Example + +[source,csharp] +---- +s => s + .Term("my-term-suggest", t => t + .MaxEdits(1) + .MaxInspections(2) + .MaxTermFrequency(3) + .MinDocFrequency(4) + .MinWordLength(5) + .PrefixLength(6) + .SuggestMode(SuggestMode.Always) + .Analyzer("standard") + .Field(p => p.Name) + .ShardSize(7) + .Size(8) + .Text("hello world") + ) + .Completion("my-completion-suggest", c => c + .Context(ctx => ctx + .Add("color", Project.Projects.First().Suggest.Context.Values.SelectMany(v => v).First()) + ) + .Fuzzy(f => f + .Fuzziness(Fuzziness.Auto) + .MinLength(1) + .PrefixLength(2) + .Transpositions() + .UnicodeAware(false) + ) + .Analyzer("simple") + .Field(p => 
p.Suggest) + .ShardSize(7) + .Size(8) + .Text(Project.Instance.Name) + ) + .Phrase("my-phrase-suggest", ph => ph + .Collate(c => c + .Query(q => q + .Inline("{ \"match\": { \"{{field_name}}\": \"{{suggestion}}\" }}") + .Params(p => p.Add("field_name", "title")) + ) + .Prune() + ) + .Confidence(10.1) + .DirectGenerator(d => d + .Field(p => p.Description) + ) + .GramSize(1) + .Field(p => p.Name) + .Text("hello world") + .RealWordErrorLikelihood(0.5) + ) +---- + +=== Object Initializer Syntax Example + +[source,csharp] +---- +new SuggestRequest +{ + Suggest = new SuggestContainer + { + { "my-term-suggest", new SuggestBucket + { + Text = "hello world", + Term = new TermSuggester + { + MaxEdits = 1, + MaxInspections = 2, + MaxTermFrequency = 3, + MinDocFrequency = 4, + MinWordLen = 5, + PrefixLen = 6, + SuggestMode = SuggestMode.Always, + Analyzer = "standard", + Field = Field(p=>p.Name), + ShardSize = 7, + Size = 8 + } + } }, + { "my-completion-suggest", new SuggestBucket + { + Text = Project.Instance.Name, + Completion = new CompletionSuggester + { + Context = new Dictionary { { "color", Project.Projects.First().Suggest.Context.Values.SelectMany(v => v).First() } }, + Fuzzy = new FuzzySuggester + { + Fuzziness = Fuzziness.Auto, + MinLength = 1, + PrefixLength = 2, + Transpositions = true, + UnicodeAware = false + }, + Analyzer = "simple", + Field = Field(p=>p.Suggest), + ShardSize = 7, + Size = 8 + } + } }, + { "my-phrase-suggest", new SuggestBucket + { + Text = "hello world", + Phrase = new PhraseSuggester + { + Collate = new PhraseSuggestCollate + { + Query = new InlineScript("{ \"match\": { \"{{field_name}}\": \"{{suggestion}}\" }}") + { + Params = new Dictionary + { + { "field_name", "title" } + } + }, + Prune = true + }, + Confidence = 10.1, + DirectGenerator = new List + { + new DirectGenerator { Field = "description" } + }, + GramSize = 1, + Field = "name", + RealWordErrorLikelihood = 0.5 + } + } }, + } +} +---- + +=== Handling Responses + +Get the suggestions 
for a suggester by indexing into +the `.Suggestions` on the response + +[source,csharp] +---- +var myCompletionSuggest = response.Suggestions["my-completion-suggest"]; + +myCompletionSuggest.Should().NotBeNull(); +var suggest = myCompletionSuggest.First(); +suggest.Text.Should().Be(Project.Instance.Name); +suggest.Length.Should().BeGreaterThan(0); +var option = suggest.Options.First(); +option.Text.Should().NotBeNullOrEmpty(); +option.Score.Should().BeGreaterThan(0); +var payload = option.Payload(); +payload.Should().NotBeNull(); +payload.Name.Should().Be(Project.Instance.Name); +payload.State.Should().NotBeNull(); +---- + diff --git a/docs/asciidoc/timeoutplot.png b/docs/asciidoc/timeoutplot.png new file mode 100644 index 00000000000..ceb819bff0b Binary files /dev/null and b/docs/asciidoc/timeoutplot.png differ diff --git a/src/CodeGeneration/Nest.Litterateur/AsciiDoc/GeneratedAsciidocVisitor.cs b/src/CodeGeneration/Nest.Litterateur/AsciiDoc/GeneratedAsciidocVisitor.cs new file mode 100644 index 00000000000..9e4bacc5ef1 --- /dev/null +++ b/src/CodeGeneration/Nest.Litterateur/AsciiDoc/GeneratedAsciidocVisitor.cs @@ -0,0 +1,263 @@ +#if !DOTNETCORE +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using System.Text.RegularExpressions; +using System.Threading.Tasks; +using AsciiDoc; + +namespace Nest.Litterateur.AsciiDoc +{ + /// + /// Visits the "raw" asciidoc generated using Roslyn and adds attribute entries, + /// section titles, rearranges sections, etc. 
+ /// + public class GeneratedAsciidocVisitor : NoopVisitor + { + private static readonly Dictionary Ids = new Dictionary(); + + private readonly FileInfo _destination; + private Document _newDocument; + private bool _topLevel = true; + + public GeneratedAsciidocVisitor(FileInfo destination) + { + _destination = destination; + } + + public Document Convert(Document document) + { + document.Accept(this); + return _newDocument; + } + + public override void Visit(Document document) + { + _newDocument = new Document + { + Title = document.Title, + DocType = document.DocType + }; + + foreach (var authorInfo in document.Authors) + { + _newDocument.Authors.Add(authorInfo); + } + + foreach (var attributeEntry in document.Attributes) + { + _newDocument.Attributes.Add(attributeEntry); + } + + if (!document.Attributes.Any(a => a.Name == "ref_current")) + { + _newDocument.Attributes.Add(new AttributeEntry("ref_current", "https://www.elastic.co/guide/en/elasticsearch/reference/current")); + } + + if (!document.Attributes.Any(a => a.Name == "github")) + { + _newDocument.Attributes.Add(new AttributeEntry("github", "https://github.com/elastic/elasticsearch-net")); + } + + if (!document.Attributes.Any(a => a.Name == "nuget")) + { + _newDocument.Attributes.Add(new AttributeEntry("nuget", "https://www.nuget.org/packages")); + } + + // see if the document has some kind of top level title and add one with an anchor if not. 
+ if (document.Title == null && document.Elements.Count > 0) + { + var sectionTitle = document.Elements[0] as SectionTitle; + + if (sectionTitle == null || sectionTitle.Level != 2) + { + var id = Path.GetFileNameWithoutExtension(_destination.Name); + var title = id.LowercaseHyphenToPascal(); + sectionTitle = new SectionTitle(title, 2); + sectionTitle.Attributes.Add(new Anchor(id)); + + _newDocument.Elements.Add(sectionTitle); + } + } + + base.Visit(document); + } + + public override void Visit(IList elements) + { + if (_topLevel) + { + _topLevel = false; + Source exampleJson = null; + Source objectInitializerExample = null; + + for (int index = 0; index < elements.Count; index++) + { + var element = elements[index]; + var source = element as Source; + + if (source != null) + { + // remove empty source blocks + if (string.IsNullOrWhiteSpace(source.Text)) + { + continue; + } + + var method = source.Attributes.OfType().FirstOrDefault(a => a.Name == "method"); + if (method == null) + { + _newDocument.Elements.Add(element); + continue; + } + + if ((method.Value == "expectjson" || method.Value == "queryjson") && + source.Attributes.Count > 1 && + source.Attributes[1].Name == "javascript") + { + exampleJson = source; + continue; + } + + // if there is a section title since the last source block, don't add one + var lastSourceBlock = _newDocument.Elements.LastOrDefault(e => e is Source); + var lastSectionTitle = _newDocument.Elements.OfType().LastOrDefault(e => e.Level == 3); + if (lastSourceBlock != null && lastSectionTitle != null) + { + var lastSectionTitleIndex = _newDocument.Elements.IndexOf(lastSectionTitle); + var lastSourceBlockIndex = _newDocument.Elements.IndexOf(lastSourceBlock); + if (lastSectionTitleIndex > lastSourceBlockIndex) + { + _newDocument.Elements.Add(element); + continue; + } + } + + switch (method.Value) + { + case "fluent": + case "queryfluent": + _newDocument.Elements.Add(new SectionTitle("Fluent DSL Example", 3)); + 
_newDocument.Elements.Add(element); + + if (objectInitializerExample != null) + { + _newDocument.Elements.Add(new SectionTitle("Object Initializer Syntax Example", 3)); + _newDocument.Elements.Add(objectInitializerExample); + objectInitializerExample = null; + + if (exampleJson != null) + { + _newDocument.Elements.Add(exampleJson); + exampleJson = null; + } + } + break; + case "initializer": + _newDocument.Elements.Add(new SectionTitle("Object Initializer Syntax Example", 3)); + _newDocument.Elements.Add(element); + // Move the example json to after the initializer example + if (exampleJson != null) + { + _newDocument.Elements.Add(exampleJson); + exampleJson = null; + } + break; + case "queryinitializer": + if (objectInitializerExample != null) + { + _newDocument.Elements.Add(new SectionTitle("Object Initializer Syntax Example", 3)); + _newDocument.Elements.Add(objectInitializerExample); + + // Move the example json to after the initializer example + if (exampleJson != null) + { + _newDocument.Elements.Add(exampleJson); + exampleJson = null; + } + } + else + { + objectInitializerExample = source; + } + break; + case "expectresponse": + _newDocument.Elements.Add(new SectionTitle("Handling Responses", 3)); + _newDocument.Elements.Add(element); + break; + default: + _newDocument.Elements.Add(element); + break; + } + } + else + { + _newDocument.Elements.Add(element); + } + } + } + + base.Visit(elements); + } + + public override void Visit(Source source) + { + if (source.Attributes.Count > 1 && + source.Attributes[1].Name == "javascript" && + !source.Attributes.HasTitle) + { + source.Attributes.Add(new Title("Example json output")); + } + + // remove method attributes as the elastic doc generation doesn't like them; it + // expects a linenumbering in the index 2 position of a source block + var methodAttribute = source.Attributes.FirstOrDefault(a => a.Name == "method"); + if (methodAttribute != null) + { + source.Attributes.Remove(methodAttribute); + } + + // Replace 
tabs with spaces and remove comment escaping from output + // (elastic docs generation does not like this callout format) + source.Text = Regex.Replace(source.Text.Replace("\t", " "), @"//[ \t]*\<(\d+)\>.*", "<$1>"); + + base.Visit(source); + } + + public override void Visit(SectionTitle sectionTitle) + { + if (sectionTitle.Level != 2) + { + base.Visit(sectionTitle); + return; + } + + // Generate an anchor for all Level 2 section titles + if (!sectionTitle.Attributes.HasAnchor) + { + var builder = new StringBuilder(); + using (var writer = new AsciiDocVisitor(new StringWriter(builder))) + { + writer.Visit(sectionTitle.Elements); + } + + var title = builder.ToString().PascalToHyphen(); + sectionTitle.Attributes.Add(new Anchor(title)); + } + + // Check for duplicate ids across documents + var key = sectionTitle.Attributes.Anchor.Id; + string existingFile; + if (Ids.TryGetValue(key, out existingFile)) + { + throw new Exception($"duplicate id {key} in {_destination.FullName}. Id already exists in {existingFile}"); + } + + Ids.Add(key, _destination.FullName); + base.Visit(sectionTitle); + } + } +} +#endif diff --git a/src/CodeGeneration/Nest.Litterateur/AsciiDoc/RawAsciidocVisitor.cs b/src/CodeGeneration/Nest.Litterateur/AsciiDoc/RawAsciidocVisitor.cs new file mode 100644 index 00000000000..59df54a14f1 --- /dev/null +++ b/src/CodeGeneration/Nest.Litterateur/AsciiDoc/RawAsciidocVisitor.cs @@ -0,0 +1,95 @@ +#if !DOTNETCORE +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using AsciiDoc; + +namespace Nest.Litterateur.AsciiDoc +{ + /// + /// Visits raw asciidoc files (i.e. 
not generated) to make modifications + /// + public class RawAsciidocVisitor : NoopVisitor + { + private readonly FileInfo _destination; + + private static readonly Dictionary IncludeDirectories = new Dictionary + { + { "aggregations.asciidoc", "aggregations-usage.asciidoc" }, + { "query-dsl.asciidoc", "query-dsl-usage.asciidoc" } + }; + + public RawAsciidocVisitor(FileInfo destination) + { + _destination = destination; + } + + public override void Visit(Document document) + { + // check if this document has generated includes to other files + var includeAttribute = document.Attributes.FirstOrDefault(a => a.Name == "includes-from-dirs"); + + if (includeAttribute != null) + { + var thisFileUri = new Uri(_destination.FullName); + var directories = includeAttribute.Value.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries); + + foreach (var directory in directories) + { + foreach (var file in Directory.EnumerateFiles(Path.Combine(Program.OutputDirPath, directory), "*.asciidoc", SearchOption.AllDirectories)) + { + var fileInfo = new FileInfo(file); + var referencedFileUri = new Uri(fileInfo.FullName); + var relativePath = thisFileUri.MakeRelativeUri(referencedFileUri); + var include = new Include(relativePath.OriginalString); + + document.Elements.Add(include); + } + } + } + + base.Visit(document); + } + + public override void Visit(Open open) + { + // include links to all the query dsl usage and aggregation usage pages on the landing query dsl and aggregations pages, respectively. 
+ string usageFilePath; + if (IncludeDirectories.TryGetValue(_destination.Name, out usageFilePath)) + { + var usageDoc = Document.Load(Path.Combine(Program.OutputDirPath, usageFilePath)); + + var includeAttribute = usageDoc.Attributes.FirstOrDefault(a => a.Name == "includes-from-dirs"); + + if (includeAttribute != null) + { + var directories = includeAttribute.Value.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries); + + foreach (var directory in directories) + { + foreach (var file in Directory.EnumerateFiles(Path.Combine(Program.OutputDirPath, directory), "*usage.asciidoc", SearchOption.AllDirectories)) + { + var fileInfo = new FileInfo(file); + var fileNameWithoutExtension = Path.GetFileNameWithoutExtension(fileInfo.Name); + + var listItem = new UnorderedListItem + { + Elements = + { + new Paragraph( + new InternalAnchor(fileNameWithoutExtension, fileNameWithoutExtension.LowercaseHyphenToPascal())) + } + }; + + open.Elements.Add(listItem); + } + } + } + } + + base.Visit(open); + } + } +} +#endif \ No newline at end of file diff --git a/src/CodeGeneration/Nest.Litterateur/Documentation/Blocks/CodeBlock.cs b/src/CodeGeneration/Nest.Litterateur/Documentation/Blocks/CodeBlock.cs index f305486bf12..c94d317fb4d 100644 --- a/src/CodeGeneration/Nest.Litterateur/Documentation/Blocks/CodeBlock.cs +++ b/src/CodeGeneration/Nest.Litterateur/Documentation/Blocks/CodeBlock.cs @@ -1,13 +1,44 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.RegularExpressions; + namespace Nest.Litterateur.Documentation.Blocks { public class CodeBlock : IDocumentationBlock { - public string Value { get; } - public int LineNumber { get; } - public CodeBlock(string lineOfCode, int lineNumber) + public CodeBlock(string lineOfCode, int lineNumber, Language language, string propertyOrMethodName) { - Value = lineOfCode.Trim(); + Value = ExtractCallOutsFromText(lineOfCode); LineNumber = lineNumber; + Language = language; + PropertyName = 
propertyOrMethodName?.ToLowerInvariant(); + } + + public List CallOuts { get; } = new List(); + + public Language Language { get; set; } + + public int LineNumber { get; } + + public string PropertyName { get; set; } + + public string Value { get; } + + private string ExtractCallOutsFromText(string lineOfCode) + { + var matches = Regex.Matches(lineOfCode, @"//[ \t]*(?\<\d+\>)[ \t]*(?\S.*)"); + foreach (Match match in matches) + { + CallOuts.Add($"{match.Groups["callout"].Value} {match.Groups["text"].Value}"); + } + + if (CallOuts.Any()) + { + lineOfCode = Regex.Replace(lineOfCode, @"//[ \t]*\<(\d+)\>.*", "//<$1>"); + } + + return lineOfCode.Trim(); } } } \ No newline at end of file diff --git a/src/CodeGeneration/Nest.Litterateur/Documentation/Blocks/CombinedBlock.cs b/src/CodeGeneration/Nest.Litterateur/Documentation/Blocks/CombinedBlock.cs index a40927f0fc2..ac880574611 100644 --- a/src/CodeGeneration/Nest.Litterateur/Documentation/Blocks/CombinedBlock.cs +++ b/src/CodeGeneration/Nest.Litterateur/Documentation/Blocks/CombinedBlock.cs @@ -4,7 +4,7 @@ namespace Nest.Litterateur.Documentation.Blocks { /// /// Used to keep a line of code (could be multiple e.g fluent syntax) and its annotations in one logical unit. 
- /// So they do not suffer from reoordering based on line number when writing out the documentation + /// So they do not suffer from reordering based on line number when writing out the documentation /// public class CombinedBlock : IDocumentationBlock { diff --git a/src/CodeGeneration/Nest.Litterateur/Documentation/Files/CSharpDocumentationFile.cs b/src/CodeGeneration/Nest.Litterateur/Documentation/Files/CSharpDocumentationFile.cs index 2905f4f8a89..f74e1f25535 100644 --- a/src/CodeGeneration/Nest.Litterateur/Documentation/Files/CSharpDocumentationFile.cs +++ b/src/CodeGeneration/Nest.Litterateur/Documentation/Files/CSharpDocumentationFile.cs @@ -1,4 +1,5 @@ -using System.Collections.Generic; +using System; +using System.Collections.Generic; using System.IO; using System.Linq; using System.Text; @@ -6,49 +7,134 @@ using Nest.Litterateur.Documentation.Blocks; using Nest.Litterateur.Walkers; +#if !DOTNETCORE +using AsciiDoc; +using Nest.Litterateur.AsciiDoc; +#endif + namespace Nest.Litterateur.Documentation.Files { public class CSharpDocumentationFile : DocumentationFile { - internal CSharpDocumentationFile(FileInfo fileLocation) : base(fileLocation) { } + internal CSharpDocumentationFile(FileInfo fileLocation) : base(fileLocation) + { + } + + private string RenderBlocksToDocumentation(IEnumerable blocks) + { + var builder = new StringBuilder(); + var lastBlockWasCodeBlock = false; + var callouts = new List(); + Language? 
language = null; + string propertyOrMethodName = null; + + RenderBlocksToDocumentation(blocks, builder, ref lastBlockWasCodeBlock, ref callouts, ref language, ref propertyOrMethodName); + if (lastBlockWasCodeBlock) + { + builder.AppendLine("----"); + foreach (var callout in callouts) + { + builder.AppendLine(callout); + } + } + return builder.ToString(); + } - private string RenderBlocksToDocumentation(IEnumerable blocks, StringBuilder builder = null) + private void RenderBlocksToDocumentation( + IEnumerable blocks, + StringBuilder builder, + ref bool lastBlockWasCodeBlock, + ref List callouts, + ref Language? language, + ref string propertyOrMethodName) { - var sb = builder ?? new StringBuilder(); foreach (var block in blocks) { if (block is TextBlock) { - sb.AppendLine(block.Value); + if (lastBlockWasCodeBlock) + { + lastBlockWasCodeBlock = false; + builder.AppendLine("----"); + if (callouts.Any()) + { + foreach (var callout in callouts) + { + builder.AppendLine(callout); + } + builder.AppendLine(); + callouts = new List(); + } + } + + builder.AppendLine(block.Value); } else if (block is CodeBlock) { - sb.AppendLine("[source, csharp]"); - sb.AppendLine("----"); - sb.AppendLine(block.Value); - sb.AppendLine("----"); + var codeBlock = (CodeBlock)block; + + // don't write different language code blocks in the same delimited source block + if (lastBlockWasCodeBlock && (codeBlock.Language != language || codeBlock.PropertyName != propertyOrMethodName)) + { + lastBlockWasCodeBlock = false; + builder.AppendLine("----"); + if (callouts.Any()) + { + foreach (var callout in callouts) + { + builder.AppendLine(callout); + } + builder.AppendLine(); + callouts = new List(); + } + } + + if (!lastBlockWasCodeBlock) + { + builder.AppendLine($"[source,{codeBlock.Language.ToString().ToLowerInvariant()},method=\"{codeBlock.PropertyName ?? 
"unknown"}\"]"); + builder.AppendLine("----"); + } + else + { + builder.AppendLine(); + } + + builder.AppendLine(codeBlock.Value); + + // add call outs here to write out when closing the block + callouts.AddRange(codeBlock.CallOuts); + lastBlockWasCodeBlock = true; + language = codeBlock.Language; + propertyOrMethodName = codeBlock.PropertyName; } else if (block is CombinedBlock) { - RenderBlocksToDocumentation(MergeAdjacentCodeBlocks(((CombinedBlock)block).Blocks), sb); + var mergedBlocks = MergeAdjacentCodeBlocks(((CombinedBlock)block).Blocks); + RenderBlocksToDocumentation(mergedBlocks, builder, ref lastBlockWasCodeBlock, ref callouts, ref language, ref propertyOrMethodName); } } - return sb.ToString(); } private List MergeAdjacentCodeBlocks(IEnumerable unmergedBlocks) { var blocks = new List(); List collapseCodeBlocks = null; + List collapseCallouts = null; int lineNumber = 0; + Language? language = null; + string propertyOrMethodName = null; + foreach (var b in unmergedBlocks) { - //if current block is not a code block and we;ve been collapsing code blocks - //at this point close that buffre and add a new codeblock + //if current block is not a code block and we've been collapsing code blocks + //at this point close that buffer and add a new codeblock if (!(b is CodeBlock) && collapseCodeBlocks != null) { - blocks.Add(new CodeBlock(string.Join("\r\n", collapseCodeBlocks), lineNumber)); + var block = new CodeBlock(string.Join(Environment.NewLine, collapseCodeBlocks), lineNumber, language.Value, propertyOrMethodName); + block.CallOuts.AddRange(collapseCallouts); + blocks.Add(block); collapseCodeBlocks = null; + collapseCallouts = null; } //if not a codeblock simply add it to the final list @@ -57,23 +143,42 @@ private List MergeAdjacentCodeBlocks(IEnumerable(); - collapseCodeBlocks.Add(b.Value); - lineNumber = b.LineNumber; + if (collapseCallouts == null) collapseCallouts = new List(); + + var codeBlock = (CodeBlock)b; + + if ((language != null && 
codeBlock.Language != language) || + (propertyOrMethodName != null && codeBlock.PropertyName != propertyOrMethodName)) + { + blocks.Add(codeBlock); + continue; + } + + language = codeBlock.Language; + propertyOrMethodName = codeBlock.PropertyName; + collapseCodeBlocks.Add(codeBlock.Value); + collapseCallouts.AddRange(codeBlock.CallOuts); + + lineNumber = codeBlock.LineNumber; } //make sure we flush our code buffer if (collapseCodeBlocks != null) - blocks.Add(new CodeBlock(string.Join("\r\n", collapseCodeBlocks), lineNumber)); + { + var joinedCodeBlock = new CodeBlock(string.Join(Environment.NewLine, collapseCodeBlocks), lineNumber, language.Value, propertyOrMethodName); + joinedCodeBlock.CallOuts.AddRange(collapseCallouts); + blocks.Add(joinedCodeBlock); + } return blocks; } public override void SaveToDocumentationFolder() { var code = File.ReadAllText(this.FileLocation.FullName); - var ast = CSharpSyntaxTree.ParseText(code); + var walker = new DocumentationFileWalker(); walker.Visit(ast.GetRoot()); var blocks = walker.Blocks.OrderBy(b => b.LineNumber).ToList(); @@ -81,9 +186,29 @@ public override void SaveToDocumentationFolder() var mergedBlocks = MergeAdjacentCodeBlocks(blocks); var body = this.RenderBlocksToDocumentation(mergedBlocks); + var docFile = this.CreateDocumentationLocation(); + +#if !DOTNETCORE + CleanDocumentAndWriteToFile(body, docFile); +#else + File.WriteAllText(docFile.FullName, body); +#endif + } - var docFileName = this.CreateDocumentationLocation(); - File.WriteAllText(docFileName.FullName, body); +#if !DOTNETCORE + private void CleanDocumentAndWriteToFile(string body, FileInfo docFile) + { + // tidy up the asciidoc + var document = Document.Parse(body); + + // add attributes and write to destination + using (var file = new StreamWriter(docFile.FullName)) + { + var visitor = new GeneratedAsciidocVisitor(docFile); + document = visitor.Convert(document); + document.Accept(new AsciiDocVisitor(file)); + } } +#endif } } diff --git 
a/src/CodeGeneration/Nest.Litterateur/Documentation/Files/DocumentationFile.cs b/src/CodeGeneration/Nest.Litterateur/Documentation/Files/DocumentationFile.cs index 4d8faf0aa90..381cbf949db 100644 --- a/src/CodeGeneration/Nest.Litterateur/Documentation/Files/DocumentationFile.cs +++ b/src/CodeGeneration/Nest.Litterateur/Documentation/Files/DocumentationFile.cs @@ -1,6 +1,9 @@ using System; +using System.Collections.Generic; using System.IO; +using System.Reflection.Emit; using System.Text.RegularExpressions; +using Nest.Litterateur; namespace Nest.Litterateur.Documentation.Files { @@ -26,7 +29,9 @@ public static DocumentationFile Load(FileInfo fileLocation) return new CSharpDocumentationFile(fileLocation); case ".gif": case ".jpg": + case ".jpeg": case ".png": + return new ImageDocumentationFile(fileLocation); case ".asciidoc": return new RawDocumentationFile(fileLocation); } @@ -37,13 +42,19 @@ public static DocumentationFile Load(FileInfo fileLocation) protected virtual FileInfo CreateDocumentationLocation() { var testFullPath = this.FileLocation.FullName; - var testInDocumenationFolder = Regex.Replace(testFullPath, @"(^.+\\Tests\\|\" + this.Extension + "$)", "") + ".asciidoc"; - var documenationTargetPath = Path.GetFullPath(Path.Combine(Program.OutputFolder, testInDocumenationFolder)); - var fileInfo = new FileInfo(documenationTargetPath); + var testInDocumentationFolder = + Regex.Replace(testFullPath, @"(^.+\\Tests\\|\" + this.Extension + "$)", "") + .TrimEnd(".doc") + .TrimEnd("Tests") + .PascalToHyphen() + ".asciidoc"; + + var documentationTargetPath = Path.GetFullPath(Path.Combine(Program.OutputDirPath, testInDocumentationFolder)); + var fileInfo = new FileInfo(documentationTargetPath); if (fileInfo.Directory != null) Directory.CreateDirectory(fileInfo.Directory.FullName); + return fileInfo; - } + } } } \ No newline at end of file diff --git a/src/CodeGeneration/Nest.Litterateur/Documentation/Files/ImageDocumentationFile.cs 
b/src/CodeGeneration/Nest.Litterateur/Documentation/Files/ImageDocumentationFile.cs new file mode 100644 index 00000000000..ee958349255 --- /dev/null +++ b/src/CodeGeneration/Nest.Litterateur/Documentation/Files/ImageDocumentationFile.cs @@ -0,0 +1,36 @@ +using System.IO; +using System.Text.RegularExpressions; + +namespace Nest.Litterateur.Documentation.Files +{ + public class ImageDocumentationFile : DocumentationFile + { + public ImageDocumentationFile(FileInfo fileLocation) : base(fileLocation) { } + + public override void SaveToDocumentationFolder() + { + var docFileName = this.CreateDocumentationLocation(); + + // copy for asciidoc to work (path is relative to file) + this.FileLocation.CopyTo(docFileName.FullName, true); + + // copy to the root as well, for the doc generation process (path is relative to root) + this.FileLocation.CopyTo(Path.Combine(Program.OutputDirPath, docFileName.Name), true); + } + + protected override FileInfo CreateDocumentationLocation() + { + var testFullPath = this.FileLocation.FullName; + + var testInDocumenationFolder = Regex.Replace(testFullPath, @"(^.+\\Tests\\|\" + this.Extension + "$)", "") + .PascalToHyphen() + this.Extension; + + var documentationTargetPath = Path.GetFullPath(Path.Combine(Program.OutputDirPath, testInDocumenationFolder)); + + var fileInfo = new FileInfo(documentationTargetPath); + if (fileInfo.Directory != null) + Directory.CreateDirectory(fileInfo.Directory.FullName); + return fileInfo; + } + } +} diff --git a/src/CodeGeneration/Nest.Litterateur/Documentation/Files/RawDocumentationFile.cs b/src/CodeGeneration/Nest.Litterateur/Documentation/Files/RawDocumentationFile.cs index eb1a4d01550..1f7c453bdc8 100644 --- a/src/CodeGeneration/Nest.Litterateur/Documentation/Files/RawDocumentationFile.cs +++ b/src/CodeGeneration/Nest.Litterateur/Documentation/Files/RawDocumentationFile.cs @@ -1,5 +1,11 @@ +using System; using System.IO; +using System.Linq; using System.Text.RegularExpressions; +#if !DOTNETCORE +using 
AsciiDoc; +using Nest.Litterateur.AsciiDoc; +#endif namespace Nest.Litterateur.Documentation.Files { @@ -11,15 +17,30 @@ public override void SaveToDocumentationFolder() { //we simply do a copy of the markdown file var docFileName = this.CreateDocumentationLocation(); + +#if !DOTNETCORE + var document = Document.Load(FileLocation.FullName); + + // make any modifications + var rawVisitor = new RawAsciidocVisitor(FileLocation); + document.Accept(rawVisitor); + + // write out asciidoc to file + using (var visitor = new AsciiDocVisitor(docFileName.FullName)) + { + document.Accept(visitor); + } +#else this.FileLocation.CopyTo(docFileName.FullName, true); +#endif } protected override FileInfo CreateDocumentationLocation() { var testFullPath = this.FileLocation.FullName; - var testInDocumenationFolder = Regex.Replace(testFullPath, @"(^.+\\Tests\\|\" + this.Extension + "$)", "") + this.Extension; + var testInDocumenationFolder = Regex.Replace(testFullPath, @"(^.+\\Tests\\|\" + this.Extension + "$)", "").PascalToHyphen() + this.Extension; - var documenationTargetPath = Path.GetFullPath(Path.Combine(Program.OutputFolder, testInDocumenationFolder)); + var documenationTargetPath = Path.GetFullPath(Path.Combine(Program.OutputDirPath, testInDocumenationFolder)); var fileInfo = new FileInfo(documenationTargetPath); if (fileInfo.Directory != null) Directory.CreateDirectory(fileInfo.Directory.FullName); diff --git a/src/CodeGeneration/Nest.Litterateur/EnumerableExtensions.cs b/src/CodeGeneration/Nest.Litterateur/EnumerableExtensions.cs index 7f67ed50c2f..9c82fffdb04 100644 --- a/src/CodeGeneration/Nest.Litterateur/EnumerableExtensions.cs +++ b/src/CodeGeneration/Nest.Litterateur/EnumerableExtensions.cs @@ -1,5 +1,4 @@ -using System; -using System.Collections.Generic; +using System.Collections.Generic; using System.Linq; namespace Nest.Litterateur diff --git a/src/CodeGeneration/Nest.Litterateur/Language.cs b/src/CodeGeneration/Nest.Litterateur/Language.cs new file mode 100644 index 
00000000000..07dea31177f --- /dev/null +++ b/src/CodeGeneration/Nest.Litterateur/Language.cs @@ -0,0 +1,8 @@ +namespace Nest.Litterateur +{ + public enum Language + { + CSharp, + JavaScript + } +} \ No newline at end of file diff --git a/src/CodeGeneration/Nest.Litterateur/Linker/Linker.cs b/src/CodeGeneration/Nest.Litterateur/Linker/Linker.cs deleted file mode 100644 index 80578ed3a1b..00000000000 --- a/src/CodeGeneration/Nest.Litterateur/Linker/Linker.cs +++ /dev/null @@ -1,16 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Threading.Tasks; - -namespace Nest.Litterateur.Linker -{ - /// - /// Goes over the generated docs, does heuristical touchups and writes outs ascii docs links at the bottom of files - /// - public class Linker - { - - } -} diff --git a/src/CodeGeneration/Nest.Litterateur/LitUp.cs b/src/CodeGeneration/Nest.Litterateur/LitUp.cs index d4256b43960..17d7588dd59 100644 --- a/src/CodeGeneration/Nest.Litterateur/LitUp.cs +++ b/src/CodeGeneration/Nest.Litterateur/LitUp.cs @@ -1,5 +1,6 @@ using System; using System.Collections.Generic; +using System.Diagnostics; using System.IO; using System.Linq; using Nest.Litterateur.Documentation.Files; @@ -9,8 +10,9 @@ namespace Nest.Litterateur public static class LitUp { private static readonly string[] SkipFolders = { "Nest.Tests.Literate", "Debug", "Release" }; - public static IEnumerable InputFiles(string extension) => - from f in Directory.GetFiles(Program.InputFolder, $"*.{extension}", SearchOption.AllDirectories) + + public static IEnumerable InputFiles(string path) => + from f in Directory.GetFiles(Program.InputDirPath, $"{path}", SearchOption.AllDirectories) let dir = new DirectoryInfo(f) where dir?.Parent != null && !SkipFolders.Contains(dir.Parent.Name) select DocumentationFile.Load(new FileInfo(f)); @@ -19,17 +21,29 @@ public static IEnumerable> Input { get { - yield return InputFiles("doc.cs"); - yield return InputFiles("asciidoc"); - 
yield return InputFiles("ping"); - yield return InputFiles("gif"); + yield return InputFiles("*.doc.cs"); + yield return InputFiles("*UsageTests.cs"); + yield return InputFiles("*.png"); + yield return InputFiles("*.gif"); + yield return InputFiles("*.jpg"); + // process asciidocs last as they may have generated + // includes to other output asciidocs + yield return InputFiles("*.asciidoc"); } } public static void Go(string[] args) { - foreach (var file in Input.SelectMany(s=>s)) + foreach (var file in Input.SelectMany(s => s)) + { file.SaveToDocumentationFolder(); + } + +#if !DOTNETCORE + if (Debugger.IsAttached) + Console.WriteLine("Press any key to continue..."); + Console.ReadKey(); +#endif } } } \ No newline at end of file diff --git a/src/CodeGeneration/Nest.Litterateur/Program.cs b/src/CodeGeneration/Nest.Litterateur/Program.cs index 8ecdf2ab64d..5e42aa21701 100644 --- a/src/CodeGeneration/Nest.Litterateur/Program.cs +++ b/src/CodeGeneration/Nest.Litterateur/Program.cs @@ -1,32 +1,27 @@ using System.IO; -using Nest.Litterateur.Documentation; namespace Nest.Litterateur { public static class Program { - private static string DefaultTestFolder; - private static string DefaultDocFolder; - static Program() { var currentDirectory = new DirectoryInfo(Directory.GetCurrentDirectory()); if (currentDirectory.Name == "Nest.Litterateur" && currentDirectory.Parent.Name == "CodeGeneration") { - DefaultTestFolder = @"..\..\Tests"; - DefaultDocFolder = @"..\..\..\docs\asciidoc"; + InputDirPath = @"..\..\Tests"; + OutputDirPath = @"..\..\..\docs\asciidoc"; } else { - DefaultTestFolder = @"..\..\..\..\..\src\Tests"; - DefaultDocFolder = @"..\..\..\..\..\docs\asciidoc"; + InputDirPath = @"..\..\..\..\..\src\Tests"; + OutputDirPath = @"..\..\..\..\..\docs\asciidoc"; } } - public static string InputFolder => DefaultTestFolder; + public static string InputDirPath { get; } - - public static string OutputFolder => DefaultDocFolder; + public static string OutputDirPath { get; } 
static void Main(string[] args) => LitUp.Go(args); } diff --git a/src/CodeGeneration/Nest.Litterateur/StringExtensions.cs b/src/CodeGeneration/Nest.Litterateur/StringExtensions.cs new file mode 100644 index 00000000000..ea279952f55 --- /dev/null +++ b/src/CodeGeneration/Nest.Litterateur/StringExtensions.cs @@ -0,0 +1,219 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.IO; +using System.Linq; +using System.Reflection; +using System.Text; +using System.Text.RegularExpressions; +using Microsoft.CodeAnalysis; +using Microsoft.CodeAnalysis.CSharp; +using Newtonsoft.Json; + +namespace Nest.Litterateur +{ + public static class StringExtensions + { + public static string PascalToHyphen(this string input) + { + if (string.IsNullOrEmpty(input)) return string.Empty; + + return Regex.Replace( + Regex.Replace( + Regex.Replace(input, @"([A-Z]+)([A-Z][a-z])", "$1-$2"), @"([a-z\d])([A-Z])", "$1-$2") + , @"[-\s]", "-").TrimEnd('-').ToLower(); + } + + public static string LowercaseHyphenToPascal(this string lowercaseHyphenatedInput) + { + return Regex.Replace(lowercaseHyphenatedInput.Replace("-", " "), @"\b([a-z])", m => m.Captures[0].Value.ToUpper()); + } + + public static string TrimEnd(this string input, string trim) + { + if (string.IsNullOrEmpty(input)) return string.Empty; + + return input.EndsWith(trim, StringComparison.OrdinalIgnoreCase) + ? 
input.Substring(0, input.Length - trim.Length) + : input; + } + + public static string RemoveLeadingAndTrailingMultiLineComments(this string input) + { + var match = Regex.Match(input, @"^(?[ \t]*\/\*)"); + + if (match.Success) + { + input = input.Substring(match.Groups["value"].Value.Length); + } + + match = Regex.Match(input, @"(?\*\/[ \t]*)$"); + + if (match.Success) + { + input = input.Substring(0, input.Length - match.Groups["value"].Value.Length); + } + + return input; + } + + public static string RemoveLeadingSpacesAndAsterisk(this string input) + { + var match = Regex.Match(input, @"^(?[ \t]*\*\s?).*"); + + if (match.Success) + { + input = input.Substring(match.Groups["value"].Value.Length); + } + + return input; + } + + public static string RemoveNumberOfLeadingTabsAfterNewline(this string input, int numberOfTabs) + { + var firstTab = input.IndexOf("\t", StringComparison.OrdinalIgnoreCase); + + if (firstTab == -1) + { + return input; + } + int count = 0; + char firstNonTabCharacter = Char.MinValue; + + for (int i = firstTab; i < input.Length; i++) + { + if (input[i] != '\t') + { + firstNonTabCharacter = input[i]; + count = i - firstTab; + break; + } + } + + if (firstNonTabCharacter == '{' && numberOfTabs != count) + { + numberOfTabs = count; + } + + return Regex.Replace( + Regex.Replace( + input, + $"(?[\n|\r\n]+\t{{{numberOfTabs}}})", + m => m.Value.Replace("\t", string.Empty) + ), + $"(?[\n|\r\n]+\\s{{{numberOfTabs * 4}}})", + m => m.Value.Replace(" ", string.Empty) + ); + } + + public static string[] SplitOnNewLines(this string input, StringSplitOptions options) + { + return input.Split(new[] { "\r\n", "\n" }, options); + } + +#if !DOTNETCORE + // TODO: Hack of replacements in anonymous types that represent json. This can be resolved by referencing tests assembly when building the dynamic assembly, + // but might want to put doc generation at same directory level as Tests to reference project directly. 
+ private static Dictionary Substitutions = new Dictionary + { + { "FixedDate", "new DateTime(2015, 06, 06, 12, 01, 02, 123)" }, + { "FirstNameToFind", "\"pierce\"" }, + { "Project.Projects.First().Suggest.Context.Values.SelectMany(v => v).First()", "\"red\"" }, + { "Project.Instance.Name", "\"Durgan LLC\"" }, + { "Project.InstanceAnonymous", "new {name = \"Koch, Collier and Mohr\", state = \"BellyUp\",startedOn = " + + "\"2015-01-01T00:00:00\",lastActivity = \"0001-01-01T00:00:00\",leadDeveloper = " + + "new { gender = \"Male\", id = 0, firstName = \"Martijn\", lastName = \"Laarman\" }," + + "location = new { lat = 42.1523, lon = -80.321 }}" }, + { "_templateString", "\"{ \\\"match\\\": { \\\"text\\\": \\\"{{query_string}}\\\" } }\"" }, + { "base.QueryJson", "new{ @bool = new { must = new[] { new { match_all = new { } } }, must_not = new[] { new { match_all = new { } } }, should = new[] { new { match_all = new { } } }, filter = new[] { new { match_all = new { } } }, minimum_should_match = 1, boost = 2.0, } }" }, + { "ExpectedTerms", "new [] { \"term1\", \"term2\" }" }, + { "_ctxNumberofCommits", "\"_source.numberOfCommits > 0\"" } + }; + + public static bool TryGetJsonForAnonymousType(this string anonymousTypeString, out string json) + { + json = null; + + foreach (var substitution in Substitutions) + { + anonymousTypeString = anonymousTypeString.Replace(substitution.Key, substitution.Value); + } + + var text = + $@" + using System; + using System.Collections.Generic; + using System.ComponentModel; + using Newtonsoft.Json; + using Newtonsoft.Json.Linq; + + namespace Temporary + {{ + public class Json + {{ + public string Write() + {{ + var o = {anonymousTypeString}; + var json = JsonConvert.SerializeObject(o, Formatting.Indented); + return json; + }} + }} + }}"; + + var syntaxTree = CSharpSyntaxTree.ParseText(text); + var assemblyName = Path.GetRandomFileName(); + var references = new MetadataReference[] + { + 
MetadataReference.CreateFromFile(typeof(object).GetTypeInfo().Assembly.Location), + MetadataReference.CreateFromFile(typeof(Enumerable).GetTypeInfo().Assembly.Location), + MetadataReference.CreateFromFile(typeof(JsonConvert).GetTypeInfo().Assembly.Location), + MetadataReference.CreateFromFile(typeof(ITypedList).GetTypeInfo().Assembly.Location), + }; + + var compilation = + CSharpCompilation.Create( + assemblyName, + new[] { syntaxTree }, + references, + new CSharpCompilationOptions(OutputKind.DynamicallyLinkedLibrary)); + + using (var ms = new MemoryStream()) + { + var result = compilation.Emit(ms); + + if (!result.Success) + { + var failures = result.Diagnostics.Where(diagnostic => + diagnostic.IsWarningAsError || + diagnostic.Severity == DiagnosticSeverity.Error); + + var builder = new StringBuilder($"Unable to serialize: {anonymousTypeString}"); + foreach (var diagnostic in failures) + { + builder.AppendLine($"{diagnostic.Id}: {diagnostic.GetMessage()}"); + } + builder.AppendLine(new string('-', 30)); + + Console.Error.WriteLine(builder.ToString()); + return false; + } + + ms.Seek(0, SeekOrigin.Begin); + + var assembly = Assembly.Load(ms.ToArray()); + var type = assembly.GetType("Temporary.Json"); + var obj = Activator.CreateInstance(type); + + var output = type.InvokeMember("Write", + BindingFlags.Default | BindingFlags.InvokeMethod, + null, + obj, + new object[] { }); + + json = output.ToString(); + return true; + } + } +#endif + } +} diff --git a/src/CodeGeneration/Nest.Litterateur/Walkers/CodeWithDocumentationWalker.cs b/src/CodeGeneration/Nest.Litterateur/Walkers/CodeWithDocumentationWalker.cs index 2af708577a0..4854669fd34 100644 --- a/src/CodeGeneration/Nest.Litterateur/Walkers/CodeWithDocumentationWalker.cs +++ b/src/CodeGeneration/Nest.Litterateur/Walkers/CodeWithDocumentationWalker.cs @@ -4,33 +4,42 @@ using Nest.Litterateur.Documentation; using System; using System.Collections.Generic; +using System.IO; using System.Linq; +using System.Reflection; 
using System.Text; using System.Text.RegularExpressions; +using Microsoft.CodeAnalysis.Emit; using Nest.Litterateur.Documentation.Blocks; namespace Nest.Litterateur.Walkers { class CodeWithDocumentationWalker : CSharpSyntaxWalker { - public List Blocks { get; } = new List(); - public List TextBlocks { get; } = new List(); - private bool _firstVisit = true; private string _code; + private readonly string _propertyOrMethodName; + public int ClassDepth { get; } + + public List Blocks { get; } = new List(); + + public List TextBlocks { get; } = new List(); + private readonly int? _lineNumberOverride; /// /// We want to support inlining /** */ documentations because its super handy /// to document fluent code, what ensues is total hackery /// - /// + /// the depth of the class /// line number used for sorting - public CodeWithDocumentationWalker(int classDepth = 1, int? lineNumber = null) : base(SyntaxWalkerDepth.StructuredTrivia) + /// the name of the property that we are walking + public CodeWithDocumentationWalker(int classDepth = 1, int? lineNumber = null, string propertyOrMethodName = null) : base(SyntaxWalkerDepth.StructuredTrivia) { ClassDepth = classDepth; _lineNumberOverride = lineNumber; + _propertyOrMethodName = propertyOrMethodName; } public override void Visit(SyntaxNode node) @@ -40,25 +49,38 @@ public override void Visit(SyntaxNode node) _firstVisit = false; var repeatedTabs = 2 + ClassDepth; + var language = Language.CSharp; _code = node.WithoutLeadingTrivia().WithTrailingTrivia().ToFullString(); + _code = _code.RemoveNumberOfLeadingTabsAfterNewline(repeatedTabs); - // find x or more repeated tabs and trim x number of tabs from the start - _code = Regex.Replace(_code, $"\t{{{repeatedTabs},}}", match => match.Value.Substring(repeatedTabs)); +#if !DOTNETCORE + if (_propertyOrMethodName == "ExpectJson" || _propertyOrMethodName == "QueryJson") + { + // try to get the json for the anonymous type. + // Only supports system types and Json.Net LINQ objects e.g. 
JObject + string json; + if (_code.TryGetJsonForAnonymousType(out json)) + { + language = Language.JavaScript; + _code = json; + } + } +#endif + // TODO: Can do this once we get the generic arguments from the Property declaration + //if (_propertyName == "Fluent") + //{ + // // need to know what type we're operating on + // _code += $"client.Search({_code});"; + //} var nodeLine = node.SyntaxTree.GetLineSpan(node.Span).StartLinePosition.Line; - var line = _lineNumberOverride ?? nodeLine; - - var codeBlocks = Regex.Split(_code, @"\/\*\*.*?\*\/", RegexOptions.Singleline) - .Select(b => b.TrimStart('\r', '\n').TrimEnd('\r', '\n', '\t')) - .Where(b => !string.IsNullOrEmpty(b) && b != ";") - .Select(b=>new CodeBlock(b, line)) - .ToList(); + var codeBlocks = ParseCodeBlocks(_code, line, language, _propertyOrMethodName); base.Visit(node); - var nodeHasLeadingTriva = node.HasLeadingTrivia && node.GetLeadingTrivia() - .Any(c=>c.Kind() == SyntaxKind.MultiLineDocumentationCommentTrivia); + var nodeHasLeadingTriva = node.HasLeadingTrivia && + node.GetLeadingTrivia().Any(c => c.Kind() == SyntaxKind.MultiLineDocumentationCommentTrivia); var blocks = codeBlocks.Intertwine(this.TextBlocks, swap: nodeHasLeadingTriva); this.Blocks.Add(new CombinedBlock(blocks, line)); return; @@ -74,20 +96,15 @@ public override void VisitBlock(BlockSyntax node) _firstVisit = false; foreach (var statement in node.Statements) { - var leadingTabs = new string('\t', 3 + ClassDepth); + var repeatedTabs = 3 + ClassDepth; SyntaxNode formattedStatement = statement; - _code = formattedStatement.WithoutLeadingTrivia().WithTrailingTrivia().ToFullString().Replace(leadingTabs, string.Empty); + _code = formattedStatement.WithoutLeadingTrivia().WithTrailingTrivia().ToFullString(); + _code = _code.RemoveNumberOfLeadingTabsAfterNewline(repeatedTabs); var nodeLine = formattedStatement.SyntaxTree.GetLineSpan(node.Span).StartLinePosition.Line; - var line = _lineNumberOverride ?? 
nodeLine; - - var codeBlocks = Regex.Split(_code, @"\/\*\*.*?\*\/", RegexOptions.Singleline) - .Select(b => b.TrimStart('\r', '\n').TrimEnd('\r', '\n', '\t')) - .Where(b => !string.IsNullOrEmpty(b) && b != ";") - .Select(b => new CodeBlock(b, line)) - .ToList(); + var codeBlocks = ParseCodeBlocks(_code, line, Language.CSharp, _propertyOrMethodName); this.Blocks.AddRange(codeBlocks); } @@ -96,19 +113,41 @@ public override void VisitBlock(BlockSyntax node) } } - public override void VisitXmlText(XmlTextSyntax node) + public override void VisitTrivia(SyntaxTrivia trivia) { - var nodeLine = node.SyntaxTree.GetLineSpan(node.Span).StartLinePosition.Line; - var line = _lineNumberOverride ?? nodeLine; - var text = node.TextTokens - .Where(n => n.Kind() == SyntaxKind.XmlTextLiteralToken) - .Aggregate(new StringBuilder(), (a, t) => a.AppendLine(t.Text.TrimStart()), a => a.ToString()); + if (trivia.Kind() != SyntaxKind.MultiLineDocumentationCommentTrivia) + { + base.VisitTrivia(trivia); + return; + } - this.TextBlocks.Add(new TextBlock(text, line)); + var tokens = trivia.ToFullString() + .RemoveLeadingAndTrailingMultiLineComments() + .SplitOnNewLines(StringSplitOptions.None); + var builder = new StringBuilder(); - base.VisitXmlText(node); - } + foreach (var token in tokens) + { + var currentToken = token.RemoveLeadingSpacesAndAsterisk(); + var decodedToken = System.Net.WebUtility.HtmlDecode(currentToken); + builder.AppendLine(decodedToken); + } + + var text = builder.ToString(); + var line = _firstVisit + ? 
trivia.SyntaxTree.GetLineSpan(trivia.Span).StartLinePosition.Line + : _lineNumberOverride.GetValueOrDefault(0); + this.Blocks.Add(new TextBlock(text, line)); + } + private List ParseCodeBlocks(string code, int line, Language language, string propertyName) + { + return Regex.Split(code, @"\/\*\*.*?\*\/", RegexOptions.Singleline) + .Select(b => b.TrimStart('\r', '\n').TrimEnd('\r', '\n', '\t')) + .Where(b => !string.IsNullOrEmpty(b) && b != ";") + .Select(b => new CodeBlock(b, line, language, propertyName)) + .ToList(); + } } } diff --git a/src/CodeGeneration/Nest.Litterateur/Walkers/DocumentationFileWalker.cs b/src/CodeGeneration/Nest.Litterateur/Walkers/DocumentationFileWalker.cs index 6ed9d53a679..0002dfc7c86 100644 --- a/src/CodeGeneration/Nest.Litterateur/Walkers/DocumentationFileWalker.cs +++ b/src/CodeGeneration/Nest.Litterateur/Walkers/DocumentationFileWalker.cs @@ -1,7 +1,6 @@ using Microsoft.CodeAnalysis; using Microsoft.CodeAnalysis.CSharp; using Microsoft.CodeAnalysis.CSharp.Syntax; -using Nest.Litterateur.Documentation; using System; using System.Collections.Generic; using System.Linq; @@ -12,43 +11,84 @@ namespace Nest.Litterateur.Walkers { class DocumentationFileWalker : CSharpSyntaxWalker { + private static readonly string[] PropertyOrMethodNamesOfInterest = + { + "ExpectJson", + "QueryJson", + "Fluent", + "Initializer", + "QueryFluent", + "QueryInitializer" + }; + + private string _propertyOrMethodName; + public DocumentationFileWalker() : base(SyntaxWalkerDepth.StructuredTrivia) { } private int ClassDepth { get; set; } + public int InterfaceDepth { get; set; } private bool InsideMultiLineDocumentation { get; set; } private bool InsideAutoIncludeMethodBlock { get; set; } private bool InsideFluentOrInitializerExample { get; set; } + private bool IncludeMethodBlockContainsLambda { get; set; } + private int EndLine { get; set; } public List Blocks { get; } = new List(); + public override void VisitInterfaceDeclaration(InterfaceDeclarationSyntax node) + 
{ + if (node.ChildNodes().All(childNode => childNode is PropertyDeclarationSyntax || childNode is AttributeListSyntax)) + { + // simple nested interface + var line = node.SyntaxTree.GetLineSpan(node.Span).StartLinePosition.Line; + var walker = new CodeWithDocumentationWalker(0, line); + walker.Visit(node); + this.Blocks.AddRange(walker.Blocks); + } + } + public override void VisitClassDeclaration(ClassDeclarationSyntax node) { ++ClassDepth; if (ClassDepth == 1) { base.VisitClassDeclaration(node); - } - // are we dealing with a simple nested POCO? + } else if (node.ChildNodes().All(childNode => childNode is PropertyDeclarationSyntax || childNode is AttributeListSyntax)) - { + { + // simple nested POCO var line = node.SyntaxTree.GetLineSpan(node.Span).StartLinePosition.Line; var walker = new CodeWithDocumentationWalker(ClassDepth - 2, line); walker.Visit(node); this.Blocks.AddRange(walker.Blocks); } + else + { + var methods = node.ChildNodes().OfType(); + if (!methods.Any(m => m.AttributeLists.SelectMany(a => a.Attributes).Any())) + { + // nested class with methods that are not unit or integration tests e.g. example PropertyVisitor in Automap.doc.cs + var line = node.SyntaxTree.GetLineSpan(node.Span).StartLinePosition.Line; + var walker = new CodeWithDocumentationWalker(ClassDepth - 2, line); + walker.Visit(node); + this.Blocks.AddRange(walker.Blocks); + } + } --ClassDepth; } public override void VisitPropertyDeclaration(PropertyDeclarationSyntax node) { - var propertyName = node.Identifier.Text; - if (propertyName == "Fluent") - { - this.InsideFluentOrInitializerExample = true; - base.VisitPropertyDeclaration(node); - this.InsideFluentOrInitializerExample = false; - } - else if (propertyName == "Initializer") + _propertyOrMethodName = node.Identifier.Text; + if (PropertyOrMethodNamesOfInterest.Contains(_propertyOrMethodName)) { + // TODO: Look to get the generic types for the call so that we can prettify the fluent and OIS calls in docs e.g. 
client.Search({Call}); + // var genericArguments = node.DescendantNodes().OfType().FirstOrDefault(); + // List arguments = new List(); + // if (genericArguments != null) + // { + // arguments.AddRange(genericArguments.TypeArgumentList.Arguments); + // } + this.InsideFluentOrInitializerExample = true; base.VisitPropertyDeclaration(node); this.InsideFluentOrInitializerExample = false; @@ -57,11 +97,11 @@ public override void VisitPropertyDeclaration(PropertyDeclarationSyntax node) public override void VisitArrowExpressionClause(ArrowExpressionClauseSyntax node) { - if (!this.InsideFluentOrInitializerExample) return; + if (!this.InsideFluentOrInitializerExample && !PropertyOrMethodNamesOfInterest.Contains(_propertyOrMethodName)) return; var syntaxNode = node?.ChildNodes()?.LastOrDefault()?.WithAdditionalAnnotations(); if (syntaxNode == null) return; var line = node.SyntaxTree.GetLineSpan(node.Span).StartLinePosition.Line; - var walker = new CodeWithDocumentationWalker(ClassDepth, line); + var walker = new CodeWithDocumentationWalker(ClassDepth, line, _propertyOrMethodName); walker.Visit(syntaxNode); this.Blocks.AddRange(walker.Blocks); } @@ -72,7 +112,7 @@ public override void VisitAccessorDeclaration(AccessorDeclarationSyntax node) var syntaxNode = node?.ChildNodes()?.LastOrDefault()?.WithAdditionalAnnotations() as BlockSyntax; if (syntaxNode == null) return; var line = node.SyntaxTree.GetLineSpan(node.Span).StartLinePosition.Line; - var walker = new CodeWithDocumentationWalker(ClassDepth, line); + var walker = new CodeWithDocumentationWalker(ClassDepth, line, _propertyOrMethodName); walker.VisitBlock(syntaxNode); this.Blocks.AddRange(walker.Blocks); } @@ -80,8 +120,11 @@ public override void VisitAccessorDeclaration(AccessorDeclarationSyntax node) public override void VisitMethodDeclaration(MethodDeclarationSyntax node) { if (this.ClassDepth == 1) this.InsideAutoIncludeMethodBlock = true; + _propertyOrMethodName = node.Identifier.Text; 
base.VisitMethodDeclaration(node); this.InsideAutoIncludeMethodBlock = false; + this.IncludeMethodBlockContainsLambda = false; + this.EndLine = 0; } public override void VisitExpressionStatement(ExpressionStatementSyntax node) @@ -89,19 +132,27 @@ public override void VisitExpressionStatement(ExpressionStatementSyntax node) if (this.InsideAutoIncludeMethodBlock) { var line = node.SyntaxTree.GetLineSpan(node.Span).StartLinePosition.Line; + + // this lambda has already been included so skip it + if (IncludeMethodBlockContainsLambda && this.EndLine >= line) + { + return; + } + var allchildren = node.DescendantNodesAndTokens(descendIntoTrivia: true); if (allchildren.Any(a => a.Kind() == SyntaxKind.MultiLineDocumentationCommentTrivia)) { - var walker = new CodeWithDocumentationWalker(ClassDepth, line); + var walker = new CodeWithDocumentationWalker(ClassDepth, line, _propertyOrMethodName); walker.Visit(node.WithAdditionalAnnotations()); this.Blocks.AddRange(walker.Blocks); return; } base.VisitExpressionStatement(node); - this.Blocks.Add(new CodeBlock(node.WithoutLeadingTrivia().ToFullString(), line)); + var code = node.WithoutLeadingTrivia().ToFullString(); + code = code.RemoveNumberOfLeadingTabsAfterNewline(ClassDepth + 2); + this.Blocks.Add(new CodeBlock(code, line, Language.CSharp, _propertyOrMethodName)); } else base.VisitExpressionStatement(node); - } public override void VisitLocalDeclarationStatement(LocalDeclarationStatementSyntax node) @@ -109,15 +160,25 @@ public override void VisitLocalDeclarationStatement(LocalDeclarationStatementSyn if (this.InsideAutoIncludeMethodBlock) { var allchildren = node.DescendantNodesAndTokens(descendIntoTrivia: true); - var line = node.SyntaxTree.GetLineSpan(node.Span).StartLinePosition.Line; + var linePositionSpan = node.SyntaxTree.GetLineSpan(node.Span); + var line = linePositionSpan.StartLinePosition.Line; if (allchildren.Any(a => a.Kind() == SyntaxKind.MultiLineDocumentationCommentTrivia)) { - var walker = new 
CodeWithDocumentationWalker(ClassDepth, line); + var walker = new CodeWithDocumentationWalker(ClassDepth, line, _propertyOrMethodName); walker.Visit(node.WithAdditionalAnnotations()); this.Blocks.AddRange(walker.Blocks); return; } - this.Blocks.Add(new CodeBlock(node.WithoutLeadingTrivia().ToFullString(), line)); + var code = node.WithoutLeadingTrivia().ToFullString(); + code = code.RemoveNumberOfLeadingTabsAfterNewline(ClassDepth + 2); + this.Blocks.Add(new CodeBlock(code, line, Language.CSharp, _propertyOrMethodName)); + + if (allchildren.Any(a => a.Kind() == SyntaxKind.SimpleLambdaExpression)) + { + // nested lambda inside this local declaration + this.IncludeMethodBlockContainsLambda = true; + this.EndLine = linePositionSpan.EndLinePosition.Line; + } } base.VisitLocalDeclarationStatement(node); } @@ -131,23 +192,24 @@ public override void VisitTrivia(SyntaxTrivia trivia) } this.InsideMultiLineDocumentation = true; - this.CreateTextBlocksFromTrivia(trivia); - this.InsideMultiLineDocumentation = false; - } - private void CreateTextBlocksFromTrivia(SyntaxTrivia trivia) - { - var tokens = trivia.ToFullString().TrimStart('/', '*').TrimEnd('*', '/').Split('\n'); + var tokens = trivia.ToFullString() + .RemoveLeadingAndTrailingMultiLineComments() + .SplitOnNewLines(StringSplitOptions.None); var builder = new StringBuilder(); + foreach (var token in tokens) { - var decodedToken = System.Net.WebUtility.HtmlDecode(token.Trim().Trim('*').Trim()); + var currentToken = token.RemoveLeadingSpacesAndAsterisk(); + var decodedToken = System.Net.WebUtility.HtmlDecode(currentToken); builder.AppendLine(decodedToken); } var text = builder.ToString(); var line = trivia.SyntaxTree.GetLineSpan(trivia.Span).StartLinePosition.Line; this.Blocks.Add(new TextBlock(text, line)); + + this.InsideMultiLineDocumentation = false; } } } diff --git a/src/CodeGeneration/Nest.Litterateur/project.json b/src/CodeGeneration/Nest.Litterateur/project.json index 3365fd2e146..c826977e990 100644 --- 
a/src/CodeGeneration/Nest.Litterateur/project.json +++ b/src/CodeGeneration/Nest.Litterateur/project.json @@ -5,7 +5,7 @@ "emitEntryPoint": true }, "dependencies": { - + "Newtonsoft.Json": "8.0.2" }, "commands": { "Nest.Litterateur": "Nest.Litterateur" @@ -29,12 +29,14 @@ "System.Runtime": "", "System.Runtime.Serialization": "", "System.Threading.Tasks": "", - "System.Text.Encoding": "" + "System.Text.Encoding": "", + "System.IO": "" }, - "dependencies": { - "Microsoft.CSharp": "4.0.1-beta-23409", - "Microsoft.CodeAnalysis": "1.1.1" - } + "dependencies": { + "Microsoft.CSharp": "4.0.1-beta-23409", + "Microsoft.CodeAnalysis": "1.1.1", + "AsciiDoc": "1.0.0-*" + } }, "dotnet5.1": { "compilationOptions": { "define": [ "DOTNETCORE" ] }, @@ -44,6 +46,7 @@ "System.Reflection": "4.1.0-beta-23225", "System.Collections.Specialized": "4.0.0-beta-23109", "System.Linq": "4.0.0-beta-23109", + "System.IO": "4.0.0-beta-23109", "System.IO.FileSystem": "4.0.0-beta-23109", "System.IO.Compression": "4.0.0-beta-23109", "System.Runtime.Serialization.Primitives": "4.0.10-beta-23109", diff --git a/src/CodeGeneration/Nest.Litterateur/project.lock.json b/src/CodeGeneration/Nest.Litterateur/project.lock.json new file mode 100644 index 00000000000..3f8112a00d6 --- /dev/null +++ b/src/CodeGeneration/Nest.Litterateur/project.lock.json @@ -0,0 +1,5037 @@ +{ + "locked": false, + "version": 2, + "targets": { + "DNX,Version=v4.5.1": { + "AsciiDoc/1.0.0": { + "type": "project", + "framework": "DNX,Version=v4.5.1" + }, + "Microsoft.CodeAnalysis/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.CSharp.Workspaces": "[1.1.1]", + "Microsoft.CodeAnalysis.VisualBasic.Workspaces": "[1.1.1]" + } + }, + "Microsoft.CodeAnalysis.Analyzers/1.1.0": { + "type": "package", + "frameworkAssemblies": [ + "System" + ] + }, + "Microsoft.CodeAnalysis.Common/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Analyzers": "1.1.0", + "System.Collections.Immutable": 
"1.1.37", + "System.Reflection.Metadata": "1.1.0" + }, + "compile": { + "lib/net45/Microsoft.CodeAnalysis.dll": {} + }, + "runtime": { + "lib/net45/Microsoft.CodeAnalysis.dll": {} + } + }, + "Microsoft.CodeAnalysis.CSharp/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Common": "[1.1.1]" + }, + "compile": { + "lib/net45/Microsoft.CodeAnalysis.CSharp.dll": {} + }, + "runtime": { + "lib/net45/Microsoft.CodeAnalysis.CSharp.dll": {} + } + }, + "Microsoft.CodeAnalysis.CSharp.Workspaces/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.CSharp": "[1.1.1]", + "Microsoft.CodeAnalysis.Workspaces.Common": "[1.1.1]" + }, + "compile": { + "lib/net45/Microsoft.CodeAnalysis.CSharp.Workspaces.dll": {} + }, + "runtime": { + "lib/net45/Microsoft.CodeAnalysis.CSharp.Workspaces.dll": {} + } + }, + "Microsoft.CodeAnalysis.VisualBasic/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Common": "1.1.1" + }, + "compile": { + "lib/net45/Microsoft.CodeAnalysis.VisualBasic.dll": {} + }, + "runtime": { + "lib/net45/Microsoft.CodeAnalysis.VisualBasic.dll": {} + } + }, + "Microsoft.CodeAnalysis.VisualBasic.Workspaces/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.VisualBasic": "[1.1.1]", + "Microsoft.CodeAnalysis.Workspaces.Common": "[1.1.1]" + }, + "compile": { + "lib/net45/Microsoft.CodeAnalysis.VisualBasic.Workspaces.dll": {} + }, + "runtime": { + "lib/net45/Microsoft.CodeAnalysis.VisualBasic.Workspaces.dll": {} + } + }, + "Microsoft.CodeAnalysis.Workspaces.Common/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Common": "[1.1.1]", + "Microsoft.Composition": "1.0.27" + }, + "compile": { + "lib/net45/Microsoft.CodeAnalysis.Workspaces.Desktop.dll": {}, + "lib/net45/Microsoft.CodeAnalysis.Workspaces.dll": {} + }, + "runtime": { + "lib/net45/Microsoft.CodeAnalysis.Workspaces.Desktop.dll": {}, + "lib/net45/Microsoft.CodeAnalysis.Workspaces.dll": {} + } + }, + 
"Microsoft.Composition/1.0.27": { + "type": "package", + "compile": { + "lib/portable-net45+win8+wp8+wpa81/System.Composition.AttributedModel.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Convention.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Hosting.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Runtime.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.TypedParts.dll": {} + }, + "runtime": { + "lib/portable-net45+win8+wp8+wpa81/System.Composition.AttributedModel.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Convention.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Hosting.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Runtime.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.TypedParts.dll": {} + } + }, + "Microsoft.CSharp/4.0.1-beta-23409": { + "type": "package", + "frameworkAssemblies": [ + "Microsoft.CSharp" + ], + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "Newtonsoft.Json/8.0.2": { + "type": "package", + "compile": { + "lib/net45/Newtonsoft.Json.dll": {} + }, + "runtime": { + "lib/net45/Newtonsoft.Json.dll": {} + } + }, + "System.Collections/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Collections.Immutable/1.1.37": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.0", + "System.Diagnostics.Debug": "4.0.0", + "System.Globalization": "4.0.0", + "System.Linq": "4.0.0", + "System.Resources.ResourceManager": "4.0.0", + "System.Runtime": "4.0.0", + "System.Runtime.Extensions": "4.0.0", + "System.Threading": "4.0.0" + }, + "compile": { + "lib/dotnet/System.Collections.Immutable.dll": {} + }, + "runtime": { + "lib/dotnet/System.Collections.Immutable.dll": {} + } + }, + "System.Diagnostics.Debug/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + 
"runtime": { + "lib/net45/_._": {} + } + }, + "System.Globalization/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.IO/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Linq/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Reflection/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Reflection.Extensions/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Reflection.Metadata/1.1.0": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.0", + "System.Collections.Immutable": "1.1.37", + "System.Diagnostics.Debug": "4.0.0", + "System.IO": "4.0.0", + "System.Reflection": "4.0.0", + "System.Reflection.Extensions": "4.0.0", + "System.Reflection.Primitives": "4.0.0", + "System.Resources.ResourceManager": "4.0.0", + "System.Runtime": "4.0.0", + "System.Runtime.Extensions": "4.0.0", + "System.Runtime.InteropServices": "4.0.0", + "System.Text.Encoding": "4.0.0", + "System.Text.Encoding.Extensions": "4.0.0", + "System.Threading": "4.0.0" + }, + "compile": { + "lib/dotnet5.2/System.Reflection.Metadata.dll": {} + }, + "runtime": { + "lib/dotnet5.2/System.Reflection.Metadata.dll": {} + } + }, + "System.Reflection.Primitives/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Resources.ResourceManager/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Runtime/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Runtime.Extensions/4.0.0": { + 
"type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Runtime.InteropServices/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Text.Encoding/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Text.Encoding.Extensions/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Threading/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + } + }, + ".NETPlatform,Version=v5.1": { + "Microsoft.CodeAnalysis/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.CSharp.Workspaces": "[1.1.1]", + "Microsoft.CodeAnalysis.VisualBasic.Workspaces": "[1.1.1]" + } + }, + "Microsoft.CodeAnalysis.Analyzers/1.1.0": { + "type": "package" + }, + "Microsoft.CodeAnalysis.Common/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Analyzers": "1.1.0", + "System.Collections.Immutable": "1.1.37", + "System.Reflection.Metadata": "1.1.0" + }, + "compile": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.dll": {} + }, + "runtime": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.dll": {} + } + }, + "Microsoft.CodeAnalysis.CSharp/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Common": "[1.1.1]" + }, + "compile": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.CSharp.dll": {} + }, + "runtime": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.CSharp.dll": {} + } + }, + "Microsoft.CodeAnalysis.CSharp.Workspaces/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.CSharp": "[1.1.1]", + "Microsoft.CodeAnalysis.Workspaces.Common": "[1.1.1]" + }, + "compile": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.CSharp.Workspaces.dll": {} + }, 
+ "runtime": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.CSharp.Workspaces.dll": {} + } + }, + "Microsoft.CodeAnalysis.VisualBasic/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Common": "1.1.1" + }, + "compile": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.VisualBasic.dll": {} + }, + "runtime": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.VisualBasic.dll": {} + } + }, + "Microsoft.CodeAnalysis.VisualBasic.Workspaces/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.VisualBasic": "[1.1.1]", + "Microsoft.CodeAnalysis.Workspaces.Common": "[1.1.1]" + }, + "compile": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.VisualBasic.Workspaces.dll": {} + }, + "runtime": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.VisualBasic.Workspaces.dll": {} + } + }, + "Microsoft.CodeAnalysis.Workspaces.Common/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Common": "[1.1.1]", + "Microsoft.Composition": "1.0.27" + }, + "compile": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.Workspaces.dll": {} + }, + "runtime": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.Workspaces.dll": {} + } + }, + "Microsoft.Composition/1.0.27": { + "type": "package", + "compile": { + "lib/portable-net45+win8+wp8+wpa81/System.Composition.AttributedModel.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Convention.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Hosting.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Runtime.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.TypedParts.dll": {} + }, + "runtime": { + "lib/portable-net45+win8+wp8+wpa81/System.Composition.AttributedModel.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Convention.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Hosting.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Runtime.dll": {}, + 
"lib/portable-net45+win8+wp8+wpa81/System.Composition.TypedParts.dll": {} + } + }, + "Microsoft.CSharp/4.0.1-beta-23409": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.10", + "System.Diagnostics.Debug": "4.0.10", + "System.Dynamic.Runtime": "4.0.0", + "System.Globalization": "4.0.10", + "System.Linq": "4.0.0", + "System.Linq.Expressions": "4.0.0", + "System.ObjectModel": "4.0.10", + "System.Reflection": "4.0.10", + "System.Reflection.Extensions": "4.0.0", + "System.Reflection.Primitives": "4.0.0", + "System.Reflection.TypeExtensions": "4.0.0", + "System.Resources.ResourceManager": "4.0.0", + "System.Runtime": "4.0.20", + "System.Runtime.Extensions": "4.0.10", + "System.Runtime.InteropServices": "4.0.20", + "System.Threading": "4.0.10" + }, + "compile": { + "ref/dotnet/Microsoft.CSharp.dll": {} + }, + "runtime": { + "lib/dotnet/Microsoft.CSharp.dll": {} + } + }, + "Newtonsoft.Json/8.0.2": { + "type": "package", + "compile": { + "lib/portable-net45+wp80+win8+wpa81+dnxcore50/Newtonsoft.Json.dll": {} + }, + "runtime": { + "lib/portable-net45+wp80+win8+wpa81+dnxcore50/Newtonsoft.Json.dll": {} + } + }, + "System.Collections/4.0.11-beta-23225": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Collections.dll": {} + } + }, + "System.Collections.Concurrent/4.0.10-beta-23109": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.10-beta-23109", + "System.Diagnostics.Debug": "4.0.10-beta-23109", + "System.Diagnostics.Tracing": "4.0.20-beta-23109", + "System.Globalization": "4.0.10-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109", + "System.Runtime.Extensions": "4.0.10-beta-23109", + "System.Threading": "4.0.10-beta-23109", + "System.Threading.Tasks": "4.0.10-beta-23109" + }, + "compile": { + "ref/dotnet/System.Collections.Concurrent.dll": {} + }, + "runtime": { + 
"lib/dotnet/System.Collections.Concurrent.dll": {} + } + }, + "System.Collections.Immutable/1.1.37": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.0", + "System.Diagnostics.Debug": "4.0.0", + "System.Globalization": "4.0.0", + "System.Linq": "4.0.0", + "System.Resources.ResourceManager": "4.0.0", + "System.Runtime": "4.0.0", + "System.Runtime.Extensions": "4.0.0", + "System.Threading": "4.0.0" + }, + "compile": { + "lib/dotnet/System.Collections.Immutable.dll": {} + }, + "runtime": { + "lib/dotnet/System.Collections.Immutable.dll": {} + } + }, + "System.Collections.NonGeneric/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Diagnostics.Debug": "4.0.10-beta-23109", + "System.Globalization": "4.0.10-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109", + "System.Runtime.Extensions": "4.0.10-beta-23109", + "System.Threading": "4.0.10-beta-23109" + }, + "compile": { + "ref/dotnet/System.Collections.NonGeneric.dll": {} + }, + "runtime": { + "lib/dotnet/System.Collections.NonGeneric.dll": {} + } + }, + "System.Collections.Specialized/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Collections.NonGeneric": "4.0.0-beta-23109", + "System.Globalization": "4.0.10-beta-23109", + "System.Globalization.Extensions": "4.0.0-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109", + "System.Runtime.Extensions": "4.0.10-beta-23109", + "System.Threading": "4.0.10-beta-23109" + }, + "compile": { + "ref/dotnet/System.Collections.Specialized.dll": {} + }, + "runtime": { + "lib/dotnet/System.Collections.Specialized.dll": {} + } + }, + "System.ComponentModel/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.20-beta-23109" + }, + "compile": { + "ref/dotnet/System.ComponentModel.dll": {} + }, + "runtime": { + "lib/dotnet/System.ComponentModel.dll": {} + } + }, + 
"System.ComponentModel.Primitives/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.ComponentModel": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109" + }, + "compile": { + "ref/dotnet/System.ComponentModel.Primitives.dll": {} + }, + "runtime": { + "lib/dotnet/System.ComponentModel.Primitives.dll": {} + } + }, + "System.ComponentModel.TypeConverter/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.10-beta-23109", + "System.ComponentModel": "4.0.0-beta-23109", + "System.ComponentModel.Primitives": "4.0.0-beta-23109", + "System.Globalization": "4.0.10-beta-23109", + "System.Reflection": "4.0.10-beta-23109", + "System.Reflection.Extensions": "4.0.0-beta-23109", + "System.Reflection.Primitives": "4.0.0-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109", + "System.Runtime.Extensions": "4.0.10-beta-23109", + "System.Threading": "4.0.10-beta-23109" + }, + "compile": { + "ref/dotnet/System.ComponentModel.TypeConverter.dll": {} + }, + "runtime": { + "lib/dotnet/System.ComponentModel.TypeConverter.dll": {} + } + }, + "System.Diagnostics.Debug/4.0.10": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Diagnostics.Debug.dll": {} + } + }, + "System.Diagnostics.Tracing/4.0.20-beta-23109": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.Diagnostics.Tracing.dll": {} + } + }, + "System.Dynamic.Runtime/4.0.11-beta-23225": { + "type": "package", + "dependencies": { + "System.Linq.Expressions": "4.0.10", + "System.ObjectModel": "4.0.0", + "System.Reflection": "4.0.0", + "System.Runtime": "4.0.20" + }, + "compile": { + "ref/dotnet/System.Dynamic.Runtime.dll": {} + } + }, + "System.Globalization/4.0.10": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0" + }, + "compile": { + 
"ref/dotnet/System.Globalization.dll": {} + } + }, + "System.Globalization.Extensions/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Globalization": "4.0.10-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109", + "System.Runtime.Extensions": "4.0.10-beta-23109", + "System.Runtime.InteropServices": "4.0.20-beta-23109" + }, + "compile": { + "ref/dotnet/System.Globalization.Extensions.dll": {} + }, + "runtime": { + "lib/dotnet/System.Globalization.Extensions.dll": {} + } + }, + "System.IO/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0-beta-23109", + "System.Text.Encoding": "4.0.0-beta-23109", + "System.Threading.Tasks": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.IO.dll": {} + } + }, + "System.IO.Compression/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.0-beta-23109", + "System.IO": "4.0.0-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.0-beta-23109", + "System.Runtime.Extensions": "4.0.0-beta-23109", + "System.Runtime.InteropServices": "4.0.0-beta-23109", + "System.Text.Encoding": "4.0.0-beta-23109", + "System.Threading": "4.0.0-beta-23109", + "System.Threading.Tasks": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.IO.Compression.dll": {} + }, + "runtime": { + "lib/dotnet/System.IO.Compression.dll": {} + } + }, + "System.IO.FileSystem/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.IO": "4.0.0-beta-23109", + "System.IO.FileSystem.Primitives": "4.0.0-beta-23109", + "System.Runtime": "4.0.0-beta-23109", + "System.Runtime.Handles": "4.0.0-beta-23109", + "System.Text.Encoding": "4.0.0-beta-23109", + "System.Threading.Tasks": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.IO.FileSystem.dll": {} + } + }, + "System.IO.FileSystem.Primitives/4.0.0-beta-23109": { + "type": "package", + 
"dependencies": { + "System.Runtime": "4.0.20-beta-23109" + }, + "compile": { + "ref/dotnet/System.IO.FileSystem.Primitives.dll": {} + }, + "runtime": { + "lib/dotnet/System.IO.FileSystem.Primitives.dll": {} + } + }, + "System.Linq/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.10-beta-23109", + "System.Diagnostics.Debug": "4.0.10-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109", + "System.Runtime.Extensions": "4.0.10-beta-23109" + }, + "compile": { + "ref/dotnet/System.Linq.dll": {} + }, + "runtime": { + "lib/dotnet/System.Linq.dll": {} + } + }, + "System.Linq.Expressions/4.0.10-beta-23109": { + "type": "package", + "dependencies": { + "System.Reflection": "4.0.0-beta-23109", + "System.Runtime": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.Linq.Expressions.dll": {} + } + }, + "System.Net.Http/4.0.1-beta-23225": { + "type": "package", + "dependencies": { + "System.IO": "4.0.0", + "System.Net.Primitives": "4.0.0", + "System.Runtime": "4.0.0", + "System.Text.Encoding": "4.0.0", + "System.Threading.Tasks": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Net.Http.dll": {} + } + }, + "System.Net.Primitives/4.0.0": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Net.Primitives.dll": {} + } + }, + "System.ObjectModel/4.0.10": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.10", + "System.Diagnostics.Debug": "4.0.10", + "System.Resources.ResourceManager": "4.0.0", + "System.Runtime": "4.0.20", + "System.Threading": "4.0.10" + }, + "compile": { + "ref/dotnet/System.ObjectModel.dll": {} + }, + "runtime": { + "lib/dotnet/System.ObjectModel.dll": {} + } + }, + "System.Reflection/4.1.0-beta-23225": { + "type": "package", + "dependencies": { + "System.IO": "4.0.0", + "System.Reflection.Primitives": "4.0.0", + "System.Runtime": "4.0.20" + }, + "compile": { + 
"ref/dotnet/System.Reflection.dll": {} + } + }, + "System.Reflection.Extensions/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Reflection": "4.0.0-beta-23109", + "System.Runtime": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.Reflection.Extensions.dll": {} + } + }, + "System.Reflection.Metadata/1.1.0-alpha-00009": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.0", + "System.Collections.Immutable": "1.1.36", + "System.Diagnostics.Debug": "4.0.0", + "System.IO": "4.0.0", + "System.Reflection": "4.0.0", + "System.Reflection.Extensions": "4.0.0", + "System.Reflection.Primitives": "4.0.0", + "System.Resources.ResourceManager": "4.0.0", + "System.Runtime": "4.0.0", + "System.Runtime.Extensions": "4.0.0", + "System.Runtime.InteropServices": "4.0.0", + "System.Text.Encoding": "4.0.0", + "System.Text.Encoding.Extensions": "4.0.0", + "System.Threading": "4.0.0" + }, + "compile": { + "lib/dotnet/System.Reflection.Metadata.dll": {} + }, + "runtime": { + "lib/dotnet/System.Reflection.Metadata.dll": {} + } + }, + "System.Reflection.Primitives/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.Reflection.Primitives.dll": {} + } + }, + "System.Reflection.TypeExtensions/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Reflection": "4.0.0-beta-23109", + "System.Runtime": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.Reflection.TypeExtensions.dll": {} + } + }, + "System.Resources.ResourceManager/4.0.0": { + "type": "package", + "dependencies": { + "System.Globalization": "4.0.0", + "System.Reflection": "4.0.0", + "System.Runtime": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Resources.ResourceManager.dll": {} + } + }, + "System.Runtime/4.0.21-beta-23225": { + "type": "package", + "compile": { + "ref/dotnet/System.Runtime.dll": {} + } + }, + "System.Runtime.Extensions/4.0.10": { + "type": 
"package", + "dependencies": { + "System.Runtime": "4.0.20" + }, + "compile": { + "ref/dotnet/System.Runtime.Extensions.dll": {} + } + }, + "System.Runtime.Handles/4.0.0": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Runtime.Handles.dll": {} + } + }, + "System.Runtime.InteropServices/4.0.20": { + "type": "package", + "dependencies": { + "System.Reflection": "4.0.0", + "System.Reflection.Primitives": "4.0.0", + "System.Runtime": "4.0.0", + "System.Runtime.Handles": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Runtime.InteropServices.dll": {} + } + }, + "System.Runtime.Serialization.Primitives/4.0.10-beta-23109": { + "type": "package", + "dependencies": { + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109" + }, + "compile": { + "ref/dotnet/System.Runtime.Serialization.Primitives.dll": {} + }, + "runtime": { + "lib/dotnet/System.Runtime.Serialization.Primitives.dll": {} + } + }, + "System.Security.Cryptography.Algorithms/4.0.0-beta-23225": { + "type": "package", + "dependencies": { + "System.IO": "4.0.0", + "System.Runtime": "4.0.0", + "System.Security.Cryptography.Primitives": "4.0.0-beta-23225" + }, + "compile": { + "ref/dotnet/System.Security.Cryptography.Algorithms.dll": {} + } + }, + "System.Security.Cryptography.Encoding/4.0.0-beta-23225": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Security.Cryptography.Encoding.dll": {} + } + }, + "System.Security.Cryptography.Primitives/4.0.0-beta-23225": { + "type": "package", + "dependencies": { + "System.IO": "4.0.0", + "System.Runtime": "4.0.0", + "System.Threading.Tasks": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Security.Cryptography.Primitives.dll": {} + } + }, + "System.Security.Cryptography.X509Certificates/4.0.0-beta-23225": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0", + "System.Runtime.Handles": 
"4.0.0", + "System.Security.Cryptography.Algorithms": "4.0.0-beta-23225", + "System.Security.Cryptography.Encoding": "4.0.0-beta-23225" + }, + "compile": { + "ref/dotnet/System.Security.Cryptography.X509Certificates.dll": {} + } + }, + "System.Text.Encoding/4.0.0": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Text.Encoding.dll": {} + } + }, + "System.Text.Encoding.Extensions/4.0.0": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0", + "System.Text.Encoding": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Text.Encoding.Extensions.dll": {} + } + }, + "System.Text.RegularExpressions/4.0.10-beta-23109": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.10-beta-23109", + "System.Globalization": "4.0.10-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109", + "System.Runtime.Extensions": "4.0.10-beta-23109", + "System.Threading": "4.0.10-beta-23109" + }, + "compile": { + "ref/dotnet/System.Text.RegularExpressions.dll": {} + }, + "runtime": { + "lib/dotnet/System.Text.RegularExpressions.dll": {} + } + }, + "System.Threading/4.0.10": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0", + "System.Threading.Tasks": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Threading.dll": {} + } + }, + "System.Threading.Tasks/4.0.10-beta-23109": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.Threading.Tasks.dll": {} + } + } + }, + "DNX,Version=v4.5.1/win7-x86": { + "AsciiDoc/1.0.0": { + "type": "project", + "framework": "DNX,Version=v4.5.1" + }, + "Microsoft.CodeAnalysis/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.CSharp.Workspaces": "[1.1.1]", + "Microsoft.CodeAnalysis.VisualBasic.Workspaces": "[1.1.1]" + } + }, + "Microsoft.CodeAnalysis.Analyzers/1.1.0": { + "type": "package", + 
"frameworkAssemblies": [ + "System" + ] + }, + "Microsoft.CodeAnalysis.Common/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Analyzers": "1.1.0", + "System.Collections.Immutable": "1.1.37", + "System.Reflection.Metadata": "1.1.0" + }, + "compile": { + "lib/net45/Microsoft.CodeAnalysis.dll": {} + }, + "runtime": { + "lib/net45/Microsoft.CodeAnalysis.dll": {} + } + }, + "Microsoft.CodeAnalysis.CSharp/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Common": "[1.1.1]" + }, + "compile": { + "lib/net45/Microsoft.CodeAnalysis.CSharp.dll": {} + }, + "runtime": { + "lib/net45/Microsoft.CodeAnalysis.CSharp.dll": {} + } + }, + "Microsoft.CodeAnalysis.CSharp.Workspaces/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.CSharp": "[1.1.1]", + "Microsoft.CodeAnalysis.Workspaces.Common": "[1.1.1]" + }, + "compile": { + "lib/net45/Microsoft.CodeAnalysis.CSharp.Workspaces.dll": {} + }, + "runtime": { + "lib/net45/Microsoft.CodeAnalysis.CSharp.Workspaces.dll": {} + } + }, + "Microsoft.CodeAnalysis.VisualBasic/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Common": "1.1.1" + }, + "compile": { + "lib/net45/Microsoft.CodeAnalysis.VisualBasic.dll": {} + }, + "runtime": { + "lib/net45/Microsoft.CodeAnalysis.VisualBasic.dll": {} + } + }, + "Microsoft.CodeAnalysis.VisualBasic.Workspaces/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.VisualBasic": "[1.1.1]", + "Microsoft.CodeAnalysis.Workspaces.Common": "[1.1.1]" + }, + "compile": { + "lib/net45/Microsoft.CodeAnalysis.VisualBasic.Workspaces.dll": {} + }, + "runtime": { + "lib/net45/Microsoft.CodeAnalysis.VisualBasic.Workspaces.dll": {} + } + }, + "Microsoft.CodeAnalysis.Workspaces.Common/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Common": "[1.1.1]", + "Microsoft.Composition": "1.0.27" + }, + "compile": { + "lib/net45/Microsoft.CodeAnalysis.Workspaces.Desktop.dll": 
{}, + "lib/net45/Microsoft.CodeAnalysis.Workspaces.dll": {} + }, + "runtime": { + "lib/net45/Microsoft.CodeAnalysis.Workspaces.Desktop.dll": {}, + "lib/net45/Microsoft.CodeAnalysis.Workspaces.dll": {} + } + }, + "Microsoft.Composition/1.0.27": { + "type": "package", + "compile": { + "lib/portable-net45+win8+wp8+wpa81/System.Composition.AttributedModel.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Convention.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Hosting.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Runtime.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.TypedParts.dll": {} + }, + "runtime": { + "lib/portable-net45+win8+wp8+wpa81/System.Composition.AttributedModel.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Convention.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Hosting.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Runtime.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.TypedParts.dll": {} + } + }, + "Microsoft.CSharp/4.0.1-beta-23409": { + "type": "package", + "frameworkAssemblies": [ + "Microsoft.CSharp" + ], + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "Newtonsoft.Json/8.0.2": { + "type": "package", + "compile": { + "lib/net45/Newtonsoft.Json.dll": {} + }, + "runtime": { + "lib/net45/Newtonsoft.Json.dll": {} + } + }, + "System.Collections/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Collections.Immutable/1.1.37": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.0", + "System.Diagnostics.Debug": "4.0.0", + "System.Globalization": "4.0.0", + "System.Linq": "4.0.0", + "System.Resources.ResourceManager": "4.0.0", + "System.Runtime": "4.0.0", + "System.Runtime.Extensions": "4.0.0", + "System.Threading": "4.0.0" + }, + "compile": { + 
"lib/dotnet/System.Collections.Immutable.dll": {} + }, + "runtime": { + "lib/dotnet/System.Collections.Immutable.dll": {} + } + }, + "System.Diagnostics.Debug/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Globalization/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.IO/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Linq/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Reflection/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Reflection.Extensions/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Reflection.Metadata/1.1.0": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.0", + "System.Collections.Immutable": "1.1.37", + "System.Diagnostics.Debug": "4.0.0", + "System.IO": "4.0.0", + "System.Reflection": "4.0.0", + "System.Reflection.Extensions": "4.0.0", + "System.Reflection.Primitives": "4.0.0", + "System.Resources.ResourceManager": "4.0.0", + "System.Runtime": "4.0.0", + "System.Runtime.Extensions": "4.0.0", + "System.Runtime.InteropServices": "4.0.0", + "System.Text.Encoding": "4.0.0", + "System.Text.Encoding.Extensions": "4.0.0", + "System.Threading": "4.0.0" + }, + "compile": { + "lib/dotnet5.2/System.Reflection.Metadata.dll": {} + }, + "runtime": { + "lib/dotnet5.2/System.Reflection.Metadata.dll": {} + } + }, + "System.Reflection.Primitives/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Resources.ResourceManager/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} 
+ }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Runtime/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Runtime.Extensions/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Runtime.InteropServices/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Text.Encoding/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Text.Encoding.Extensions/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Threading/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + } + }, + "DNX,Version=v4.5.1/win7-x64": { + "AsciiDoc/1.0.0": { + "type": "project", + "framework": "DNX,Version=v4.5.1" + }, + "Microsoft.CodeAnalysis/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.CSharp.Workspaces": "[1.1.1]", + "Microsoft.CodeAnalysis.VisualBasic.Workspaces": "[1.1.1]" + } + }, + "Microsoft.CodeAnalysis.Analyzers/1.1.0": { + "type": "package", + "frameworkAssemblies": [ + "System" + ] + }, + "Microsoft.CodeAnalysis.Common/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Analyzers": "1.1.0", + "System.Collections.Immutable": "1.1.37", + "System.Reflection.Metadata": "1.1.0" + }, + "compile": { + "lib/net45/Microsoft.CodeAnalysis.dll": {} + }, + "runtime": { + "lib/net45/Microsoft.CodeAnalysis.dll": {} + } + }, + "Microsoft.CodeAnalysis.CSharp/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Common": "[1.1.1]" + }, + "compile": { + "lib/net45/Microsoft.CodeAnalysis.CSharp.dll": {} + }, + "runtime": { + "lib/net45/Microsoft.CodeAnalysis.CSharp.dll": {} + } + 
}, + "Microsoft.CodeAnalysis.CSharp.Workspaces/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.CSharp": "[1.1.1]", + "Microsoft.CodeAnalysis.Workspaces.Common": "[1.1.1]" + }, + "compile": { + "lib/net45/Microsoft.CodeAnalysis.CSharp.Workspaces.dll": {} + }, + "runtime": { + "lib/net45/Microsoft.CodeAnalysis.CSharp.Workspaces.dll": {} + } + }, + "Microsoft.CodeAnalysis.VisualBasic/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Common": "1.1.1" + }, + "compile": { + "lib/net45/Microsoft.CodeAnalysis.VisualBasic.dll": {} + }, + "runtime": { + "lib/net45/Microsoft.CodeAnalysis.VisualBasic.dll": {} + } + }, + "Microsoft.CodeAnalysis.VisualBasic.Workspaces/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.VisualBasic": "[1.1.1]", + "Microsoft.CodeAnalysis.Workspaces.Common": "[1.1.1]" + }, + "compile": { + "lib/net45/Microsoft.CodeAnalysis.VisualBasic.Workspaces.dll": {} + }, + "runtime": { + "lib/net45/Microsoft.CodeAnalysis.VisualBasic.Workspaces.dll": {} + } + }, + "Microsoft.CodeAnalysis.Workspaces.Common/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Common": "[1.1.1]", + "Microsoft.Composition": "1.0.27" + }, + "compile": { + "lib/net45/Microsoft.CodeAnalysis.Workspaces.Desktop.dll": {}, + "lib/net45/Microsoft.CodeAnalysis.Workspaces.dll": {} + }, + "runtime": { + "lib/net45/Microsoft.CodeAnalysis.Workspaces.Desktop.dll": {}, + "lib/net45/Microsoft.CodeAnalysis.Workspaces.dll": {} + } + }, + "Microsoft.Composition/1.0.27": { + "type": "package", + "compile": { + "lib/portable-net45+win8+wp8+wpa81/System.Composition.AttributedModel.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Convention.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Hosting.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Runtime.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.TypedParts.dll": {} + }, + 
"runtime": { + "lib/portable-net45+win8+wp8+wpa81/System.Composition.AttributedModel.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Convention.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Hosting.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Runtime.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.TypedParts.dll": {} + } + }, + "Microsoft.CSharp/4.0.1-beta-23409": { + "type": "package", + "frameworkAssemblies": [ + "Microsoft.CSharp" + ], + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "Newtonsoft.Json/8.0.2": { + "type": "package", + "compile": { + "lib/net45/Newtonsoft.Json.dll": {} + }, + "runtime": { + "lib/net45/Newtonsoft.Json.dll": {} + } + }, + "System.Collections/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Collections.Immutable/1.1.37": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.0", + "System.Diagnostics.Debug": "4.0.0", + "System.Globalization": "4.0.0", + "System.Linq": "4.0.0", + "System.Resources.ResourceManager": "4.0.0", + "System.Runtime": "4.0.0", + "System.Runtime.Extensions": "4.0.0", + "System.Threading": "4.0.0" + }, + "compile": { + "lib/dotnet/System.Collections.Immutable.dll": {} + }, + "runtime": { + "lib/dotnet/System.Collections.Immutable.dll": {} + } + }, + "System.Diagnostics.Debug/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Globalization/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.IO/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Linq/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + 
"System.Reflection/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Reflection.Extensions/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Reflection.Metadata/1.1.0": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.0", + "System.Collections.Immutable": "1.1.37", + "System.Diagnostics.Debug": "4.0.0", + "System.IO": "4.0.0", + "System.Reflection": "4.0.0", + "System.Reflection.Extensions": "4.0.0", + "System.Reflection.Primitives": "4.0.0", + "System.Resources.ResourceManager": "4.0.0", + "System.Runtime": "4.0.0", + "System.Runtime.Extensions": "4.0.0", + "System.Runtime.InteropServices": "4.0.0", + "System.Text.Encoding": "4.0.0", + "System.Text.Encoding.Extensions": "4.0.0", + "System.Threading": "4.0.0" + }, + "compile": { + "lib/dotnet5.2/System.Reflection.Metadata.dll": {} + }, + "runtime": { + "lib/dotnet5.2/System.Reflection.Metadata.dll": {} + } + }, + "System.Reflection.Primitives/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Resources.ResourceManager/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Runtime/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Runtime.Extensions/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Runtime.InteropServices/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Text.Encoding/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Text.Encoding.Extensions/4.0.0": { + "type": 
"package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + }, + "System.Threading/4.0.0": { + "type": "package", + "compile": { + "ref/net45/_._": {} + }, + "runtime": { + "lib/net45/_._": {} + } + } + }, + ".NETPlatform,Version=v5.1/win7-x86": { + "Microsoft.CodeAnalysis/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.CSharp.Workspaces": "[1.1.1]", + "Microsoft.CodeAnalysis.VisualBasic.Workspaces": "[1.1.1]" + } + }, + "Microsoft.CodeAnalysis.Analyzers/1.1.0": { + "type": "package" + }, + "Microsoft.CodeAnalysis.Common/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Analyzers": "1.1.0", + "System.Collections.Immutable": "1.1.37", + "System.Reflection.Metadata": "1.1.0" + }, + "compile": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.dll": {} + }, + "runtime": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.dll": {} + } + }, + "Microsoft.CodeAnalysis.CSharp/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Common": "[1.1.1]" + }, + "compile": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.CSharp.dll": {} + }, + "runtime": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.CSharp.dll": {} + } + }, + "Microsoft.CodeAnalysis.CSharp.Workspaces/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.CSharp": "[1.1.1]", + "Microsoft.CodeAnalysis.Workspaces.Common": "[1.1.1]" + }, + "compile": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.CSharp.Workspaces.dll": {} + }, + "runtime": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.CSharp.Workspaces.dll": {} + } + }, + "Microsoft.CodeAnalysis.VisualBasic/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Common": "1.1.1" + }, + "compile": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.VisualBasic.dll": {} + }, + "runtime": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.VisualBasic.dll": {} + } + }, + 
"Microsoft.CodeAnalysis.VisualBasic.Workspaces/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.VisualBasic": "[1.1.1]", + "Microsoft.CodeAnalysis.Workspaces.Common": "[1.1.1]" + }, + "compile": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.VisualBasic.Workspaces.dll": {} + }, + "runtime": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.VisualBasic.Workspaces.dll": {} + } + }, + "Microsoft.CodeAnalysis.Workspaces.Common/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Common": "[1.1.1]", + "Microsoft.Composition": "1.0.27" + }, + "compile": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.Workspaces.dll": {} + }, + "runtime": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.Workspaces.dll": {} + } + }, + "Microsoft.Composition/1.0.27": { + "type": "package", + "compile": { + "lib/portable-net45+win8+wp8+wpa81/System.Composition.AttributedModel.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Convention.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Hosting.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Runtime.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.TypedParts.dll": {} + }, + "runtime": { + "lib/portable-net45+win8+wp8+wpa81/System.Composition.AttributedModel.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Convention.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Hosting.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Runtime.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.TypedParts.dll": {} + } + }, + "Microsoft.CSharp/4.0.1-beta-23409": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.10", + "System.Diagnostics.Debug": "4.0.10", + "System.Dynamic.Runtime": "4.0.0", + "System.Globalization": "4.0.10", + "System.Linq": "4.0.0", + "System.Linq.Expressions": "4.0.0", + "System.ObjectModel": "4.0.10", + 
"System.Reflection": "4.0.10", + "System.Reflection.Extensions": "4.0.0", + "System.Reflection.Primitives": "4.0.0", + "System.Reflection.TypeExtensions": "4.0.0", + "System.Resources.ResourceManager": "4.0.0", + "System.Runtime": "4.0.20", + "System.Runtime.Extensions": "4.0.10", + "System.Runtime.InteropServices": "4.0.20", + "System.Threading": "4.0.10" + }, + "compile": { + "ref/dotnet/Microsoft.CSharp.dll": {} + }, + "runtime": { + "lib/dotnet/Microsoft.CSharp.dll": {} + } + }, + "Newtonsoft.Json/8.0.2": { + "type": "package", + "compile": { + "lib/portable-net45+wp80+win8+wpa81+dnxcore50/Newtonsoft.Json.dll": {} + }, + "runtime": { + "lib/portable-net45+wp80+win8+wpa81+dnxcore50/Newtonsoft.Json.dll": {} + } + }, + "System.Collections/4.0.11-beta-23225": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Collections.dll": {} + } + }, + "System.Collections.Concurrent/4.0.10-beta-23109": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.10-beta-23109", + "System.Diagnostics.Debug": "4.0.10-beta-23109", + "System.Diagnostics.Tracing": "4.0.20-beta-23109", + "System.Globalization": "4.0.10-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109", + "System.Runtime.Extensions": "4.0.10-beta-23109", + "System.Threading": "4.0.10-beta-23109", + "System.Threading.Tasks": "4.0.10-beta-23109" + }, + "compile": { + "ref/dotnet/System.Collections.Concurrent.dll": {} + }, + "runtime": { + "lib/dotnet/System.Collections.Concurrent.dll": {} + } + }, + "System.Collections.Immutable/1.1.37": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.0", + "System.Diagnostics.Debug": "4.0.0", + "System.Globalization": "4.0.0", + "System.Linq": "4.0.0", + "System.Resources.ResourceManager": "4.0.0", + "System.Runtime": "4.0.0", + "System.Runtime.Extensions": "4.0.0", + "System.Threading": "4.0.0" + }, + "compile": { + 
"lib/dotnet/System.Collections.Immutable.dll": {} + }, + "runtime": { + "lib/dotnet/System.Collections.Immutable.dll": {} + } + }, + "System.Collections.NonGeneric/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Diagnostics.Debug": "4.0.10-beta-23109", + "System.Globalization": "4.0.10-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109", + "System.Runtime.Extensions": "4.0.10-beta-23109", + "System.Threading": "4.0.10-beta-23109" + }, + "compile": { + "ref/dotnet/System.Collections.NonGeneric.dll": {} + }, + "runtime": { + "lib/dotnet/System.Collections.NonGeneric.dll": {} + } + }, + "System.Collections.Specialized/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Collections.NonGeneric": "4.0.0-beta-23109", + "System.Globalization": "4.0.10-beta-23109", + "System.Globalization.Extensions": "4.0.0-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109", + "System.Runtime.Extensions": "4.0.10-beta-23109", + "System.Threading": "4.0.10-beta-23109" + }, + "compile": { + "ref/dotnet/System.Collections.Specialized.dll": {} + }, + "runtime": { + "lib/dotnet/System.Collections.Specialized.dll": {} + } + }, + "System.ComponentModel/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.20-beta-23109" + }, + "compile": { + "ref/dotnet/System.ComponentModel.dll": {} + }, + "runtime": { + "lib/dotnet/System.ComponentModel.dll": {} + } + }, + "System.ComponentModel.Primitives/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.ComponentModel": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109" + }, + "compile": { + "ref/dotnet/System.ComponentModel.Primitives.dll": {} + }, + "runtime": { + "lib/dotnet/System.ComponentModel.Primitives.dll": {} + } + }, + "System.ComponentModel.TypeConverter/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + 
"System.Collections": "4.0.10-beta-23109", + "System.ComponentModel": "4.0.0-beta-23109", + "System.ComponentModel.Primitives": "4.0.0-beta-23109", + "System.Globalization": "4.0.10-beta-23109", + "System.Reflection": "4.0.10-beta-23109", + "System.Reflection.Extensions": "4.0.0-beta-23109", + "System.Reflection.Primitives": "4.0.0-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109", + "System.Runtime.Extensions": "4.0.10-beta-23109", + "System.Threading": "4.0.10-beta-23109" + }, + "compile": { + "ref/dotnet/System.ComponentModel.TypeConverter.dll": {} + }, + "runtime": { + "lib/dotnet/System.ComponentModel.TypeConverter.dll": {} + } + }, + "System.Diagnostics.Debug/4.0.10": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Diagnostics.Debug.dll": {} + } + }, + "System.Diagnostics.Tracing/4.0.20-beta-23109": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.Diagnostics.Tracing.dll": {} + } + }, + "System.Dynamic.Runtime/4.0.11-beta-23225": { + "type": "package", + "dependencies": { + "System.Linq.Expressions": "4.0.10", + "System.ObjectModel": "4.0.0", + "System.Reflection": "4.0.0", + "System.Runtime": "4.0.20" + }, + "compile": { + "ref/dotnet/System.Dynamic.Runtime.dll": {} + } + }, + "System.Globalization/4.0.10": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Globalization.dll": {} + } + }, + "System.Globalization.Extensions/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Globalization": "4.0.10-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109", + "System.Runtime.Extensions": "4.0.10-beta-23109", + "System.Runtime.InteropServices": "4.0.20-beta-23109" + }, + "compile": { + "ref/dotnet/System.Globalization.Extensions.dll": 
{} + }, + "runtime": { + "lib/dotnet/System.Globalization.Extensions.dll": {} + } + }, + "System.IO/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0-beta-23109", + "System.Text.Encoding": "4.0.0-beta-23109", + "System.Threading.Tasks": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.IO.dll": {} + } + }, + "System.IO.Compression/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.0-beta-23109", + "System.IO": "4.0.0-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.0-beta-23109", + "System.Runtime.Extensions": "4.0.0-beta-23109", + "System.Runtime.InteropServices": "4.0.0-beta-23109", + "System.Text.Encoding": "4.0.0-beta-23109", + "System.Threading": "4.0.0-beta-23109", + "System.Threading.Tasks": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.IO.Compression.dll": {} + }, + "runtime": { + "lib/dotnet/System.IO.Compression.dll": {} + } + }, + "System.IO.Compression.clrcompression-x86/4.0.0-beta-23109": { + "type": "package", + "native": { + "runtimes/win7-x86/native/clrcompression.dll": {} + } + }, + "System.IO.FileSystem/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.IO": "4.0.0-beta-23109", + "System.IO.FileSystem.Primitives": "4.0.0-beta-23109", + "System.Runtime": "4.0.0-beta-23109", + "System.Runtime.Handles": "4.0.0-beta-23109", + "System.Text.Encoding": "4.0.0-beta-23109", + "System.Threading.Tasks": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.IO.FileSystem.dll": {} + } + }, + "System.IO.FileSystem.Primitives/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.20-beta-23109" + }, + "compile": { + "ref/dotnet/System.IO.FileSystem.Primitives.dll": {} + }, + "runtime": { + "lib/dotnet/System.IO.FileSystem.Primitives.dll": {} + } + }, + "System.Linq/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Collections": 
"4.0.10-beta-23109", + "System.Diagnostics.Debug": "4.0.10-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109", + "System.Runtime.Extensions": "4.0.10-beta-23109" + }, + "compile": { + "ref/dotnet/System.Linq.dll": {} + }, + "runtime": { + "lib/dotnet/System.Linq.dll": {} + } + }, + "System.Linq.Expressions/4.0.10-beta-23109": { + "type": "package", + "dependencies": { + "System.Reflection": "4.0.0-beta-23109", + "System.Runtime": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.Linq.Expressions.dll": {} + } + }, + "System.Net.Http/4.0.1-beta-23225": { + "type": "package", + "dependencies": { + "System.IO": "4.0.0", + "System.Net.Primitives": "4.0.0", + "System.Runtime": "4.0.0", + "System.Text.Encoding": "4.0.0", + "System.Threading.Tasks": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Net.Http.dll": {} + } + }, + "System.Net.Primitives/4.0.0": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Net.Primitives.dll": {} + } + }, + "System.ObjectModel/4.0.10": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.10", + "System.Diagnostics.Debug": "4.0.10", + "System.Resources.ResourceManager": "4.0.0", + "System.Runtime": "4.0.20", + "System.Threading": "4.0.10" + }, + "compile": { + "ref/dotnet/System.ObjectModel.dll": {} + }, + "runtime": { + "lib/dotnet/System.ObjectModel.dll": {} + } + }, + "System.Reflection/4.1.0-beta-23225": { + "type": "package", + "dependencies": { + "System.IO": "4.0.0", + "System.Reflection.Primitives": "4.0.0", + "System.Runtime": "4.0.20" + }, + "compile": { + "ref/dotnet/System.Reflection.dll": {} + } + }, + "System.Reflection.Emit.ILGeneration/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Reflection": "4.0.0-beta-23109", + "System.Reflection.Primitives": "4.0.0-beta-23109", + "System.Runtime": "4.0.0-beta-23109" + }, + "compile": { + 
"ref/dotnet/System.Reflection.Emit.ILGeneration.dll": {} + } + }, + "System.Reflection.Emit.Lightweight/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Reflection": "4.0.0-beta-23109", + "System.Reflection.Emit.ILGeneration": "4.0.0-beta-23109", + "System.Reflection.Primitives": "4.0.0-beta-23109", + "System.Runtime": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.Reflection.Emit.Lightweight.dll": {} + } + }, + "System.Reflection.Extensions/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Reflection": "4.0.0-beta-23109", + "System.Runtime": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.Reflection.Extensions.dll": {} + } + }, + "System.Reflection.Metadata/1.1.0-alpha-00009": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.0", + "System.Collections.Immutable": "1.1.36", + "System.Diagnostics.Debug": "4.0.0", + "System.IO": "4.0.0", + "System.Reflection": "4.0.0", + "System.Reflection.Extensions": "4.0.0", + "System.Reflection.Primitives": "4.0.0", + "System.Resources.ResourceManager": "4.0.0", + "System.Runtime": "4.0.0", + "System.Runtime.Extensions": "4.0.0", + "System.Runtime.InteropServices": "4.0.0", + "System.Text.Encoding": "4.0.0", + "System.Text.Encoding.Extensions": "4.0.0", + "System.Threading": "4.0.0" + }, + "compile": { + "lib/dotnet/System.Reflection.Metadata.dll": {} + }, + "runtime": { + "lib/dotnet/System.Reflection.Metadata.dll": {} + } + }, + "System.Reflection.Primitives/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.Reflection.Primitives.dll": {} + } + }, + "System.Reflection.TypeExtensions/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Reflection": "4.0.0-beta-23109", + "System.Runtime": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.Reflection.TypeExtensions.dll": {} + } + }, + 
"System.Resources.ResourceManager/4.0.0": { + "type": "package", + "dependencies": { + "System.Globalization": "4.0.0", + "System.Reflection": "4.0.0", + "System.Runtime": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Resources.ResourceManager.dll": {} + } + }, + "System.Runtime/4.0.21-beta-23225": { + "type": "package", + "compile": { + "ref/dotnet/System.Runtime.dll": {} + } + }, + "System.Runtime.Extensions/4.0.10": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.20" + }, + "compile": { + "ref/dotnet/System.Runtime.Extensions.dll": {} + } + }, + "System.Runtime.Handles/4.0.0": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Runtime.Handles.dll": {} + } + }, + "System.Runtime.InteropServices/4.0.20": { + "type": "package", + "dependencies": { + "System.Reflection": "4.0.0", + "System.Reflection.Primitives": "4.0.0", + "System.Runtime": "4.0.0", + "System.Runtime.Handles": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Runtime.InteropServices.dll": {} + } + }, + "System.Runtime.Serialization.Primitives/4.0.10-beta-23109": { + "type": "package", + "dependencies": { + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109" + }, + "compile": { + "ref/dotnet/System.Runtime.Serialization.Primitives.dll": {} + }, + "runtime": { + "lib/dotnet/System.Runtime.Serialization.Primitives.dll": {} + } + }, + "System.Security.Cryptography.Algorithms/4.0.0-beta-23225": { + "type": "package", + "dependencies": { + "System.IO": "4.0.0", + "System.Runtime": "4.0.0", + "System.Security.Cryptography.Primitives": "4.0.0-beta-23225" + }, + "compile": { + "ref/dotnet/System.Security.Cryptography.Algorithms.dll": {} + } + }, + "System.Security.Cryptography.Encoding/4.0.0-beta-23225": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Security.Cryptography.Encoding.dll": {} + } + }, + 
"System.Security.Cryptography.Primitives/4.0.0-beta-23225": { + "type": "package", + "dependencies": { + "System.IO": "4.0.0", + "System.Runtime": "4.0.0", + "System.Threading.Tasks": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Security.Cryptography.Primitives.dll": {} + } + }, + "System.Security.Cryptography.X509Certificates/4.0.0-beta-23225": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0", + "System.Runtime.Handles": "4.0.0", + "System.Security.Cryptography.Algorithms": "4.0.0-beta-23225", + "System.Security.Cryptography.Encoding": "4.0.0-beta-23225" + }, + "compile": { + "ref/dotnet/System.Security.Cryptography.X509Certificates.dll": {} + } + }, + "System.Text.Encoding/4.0.0": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Text.Encoding.dll": {} + } + }, + "System.Text.Encoding.Extensions/4.0.0": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0", + "System.Text.Encoding": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Text.Encoding.Extensions.dll": {} + } + }, + "System.Text.RegularExpressions/4.0.10-beta-23109": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.10-beta-23109", + "System.Globalization": "4.0.10-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109", + "System.Runtime.Extensions": "4.0.10-beta-23109", + "System.Threading": "4.0.10-beta-23109" + }, + "compile": { + "ref/dotnet/System.Text.RegularExpressions.dll": {} + }, + "runtime": { + "lib/dotnet/System.Text.RegularExpressions.dll": {} + } + }, + "System.Threading/4.0.10": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0", + "System.Threading.Tasks": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Threading.dll": {} + } + }, + "System.Threading.Tasks/4.0.10-beta-23109": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0-beta-23109" + }, + "compile": { + 
"ref/dotnet/System.Threading.Tasks.dll": {} + } + } + }, + ".NETPlatform,Version=v5.1/win7-x64": { + "Microsoft.CodeAnalysis/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.CSharp.Workspaces": "[1.1.1]", + "Microsoft.CodeAnalysis.VisualBasic.Workspaces": "[1.1.1]" + } + }, + "Microsoft.CodeAnalysis.Analyzers/1.1.0": { + "type": "package" + }, + "Microsoft.CodeAnalysis.Common/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Analyzers": "1.1.0", + "System.Collections.Immutable": "1.1.37", + "System.Reflection.Metadata": "1.1.0" + }, + "compile": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.dll": {} + }, + "runtime": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.dll": {} + } + }, + "Microsoft.CodeAnalysis.CSharp/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Common": "[1.1.1]" + }, + "compile": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.CSharp.dll": {} + }, + "runtime": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.CSharp.dll": {} + } + }, + "Microsoft.CodeAnalysis.CSharp.Workspaces/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.CSharp": "[1.1.1]", + "Microsoft.CodeAnalysis.Workspaces.Common": "[1.1.1]" + }, + "compile": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.CSharp.Workspaces.dll": {} + }, + "runtime": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.CSharp.Workspaces.dll": {} + } + }, + "Microsoft.CodeAnalysis.VisualBasic/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Common": "1.1.1" + }, + "compile": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.VisualBasic.dll": {} + }, + "runtime": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.VisualBasic.dll": {} + } + }, + "Microsoft.CodeAnalysis.VisualBasic.Workspaces/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.VisualBasic": "[1.1.1]", + "Microsoft.CodeAnalysis.Workspaces.Common": "[1.1.1]" + 
}, + "compile": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.VisualBasic.Workspaces.dll": {} + }, + "runtime": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.VisualBasic.Workspaces.dll": {} + } + }, + "Microsoft.CodeAnalysis.Workspaces.Common/1.1.1": { + "type": "package", + "dependencies": { + "Microsoft.CodeAnalysis.Common": "[1.1.1]", + "Microsoft.Composition": "1.0.27" + }, + "compile": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.Workspaces.dll": {} + }, + "runtime": { + "lib/portable-net45+win8/Microsoft.CodeAnalysis.Workspaces.dll": {} + } + }, + "Microsoft.Composition/1.0.27": { + "type": "package", + "compile": { + "lib/portable-net45+win8+wp8+wpa81/System.Composition.AttributedModel.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Convention.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Hosting.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Runtime.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.TypedParts.dll": {} + }, + "runtime": { + "lib/portable-net45+win8+wp8+wpa81/System.Composition.AttributedModel.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Convention.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Hosting.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Runtime.dll": {}, + "lib/portable-net45+win8+wp8+wpa81/System.Composition.TypedParts.dll": {} + } + }, + "Microsoft.CSharp/4.0.1-beta-23409": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.10", + "System.Diagnostics.Debug": "4.0.10", + "System.Dynamic.Runtime": "4.0.0", + "System.Globalization": "4.0.10", + "System.Linq": "4.0.0", + "System.Linq.Expressions": "4.0.0", + "System.ObjectModel": "4.0.10", + "System.Reflection": "4.0.10", + "System.Reflection.Extensions": "4.0.0", + "System.Reflection.Primitives": "4.0.0", + "System.Reflection.TypeExtensions": "4.0.0", + "System.Resources.ResourceManager": "4.0.0", + 
"System.Runtime": "4.0.20", + "System.Runtime.Extensions": "4.0.10", + "System.Runtime.InteropServices": "4.0.20", + "System.Threading": "4.0.10" + }, + "compile": { + "ref/dotnet/Microsoft.CSharp.dll": {} + }, + "runtime": { + "lib/dotnet/Microsoft.CSharp.dll": {} + } + }, + "Newtonsoft.Json/8.0.2": { + "type": "package", + "compile": { + "lib/portable-net45+wp80+win8+wpa81+dnxcore50/Newtonsoft.Json.dll": {} + }, + "runtime": { + "lib/portable-net45+wp80+win8+wpa81+dnxcore50/Newtonsoft.Json.dll": {} + } + }, + "System.Collections/4.0.11-beta-23225": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Collections.dll": {} + } + }, + "System.Collections.Concurrent/4.0.10-beta-23109": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.10-beta-23109", + "System.Diagnostics.Debug": "4.0.10-beta-23109", + "System.Diagnostics.Tracing": "4.0.20-beta-23109", + "System.Globalization": "4.0.10-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109", + "System.Runtime.Extensions": "4.0.10-beta-23109", + "System.Threading": "4.0.10-beta-23109", + "System.Threading.Tasks": "4.0.10-beta-23109" + }, + "compile": { + "ref/dotnet/System.Collections.Concurrent.dll": {} + }, + "runtime": { + "lib/dotnet/System.Collections.Concurrent.dll": {} + } + }, + "System.Collections.Immutable/1.1.37": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.0", + "System.Diagnostics.Debug": "4.0.0", + "System.Globalization": "4.0.0", + "System.Linq": "4.0.0", + "System.Resources.ResourceManager": "4.0.0", + "System.Runtime": "4.0.0", + "System.Runtime.Extensions": "4.0.0", + "System.Threading": "4.0.0" + }, + "compile": { + "lib/dotnet/System.Collections.Immutable.dll": {} + }, + "runtime": { + "lib/dotnet/System.Collections.Immutable.dll": {} + } + }, + "System.Collections.NonGeneric/4.0.0-beta-23109": { + "type": "package", + "dependencies": { 
+ "System.Diagnostics.Debug": "4.0.10-beta-23109", + "System.Globalization": "4.0.10-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109", + "System.Runtime.Extensions": "4.0.10-beta-23109", + "System.Threading": "4.0.10-beta-23109" + }, + "compile": { + "ref/dotnet/System.Collections.NonGeneric.dll": {} + }, + "runtime": { + "lib/dotnet/System.Collections.NonGeneric.dll": {} + } + }, + "System.Collections.Specialized/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Collections.NonGeneric": "4.0.0-beta-23109", + "System.Globalization": "4.0.10-beta-23109", + "System.Globalization.Extensions": "4.0.0-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109", + "System.Runtime.Extensions": "4.0.10-beta-23109", + "System.Threading": "4.0.10-beta-23109" + }, + "compile": { + "ref/dotnet/System.Collections.Specialized.dll": {} + }, + "runtime": { + "lib/dotnet/System.Collections.Specialized.dll": {} + } + }, + "System.ComponentModel/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.20-beta-23109" + }, + "compile": { + "ref/dotnet/System.ComponentModel.dll": {} + }, + "runtime": { + "lib/dotnet/System.ComponentModel.dll": {} + } + }, + "System.ComponentModel.Primitives/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.ComponentModel": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109" + }, + "compile": { + "ref/dotnet/System.ComponentModel.Primitives.dll": {} + }, + "runtime": { + "lib/dotnet/System.ComponentModel.Primitives.dll": {} + } + }, + "System.ComponentModel.TypeConverter/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.10-beta-23109", + "System.ComponentModel": "4.0.0-beta-23109", + "System.ComponentModel.Primitives": "4.0.0-beta-23109", + "System.Globalization": "4.0.10-beta-23109", + "System.Reflection": 
"4.0.10-beta-23109", + "System.Reflection.Extensions": "4.0.0-beta-23109", + "System.Reflection.Primitives": "4.0.0-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109", + "System.Runtime.Extensions": "4.0.10-beta-23109", + "System.Threading": "4.0.10-beta-23109" + }, + "compile": { + "ref/dotnet/System.ComponentModel.TypeConverter.dll": {} + }, + "runtime": { + "lib/dotnet/System.ComponentModel.TypeConverter.dll": {} + } + }, + "System.Diagnostics.Debug/4.0.10": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Diagnostics.Debug.dll": {} + } + }, + "System.Diagnostics.Tracing/4.0.20-beta-23109": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.Diagnostics.Tracing.dll": {} + } + }, + "System.Dynamic.Runtime/4.0.11-beta-23225": { + "type": "package", + "dependencies": { + "System.Linq.Expressions": "4.0.10", + "System.ObjectModel": "4.0.0", + "System.Reflection": "4.0.0", + "System.Runtime": "4.0.20" + }, + "compile": { + "ref/dotnet/System.Dynamic.Runtime.dll": {} + } + }, + "System.Globalization/4.0.10": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Globalization.dll": {} + } + }, + "System.Globalization.Extensions/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Globalization": "4.0.10-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109", + "System.Runtime.Extensions": "4.0.10-beta-23109", + "System.Runtime.InteropServices": "4.0.20-beta-23109" + }, + "compile": { + "ref/dotnet/System.Globalization.Extensions.dll": {} + }, + "runtime": { + "lib/dotnet/System.Globalization.Extensions.dll": {} + } + }, + "System.IO/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0-beta-23109", + 
"System.Text.Encoding": "4.0.0-beta-23109", + "System.Threading.Tasks": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.IO.dll": {} + } + }, + "System.IO.Compression/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.0-beta-23109", + "System.IO": "4.0.0-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.0-beta-23109", + "System.Runtime.Extensions": "4.0.0-beta-23109", + "System.Runtime.InteropServices": "4.0.0-beta-23109", + "System.Text.Encoding": "4.0.0-beta-23109", + "System.Threading": "4.0.0-beta-23109", + "System.Threading.Tasks": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.IO.Compression.dll": {} + }, + "runtime": { + "lib/dotnet/System.IO.Compression.dll": {} + } + }, + "System.IO.Compression.clrcompression-x64/4.0.0-beta-23109": { + "type": "package", + "native": { + "runtimes/win7-x64/native/clrcompression.dll": {} + } + }, + "System.IO.FileSystem/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.IO": "4.0.0-beta-23109", + "System.IO.FileSystem.Primitives": "4.0.0-beta-23109", + "System.Runtime": "4.0.0-beta-23109", + "System.Runtime.Handles": "4.0.0-beta-23109", + "System.Text.Encoding": "4.0.0-beta-23109", + "System.Threading.Tasks": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.IO.FileSystem.dll": {} + } + }, + "System.IO.FileSystem.Primitives/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.20-beta-23109" + }, + "compile": { + "ref/dotnet/System.IO.FileSystem.Primitives.dll": {} + }, + "runtime": { + "lib/dotnet/System.IO.FileSystem.Primitives.dll": {} + } + }, + "System.Linq/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.10-beta-23109", + "System.Diagnostics.Debug": "4.0.10-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109", + "System.Runtime.Extensions": 
"4.0.10-beta-23109" + }, + "compile": { + "ref/dotnet/System.Linq.dll": {} + }, + "runtime": { + "lib/dotnet/System.Linq.dll": {} + } + }, + "System.Linq.Expressions/4.0.10-beta-23109": { + "type": "package", + "dependencies": { + "System.Reflection": "4.0.0-beta-23109", + "System.Runtime": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.Linq.Expressions.dll": {} + } + }, + "System.Net.Http/4.0.1-beta-23225": { + "type": "package", + "dependencies": { + "System.IO": "4.0.0", + "System.Net.Primitives": "4.0.0", + "System.Runtime": "4.0.0", + "System.Text.Encoding": "4.0.0", + "System.Threading.Tasks": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Net.Http.dll": {} + } + }, + "System.Net.Primitives/4.0.0": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Net.Primitives.dll": {} + } + }, + "System.ObjectModel/4.0.10": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.10", + "System.Diagnostics.Debug": "4.0.10", + "System.Resources.ResourceManager": "4.0.0", + "System.Runtime": "4.0.20", + "System.Threading": "4.0.10" + }, + "compile": { + "ref/dotnet/System.ObjectModel.dll": {} + }, + "runtime": { + "lib/dotnet/System.ObjectModel.dll": {} + } + }, + "System.Reflection/4.1.0-beta-23225": { + "type": "package", + "dependencies": { + "System.IO": "4.0.0", + "System.Reflection.Primitives": "4.0.0", + "System.Runtime": "4.0.20" + }, + "compile": { + "ref/dotnet/System.Reflection.dll": {} + } + }, + "System.Reflection.Emit.ILGeneration/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Reflection": "4.0.0-beta-23109", + "System.Reflection.Primitives": "4.0.0-beta-23109", + "System.Runtime": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.Reflection.Emit.ILGeneration.dll": {} + } + }, + "System.Reflection.Emit.Lightweight/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Reflection": "4.0.0-beta-23109", + 
"System.Reflection.Emit.ILGeneration": "4.0.0-beta-23109", + "System.Reflection.Primitives": "4.0.0-beta-23109", + "System.Runtime": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.Reflection.Emit.Lightweight.dll": {} + } + }, + "System.Reflection.Extensions/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Reflection": "4.0.0-beta-23109", + "System.Runtime": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.Reflection.Extensions.dll": {} + } + }, + "System.Reflection.Metadata/1.1.0-alpha-00009": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.0", + "System.Collections.Immutable": "1.1.36", + "System.Diagnostics.Debug": "4.0.0", + "System.IO": "4.0.0", + "System.Reflection": "4.0.0", + "System.Reflection.Extensions": "4.0.0", + "System.Reflection.Primitives": "4.0.0", + "System.Resources.ResourceManager": "4.0.0", + "System.Runtime": "4.0.0", + "System.Runtime.Extensions": "4.0.0", + "System.Runtime.InteropServices": "4.0.0", + "System.Text.Encoding": "4.0.0", + "System.Text.Encoding.Extensions": "4.0.0", + "System.Threading": "4.0.0" + }, + "compile": { + "lib/dotnet/System.Reflection.Metadata.dll": {} + }, + "runtime": { + "lib/dotnet/System.Reflection.Metadata.dll": {} + } + }, + "System.Reflection.Primitives/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.Reflection.Primitives.dll": {} + } + }, + "System.Reflection.TypeExtensions/4.0.0-beta-23109": { + "type": "package", + "dependencies": { + "System.Reflection": "4.0.0-beta-23109", + "System.Runtime": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.Reflection.TypeExtensions.dll": {} + } + }, + "System.Resources.ResourceManager/4.0.0": { + "type": "package", + "dependencies": { + "System.Globalization": "4.0.0", + "System.Reflection": "4.0.0", + "System.Runtime": "4.0.0" + }, + "compile": { + 
"ref/dotnet/System.Resources.ResourceManager.dll": {} + } + }, + "System.Runtime/4.0.21-beta-23225": { + "type": "package", + "compile": { + "ref/dotnet/System.Runtime.dll": {} + } + }, + "System.Runtime.Extensions/4.0.10": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.20" + }, + "compile": { + "ref/dotnet/System.Runtime.Extensions.dll": {} + } + }, + "System.Runtime.Handles/4.0.0": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Runtime.Handles.dll": {} + } + }, + "System.Runtime.InteropServices/4.0.20": { + "type": "package", + "dependencies": { + "System.Reflection": "4.0.0", + "System.Reflection.Primitives": "4.0.0", + "System.Runtime": "4.0.0", + "System.Runtime.Handles": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Runtime.InteropServices.dll": {} + } + }, + "System.Runtime.Serialization.Primitives/4.0.10-beta-23109": { + "type": "package", + "dependencies": { + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109" + }, + "compile": { + "ref/dotnet/System.Runtime.Serialization.Primitives.dll": {} + }, + "runtime": { + "lib/dotnet/System.Runtime.Serialization.Primitives.dll": {} + } + }, + "System.Security.Cryptography.Algorithms/4.0.0-beta-23225": { + "type": "package", + "dependencies": { + "System.IO": "4.0.0", + "System.Runtime": "4.0.0", + "System.Security.Cryptography.Primitives": "4.0.0-beta-23225" + }, + "compile": { + "ref/dotnet/System.Security.Cryptography.Algorithms.dll": {} + } + }, + "System.Security.Cryptography.Encoding/4.0.0-beta-23225": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Security.Cryptography.Encoding.dll": {} + } + }, + "System.Security.Cryptography.Primitives/4.0.0-beta-23225": { + "type": "package", + "dependencies": { + "System.IO": "4.0.0", + "System.Runtime": "4.0.0", + "System.Threading.Tasks": "4.0.0" + }, + "compile": { + 
"ref/dotnet/System.Security.Cryptography.Primitives.dll": {} + } + }, + "System.Security.Cryptography.X509Certificates/4.0.0-beta-23225": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0", + "System.Runtime.Handles": "4.0.0", + "System.Security.Cryptography.Algorithms": "4.0.0-beta-23225", + "System.Security.Cryptography.Encoding": "4.0.0-beta-23225" + }, + "compile": { + "ref/dotnet/System.Security.Cryptography.X509Certificates.dll": {} + } + }, + "System.Text.Encoding/4.0.0": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Text.Encoding.dll": {} + } + }, + "System.Text.Encoding.Extensions/4.0.0": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0", + "System.Text.Encoding": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Text.Encoding.Extensions.dll": {} + } + }, + "System.Text.RegularExpressions/4.0.10-beta-23109": { + "type": "package", + "dependencies": { + "System.Collections": "4.0.10-beta-23109", + "System.Globalization": "4.0.10-beta-23109", + "System.Resources.ResourceManager": "4.0.0-beta-23109", + "System.Runtime": "4.0.20-beta-23109", + "System.Runtime.Extensions": "4.0.10-beta-23109", + "System.Threading": "4.0.10-beta-23109" + }, + "compile": { + "ref/dotnet/System.Text.RegularExpressions.dll": {} + }, + "runtime": { + "lib/dotnet/System.Text.RegularExpressions.dll": {} + } + }, + "System.Threading/4.0.10": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0", + "System.Threading.Tasks": "4.0.0" + }, + "compile": { + "ref/dotnet/System.Threading.dll": {} + } + }, + "System.Threading.Tasks/4.0.10-beta-23109": { + "type": "package", + "dependencies": { + "System.Runtime": "4.0.0-beta-23109" + }, + "compile": { + "ref/dotnet/System.Threading.Tasks.dll": {} + } + } + } + }, + "libraries": { + "AsciiDoc/1.0.0": { + "type": "project", + "path": "../../wrap/AsciiDoc/project.json" + }, + "Microsoft.CodeAnalysis/1.1.1": { + "type": 
"package", + "sha512": "X+UK+Ev5rQOCkb1HgMdUz47VK/qUvCe8FyObh3McgfIi+j2VXDsQq0YSPxqcKyQd72UmY0ttYDGLsA/ee0oC6w==", + "files": [ + "Microsoft.CodeAnalysis.1.1.1.nupkg", + "Microsoft.CodeAnalysis.1.1.1.nupkg.sha512", + "Microsoft.CodeAnalysis.nuspec", + "ThirdPartyNotices.rtf" + ] + }, + "Microsoft.CodeAnalysis.Analyzers/1.1.0": { + "type": "package", + "sha512": "HS3iRWZKcUw/8eZ/08GXKY2Bn7xNzQPzf8gRPHGSowX7u7XXu9i9YEaBeBNKUXWfI7qjvT2zXtLUvbN0hds8vg==", + "files": [ + "analyzers/dotnet/cs/Microsoft.CodeAnalysis.Analyzers.dll", + "analyzers/dotnet/cs/Microsoft.CodeAnalysis.CSharp.Analyzers.dll", + "analyzers/dotnet/vb/Microsoft.CodeAnalysis.Analyzers.dll", + "analyzers/dotnet/vb/Microsoft.CodeAnalysis.VisualBasic.Analyzers.dll", + "Microsoft.CodeAnalysis.Analyzers.1.1.0.nupkg", + "Microsoft.CodeAnalysis.Analyzers.1.1.0.nupkg.sha512", + "Microsoft.CodeAnalysis.Analyzers.nuspec", + "ThirdPartyNotices.rtf", + "tools/install.ps1", + "tools/uninstall.ps1" + ] + }, + "Microsoft.CodeAnalysis.Common/1.1.1": { + "type": "package", + "sha512": "r958nYjnl//vgjLljJLu52N2FJKlKYF5pqzyXM/C6K0w8uMcKIkJS4RXygqRBhW7ZjlsJXfiEX/JxLeDxMQUWQ==", + "files": [ + "lib/net45/Microsoft.CodeAnalysis.dll", + "lib/net45/Microsoft.CodeAnalysis.xml", + "lib/portable-net45+win8/Microsoft.CodeAnalysis.dll", + "lib/portable-net45+win8/Microsoft.CodeAnalysis.xml", + "Microsoft.CodeAnalysis.Common.1.1.1.nupkg", + "Microsoft.CodeAnalysis.Common.1.1.1.nupkg.sha512", + "Microsoft.CodeAnalysis.Common.nuspec", + "ThirdPartyNotices.rtf" + ] + }, + "Microsoft.CodeAnalysis.CSharp/1.1.1": { + "type": "package", + "sha512": "HH8f/KK4fjTWNaKhHPgjF1Hm7lGMEB6A3DK+CUzW9ZbudZTFdNwb3Oa4qDZ25HWF+ifImSdu+1bLgqKCWRbsNQ==", + "files": [ + "lib/net45/Microsoft.CodeAnalysis.CSharp.dll", + "lib/net45/Microsoft.CodeAnalysis.CSharp.xml", + "lib/portable-net45+win8/Microsoft.CodeAnalysis.CSharp.dll", + "lib/portable-net45+win8/Microsoft.CodeAnalysis.CSharp.xml", + "Microsoft.CodeAnalysis.CSharp.1.1.1.nupkg", + 
"Microsoft.CodeAnalysis.CSharp.1.1.1.nupkg.sha512", + "Microsoft.CodeAnalysis.CSharp.nuspec", + "ThirdPartyNotices.rtf" + ] + }, + "Microsoft.CodeAnalysis.CSharp.Workspaces/1.1.1": { + "type": "package", + "sha512": "8qaTrwyJYNIXO4K4XWXb2GqfqWCOPvr50iNMUjMCfovm+wwFBcLFyDN+B37n+qpeku4NoX+3qGRh5i3pxSD17Q==", + "files": [ + "lib/net45/Microsoft.CodeAnalysis.CSharp.Workspaces.dll", + "lib/net45/Microsoft.CodeAnalysis.CSharp.Workspaces.xml", + "lib/portable-net45+win8/Microsoft.CodeAnalysis.CSharp.Workspaces.dll", + "lib/portable-net45+win8/Microsoft.CodeAnalysis.CSharp.Workspaces.xml", + "Microsoft.CodeAnalysis.CSharp.Workspaces.1.1.1.nupkg", + "Microsoft.CodeAnalysis.CSharp.Workspaces.1.1.1.nupkg.sha512", + "Microsoft.CodeAnalysis.CSharp.Workspaces.nuspec", + "ThirdPartyNotices.rtf" + ] + }, + "Microsoft.CodeAnalysis.VisualBasic/1.1.1": { + "type": "package", + "sha512": "QFOiWdSHCJwy1usS/7Ka+VAAhtXeEwQCDOd1iKGllGgcJuSiwtLEchxfOkXKsFtNq0P03mVEDRfUxc+6XHR60A==", + "files": [ + "lib/net45/Microsoft.CodeAnalysis.VisualBasic.dll", + "lib/net45/Microsoft.CodeAnalysis.VisualBasic.xml", + "lib/portable-net45+win8/Microsoft.CodeAnalysis.VisualBasic.dll", + "lib/portable-net45+win8/Microsoft.CodeAnalysis.VisualBasic.xml", + "Microsoft.CodeAnalysis.VisualBasic.1.1.1.nupkg", + "Microsoft.CodeAnalysis.VisualBasic.1.1.1.nupkg.sha512", + "Microsoft.CodeAnalysis.VisualBasic.nuspec", + "ThirdPartyNotices.rtf" + ] + }, + "Microsoft.CodeAnalysis.VisualBasic.Workspaces/1.1.1": { + "type": "package", + "sha512": "CahYDtBMECLXIr5ClViDFfHKUGkb1kuRPYGCUQW53fdu5b39kkEgvm6tCppViQKV0KOOmYACmyT53xvoYYZ2Tw==", + "files": [ + "lib/net45/Microsoft.CodeAnalysis.VisualBasic.Workspaces.dll", + "lib/net45/Microsoft.CodeAnalysis.VisualBasic.Workspaces.xml", + "lib/portable-net45+win8/Microsoft.CodeAnalysis.VisualBasic.Workspaces.dll", + "lib/portable-net45+win8/Microsoft.CodeAnalysis.VisualBasic.Workspaces.xml", + "Microsoft.CodeAnalysis.VisualBasic.Workspaces.1.1.1.nupkg", + 
"Microsoft.CodeAnalysis.VisualBasic.Workspaces.1.1.1.nupkg.sha512", + "Microsoft.CodeAnalysis.VisualBasic.Workspaces.nuspec", + "ThirdPartyNotices.rtf" + ] + }, + "Microsoft.CodeAnalysis.Workspaces.Common/1.1.1": { + "type": "package", + "sha512": "IRdrJK6VK+h+0pgYXAWBTbeg9wiDv3BauHsZpKcImAOwbezHkeXi9CzYqMXV/Ll+knnctKxDou7OfxpTo1dRgQ==", + "files": [ + "lib/net45/Microsoft.CodeAnalysis.Workspaces.Desktop.dll", + "lib/net45/Microsoft.CodeAnalysis.Workspaces.Desktop.xml", + "lib/net45/Microsoft.CodeAnalysis.Workspaces.dll", + "lib/net45/Microsoft.CodeAnalysis.Workspaces.xml", + "lib/portable-net45+win8/Microsoft.CodeAnalysis.Workspaces.dll", + "lib/portable-net45+win8/Microsoft.CodeAnalysis.Workspaces.xml", + "Microsoft.CodeAnalysis.Workspaces.Common.1.1.1.nupkg", + "Microsoft.CodeAnalysis.Workspaces.Common.1.1.1.nupkg.sha512", + "Microsoft.CodeAnalysis.Workspaces.Common.nuspec", + "ThirdPartyNotices.rtf" + ] + }, + "Microsoft.Composition/1.0.27": { + "type": "package", + "sha512": "pwu80Ohe7SBzZ6i69LVdzowp6V+LaVRzd5F7A6QlD42vQkX0oT7KXKWWPlM/S00w1gnMQMRnEdbtOV12z6rXdQ==", + "files": [ + "lib/portable-net45+win8+wp8+wpa81/System.Composition.AttributedModel.dll", + "lib/portable-net45+win8+wp8+wpa81/System.Composition.AttributedModel.XML", + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Convention.dll", + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Convention.xml", + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Hosting.dll", + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Hosting.XML", + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Runtime.dll", + "lib/portable-net45+win8+wp8+wpa81/System.Composition.Runtime.XML", + "lib/portable-net45+win8+wp8+wpa81/System.Composition.TypedParts.dll", + "lib/portable-net45+win8+wp8+wpa81/System.Composition.TypedParts.XML", + "License-Stable.rtf", + "Microsoft.Composition.1.0.27.nupkg", + "Microsoft.Composition.1.0.27.nupkg.sha512", + "Microsoft.Composition.nuspec" + ] + }, + 
"Microsoft.CSharp/4.0.1-beta-23409": { + "type": "package", + "serviceable": true, + "sha512": "I1jsSsyK89VfNebrnx2eiBD5YT6zp+DcX2v6AxZ/IosS38QYmA9YKVmssMd5yhRkXwr1f8MfgZTxF1Cli90JEQ==", + "files": [ + "lib/dotnet/de/Microsoft.CSharp.xml", + "lib/dotnet/es/Microsoft.CSharp.xml", + "lib/dotnet/fr/Microsoft.CSharp.xml", + "lib/dotnet/it/Microsoft.CSharp.xml", + "lib/dotnet/ja/Microsoft.CSharp.xml", + "lib/dotnet/ko/Microsoft.CSharp.xml", + "lib/dotnet/Microsoft.CSharp.dll", + "lib/dotnet/Microsoft.CSharp.xml", + "lib/dotnet/ru/Microsoft.CSharp.xml", + "lib/dotnet/zh-hans/Microsoft.CSharp.xml", + "lib/dotnet/zh-hant/Microsoft.CSharp.xml", + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net45/_._", + "lib/netcore50/Microsoft.CSharp.dll", + "lib/netcore50/Microsoft.CSharp.xml", + "lib/win8/_._", + "lib/wp80/_._", + "lib/wpa81/_._", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "Microsoft.CSharp.4.0.1-beta-23409.nupkg", + "Microsoft.CSharp.4.0.1-beta-23409.nupkg.sha512", + "Microsoft.CSharp.nuspec", + "ref/dotnet/Microsoft.CSharp.dll", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net45/_._", + "ref/netcore50/Microsoft.CSharp.dll", + "ref/win8/_._", + "ref/wp80/_._", + "ref/wpa81/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._" + ] + }, + "Newtonsoft.Json/8.0.2": { + "type": "package", + "sha512": "e5yWmEfu68rmtG431zl9N/7PlNKQDIuiDW5MHlEFAZcecakcxrIGnKqrPAtWNILzK2oNanRB5cD150MYhECK3g==", + "files": [ + "lib/net20/Newtonsoft.Json.dll", + "lib/net20/Newtonsoft.Json.xml", + "lib/net35/Newtonsoft.Json.dll", + "lib/net35/Newtonsoft.Json.xml", + "lib/net40/Newtonsoft.Json.dll", + "lib/net40/Newtonsoft.Json.xml", + "lib/net45/Newtonsoft.Json.dll", + "lib/net45/Newtonsoft.Json.xml", + "lib/portable-net40+sl5+wp80+win8+wpa81/Newtonsoft.Json.dll", + "lib/portable-net40+sl5+wp80+win8+wpa81/Newtonsoft.Json.xml", + "lib/portable-net45+wp80+win8+wpa81+dnxcore50/Newtonsoft.Json.dll", + 
"lib/portable-net45+wp80+win8+wpa81+dnxcore50/Newtonsoft.Json.xml", + "Newtonsoft.Json.8.0.2.nupkg", + "Newtonsoft.Json.8.0.2.nupkg.sha512", + "Newtonsoft.Json.nuspec", + "tools/install.ps1" + ] + }, + "System.Collections/4.0.0": { + "type": "package", + "sha512": "i2vsGDIEbWdHcUSNDPKZP/ZWod6o740el7mGTCy0dqbCxQh74W4QoC+klUwPEtGEFuvzJ7bJgvwJqscosVNyZQ==", + "files": [ + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net45/_._", + "lib/win8/_._", + "lib/wp80/_._", + "lib/wpa81/_._", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "License.rtf", + "ref/dotnet/de/System.Collections.xml", + "ref/dotnet/es/System.Collections.xml", + "ref/dotnet/fr/System.Collections.xml", + "ref/dotnet/it/System.Collections.xml", + "ref/dotnet/ja/System.Collections.xml", + "ref/dotnet/ko/System.Collections.xml", + "ref/dotnet/ru/System.Collections.xml", + "ref/dotnet/System.Collections.dll", + "ref/dotnet/System.Collections.xml", + "ref/dotnet/zh-hans/System.Collections.xml", + "ref/dotnet/zh-hant/System.Collections.xml", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net45/_._", + "ref/netcore50/de/System.Collections.xml", + "ref/netcore50/es/System.Collections.xml", + "ref/netcore50/fr/System.Collections.xml", + "ref/netcore50/it/System.Collections.xml", + "ref/netcore50/ja/System.Collections.xml", + "ref/netcore50/ko/System.Collections.xml", + "ref/netcore50/ru/System.Collections.xml", + "ref/netcore50/System.Collections.dll", + "ref/netcore50/System.Collections.xml", + "ref/netcore50/zh-hans/System.Collections.xml", + "ref/netcore50/zh-hant/System.Collections.xml", + "ref/win8/_._", + "ref/wp80/_._", + "ref/wpa81/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "System.Collections.4.0.0.nupkg", + "System.Collections.4.0.0.nupkg.sha512", + "System.Collections.nuspec" + ] + }, + "System.Collections/4.0.11-beta-23225": { + "type": "package", + "sha512": "5nxF7rLpel65n6081k8h3ptC6qlH+5WVauBCSlxwa6niB6TJeMP+yw8B2bK7IKDrcSv3EsrN7UTkfrqssxS4Eg==", + 
"files": [ + "lib/DNXCore50/System.Collections.dll", + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net46/_._", + "lib/netcore50/System.Collections.dll", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "ref/dotnet/System.Collections.dll", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net46/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "runtimes/win8-aot/lib/netcore50/System.Collections.dll", + "System.Collections.4.0.11-beta-23225.nupkg", + "System.Collections.4.0.11-beta-23225.nupkg.sha512", + "System.Collections.nuspec" + ] + }, + "System.Collections.Concurrent/4.0.10-beta-23109": { + "type": "package", + "sha512": "fRXR5y4dg9zPovFpSQZkc6gnyMNlQJVQ8lKqPDPFSIelhqWxv2VyY87s5Vd9ViGwKrFwekDwCQFug9ny8qCHuA==", + "files": [ + "lib/dotnet/System.Collections.Concurrent.dll", + "lib/net46/_._", + "ref/dotnet/de/System.Collections.Concurrent.xml", + "ref/dotnet/es/System.Collections.Concurrent.xml", + "ref/dotnet/fr/System.Collections.Concurrent.xml", + "ref/dotnet/it/System.Collections.Concurrent.xml", + "ref/dotnet/ja/System.Collections.Concurrent.xml", + "ref/dotnet/ko/System.Collections.Concurrent.xml", + "ref/dotnet/ru/System.Collections.Concurrent.xml", + "ref/dotnet/System.Collections.Concurrent.dll", + "ref/dotnet/System.Collections.Concurrent.xml", + "ref/dotnet/zh-hans/System.Collections.Concurrent.xml", + "ref/dotnet/zh-hant/System.Collections.Concurrent.xml", + "ref/net46/_._", + "System.Collections.Concurrent.4.0.10-beta-23109.nupkg", + "System.Collections.Concurrent.4.0.10-beta-23109.nupkg.sha512", + "System.Collections.Concurrent.nuspec" + ] + }, + "System.Collections.Immutable/1.1.37": { + "type": "package", + "sha512": "fTpqwZYBzoklTT+XjTRK8KxvmrGkYHzBiylCcKyQcxiOM8k+QvhNBxRvFHDWzy4OEP5f8/9n+xQ9mEgEXY+muA==", + "files": [ + "lib/dotnet/System.Collections.Immutable.dll", + "lib/dotnet/System.Collections.Immutable.xml", + "lib/portable-net45+win8+wp8+wpa81/System.Collections.Immutable.dll", + 
"lib/portable-net45+win8+wp8+wpa81/System.Collections.Immutable.xml", + "System.Collections.Immutable.1.1.37.nupkg", + "System.Collections.Immutable.1.1.37.nupkg.sha512", + "System.Collections.Immutable.nuspec" + ] + }, + "System.Collections.NonGeneric/4.0.0-beta-23109": { + "type": "package", + "sha512": "Z+VGG3YaL4PxuBn/Nbjul1OoaCBzvkylexTwIW6s8arj3c17XkCeJvZ1BIn66lzDpAQYMGZmv97UMT6OboRoVg==", + "files": [ + "lib/dotnet/System.Collections.NonGeneric.dll", + "lib/net46/System.Collections.NonGeneric.dll", + "ref/dotnet/de/System.Collections.NonGeneric.xml", + "ref/dotnet/es/System.Collections.NonGeneric.xml", + "ref/dotnet/fr/System.Collections.NonGeneric.xml", + "ref/dotnet/it/System.Collections.NonGeneric.xml", + "ref/dotnet/ja/System.Collections.NonGeneric.xml", + "ref/dotnet/ko/System.Collections.NonGeneric.xml", + "ref/dotnet/ru/System.Collections.NonGeneric.xml", + "ref/dotnet/System.Collections.NonGeneric.dll", + "ref/dotnet/System.Collections.NonGeneric.xml", + "ref/dotnet/zh-hans/System.Collections.NonGeneric.xml", + "ref/dotnet/zh-hant/System.Collections.NonGeneric.xml", + "ref/net46/System.Collections.NonGeneric.dll", + "System.Collections.NonGeneric.4.0.0-beta-23109.nupkg", + "System.Collections.NonGeneric.4.0.0-beta-23109.nupkg.sha512", + "System.Collections.NonGeneric.nuspec" + ] + }, + "System.Collections.Specialized/4.0.0-beta-23109": { + "type": "package", + "sha512": "sHMEvjfZcBO04aDflCBqbNSsrs7NgpkXO4gZN+yt80KMEQ3iWbeK5U8CbdtjDRePNZzoYf5K1mE1c+3RR03UGA==", + "files": [ + "lib/dotnet/System.Collections.Specialized.dll", + "lib/net46/System.Collections.Specialized.dll", + "ref/dotnet/de/System.Collections.Specialized.xml", + "ref/dotnet/es/System.Collections.Specialized.xml", + "ref/dotnet/fr/System.Collections.Specialized.xml", + "ref/dotnet/it/System.Collections.Specialized.xml", + "ref/dotnet/ja/System.Collections.Specialized.xml", + "ref/dotnet/ko/System.Collections.Specialized.xml", + "ref/dotnet/ru/System.Collections.Specialized.xml", + 
"ref/dotnet/System.Collections.Specialized.dll", + "ref/dotnet/System.Collections.Specialized.xml", + "ref/dotnet/zh-hans/System.Collections.Specialized.xml", + "ref/dotnet/zh-hant/System.Collections.Specialized.xml", + "ref/net46/System.Collections.Specialized.dll", + "System.Collections.Specialized.4.0.0-beta-23109.nupkg", + "System.Collections.Specialized.4.0.0-beta-23109.nupkg.sha512", + "System.Collections.Specialized.nuspec" + ] + }, + "System.ComponentModel/4.0.0-beta-23109": { + "type": "package", + "sha512": "JFzHUvX28tQbud0c9AP+SFeOroBG9x7tgc0OYu0Z5nQotnIiwP5nHXq9DoK3N2y0MoRUt8jy2FwkRMAYgiQuNQ==", + "files": [ + "lib/dotnet/System.ComponentModel.dll", + "lib/net45/_._", + "lib/netcore50/System.ComponentModel.dll", + "lib/win8/_._", + "lib/wp80/_._", + "lib/wpa81/_._", + "ref/dotnet/de/System.ComponentModel.xml", + "ref/dotnet/es/System.ComponentModel.xml", + "ref/dotnet/fr/System.ComponentModel.xml", + "ref/dotnet/it/System.ComponentModel.xml", + "ref/dotnet/ja/System.ComponentModel.xml", + "ref/dotnet/ko/System.ComponentModel.xml", + "ref/dotnet/ru/System.ComponentModel.xml", + "ref/dotnet/System.ComponentModel.dll", + "ref/dotnet/System.ComponentModel.xml", + "ref/dotnet/zh-hans/System.ComponentModel.xml", + "ref/dotnet/zh-hant/System.ComponentModel.xml", + "ref/net45/_._", + "ref/netcore50/System.ComponentModel.dll", + "ref/netcore50/System.ComponentModel.xml", + "ref/win8/_._", + "ref/wp80/_._", + "ref/wpa81/_._", + "System.ComponentModel.4.0.0-beta-23109.nupkg", + "System.ComponentModel.4.0.0-beta-23109.nupkg.sha512", + "System.ComponentModel.nuspec" + ] + }, + "System.ComponentModel.Primitives/4.0.0-beta-23109": { + "type": "package", + "sha512": "W4QlGNghutVgxOHvA0Snz8oLh79LlBebVowBe02gJavcZszrwRWv5wBWkJgBZopXc+XW8HQJg/wX+ZYrJAyg3A==", + "files": [ + "lib/dotnet/System.ComponentModel.Primitives.dll", + "lib/net46/System.ComponentModel.Primitives.dll", + "ref/dotnet/de/System.ComponentModel.Primitives.xml", + 
"ref/dotnet/es/System.ComponentModel.Primitives.xml", + "ref/dotnet/fr/System.ComponentModel.Primitives.xml", + "ref/dotnet/it/System.ComponentModel.Primitives.xml", + "ref/dotnet/ja/System.ComponentModel.Primitives.xml", + "ref/dotnet/ko/System.ComponentModel.Primitives.xml", + "ref/dotnet/ru/System.ComponentModel.Primitives.xml", + "ref/dotnet/System.ComponentModel.Primitives.dll", + "ref/dotnet/System.ComponentModel.Primitives.xml", + "ref/dotnet/zh-hans/System.ComponentModel.Primitives.xml", + "ref/dotnet/zh-hant/System.ComponentModel.Primitives.xml", + "ref/net46/System.ComponentModel.Primitives.dll", + "System.ComponentModel.Primitives.4.0.0-beta-23109.nupkg", + "System.ComponentModel.Primitives.4.0.0-beta-23109.nupkg.sha512", + "System.ComponentModel.Primitives.nuspec" + ] + }, + "System.ComponentModel.TypeConverter/4.0.0-beta-23109": { + "type": "package", + "sha512": "knpJr7XRZbn/JJZeiUzQvOvi018Jcf24XmimpGN5Za5z8WYdBBniscgdrzWNQMfRu99duPUmxFWHLThrj9Q8Bg==", + "files": [ + "lib/dotnet/System.ComponentModel.TypeConverter.dll", + "lib/net46/System.ComponentModel.TypeConverter.dll", + "ref/dotnet/de/System.ComponentModel.TypeConverter.xml", + "ref/dotnet/es/System.ComponentModel.TypeConverter.xml", + "ref/dotnet/fr/System.ComponentModel.TypeConverter.xml", + "ref/dotnet/it/System.ComponentModel.TypeConverter.xml", + "ref/dotnet/ja/System.ComponentModel.TypeConverter.xml", + "ref/dotnet/ko/System.ComponentModel.TypeConverter.xml", + "ref/dotnet/ru/System.ComponentModel.TypeConverter.xml", + "ref/dotnet/System.ComponentModel.TypeConverter.dll", + "ref/dotnet/System.ComponentModel.TypeConverter.xml", + "ref/dotnet/zh-hans/System.ComponentModel.TypeConverter.xml", + "ref/dotnet/zh-hant/System.ComponentModel.TypeConverter.xml", + "ref/net46/System.ComponentModel.TypeConverter.dll", + "System.ComponentModel.TypeConverter.4.0.0-beta-23109.nupkg", + "System.ComponentModel.TypeConverter.4.0.0-beta-23109.nupkg.sha512", + "System.ComponentModel.TypeConverter.nuspec" + ] 
+ }, + "System.Diagnostics.Debug/4.0.0": { + "type": "package", + "sha512": "AYJsLLGDVTC/nyURjgAo7Lpye0+HuSkcQujUf+NgQVdC/C/ky5NyamQHCforHJzgqspitMMtBe8B4UBdGXy1zQ==", + "files": [ + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net45/_._", + "lib/win8/_._", + "lib/wp80/_._", + "lib/wpa81/_._", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "License.rtf", + "ref/dotnet/de/System.Diagnostics.Debug.xml", + "ref/dotnet/es/System.Diagnostics.Debug.xml", + "ref/dotnet/fr/System.Diagnostics.Debug.xml", + "ref/dotnet/it/System.Diagnostics.Debug.xml", + "ref/dotnet/ja/System.Diagnostics.Debug.xml", + "ref/dotnet/ko/System.Diagnostics.Debug.xml", + "ref/dotnet/ru/System.Diagnostics.Debug.xml", + "ref/dotnet/System.Diagnostics.Debug.dll", + "ref/dotnet/System.Diagnostics.Debug.xml", + "ref/dotnet/zh-hans/System.Diagnostics.Debug.xml", + "ref/dotnet/zh-hant/System.Diagnostics.Debug.xml", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net45/_._", + "ref/netcore50/de/System.Diagnostics.Debug.xml", + "ref/netcore50/es/System.Diagnostics.Debug.xml", + "ref/netcore50/fr/System.Diagnostics.Debug.xml", + "ref/netcore50/it/System.Diagnostics.Debug.xml", + "ref/netcore50/ja/System.Diagnostics.Debug.xml", + "ref/netcore50/ko/System.Diagnostics.Debug.xml", + "ref/netcore50/ru/System.Diagnostics.Debug.xml", + "ref/netcore50/System.Diagnostics.Debug.dll", + "ref/netcore50/System.Diagnostics.Debug.xml", + "ref/netcore50/zh-hans/System.Diagnostics.Debug.xml", + "ref/netcore50/zh-hant/System.Diagnostics.Debug.xml", + "ref/win8/_._", + "ref/wp80/_._", + "ref/wpa81/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "System.Diagnostics.Debug.4.0.0.nupkg", + "System.Diagnostics.Debug.4.0.0.nupkg.sha512", + "System.Diagnostics.Debug.nuspec" + ] + }, + "System.Diagnostics.Debug/4.0.10": { + "type": "package", + "sha512": "pi2KthuvI2LWV2c2V+fwReDsDiKpNl040h6DcwFOb59SafsPT/V1fCy0z66OKwysurJkBMmp5j5CBe3Um+ub0g==", + "files": [ + 
"lib/DNXCore50/System.Diagnostics.Debug.dll", + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net46/_._", + "lib/netcore50/System.Diagnostics.Debug.dll", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "ref/dotnet/de/System.Diagnostics.Debug.xml", + "ref/dotnet/es/System.Diagnostics.Debug.xml", + "ref/dotnet/fr/System.Diagnostics.Debug.xml", + "ref/dotnet/it/System.Diagnostics.Debug.xml", + "ref/dotnet/ja/System.Diagnostics.Debug.xml", + "ref/dotnet/ko/System.Diagnostics.Debug.xml", + "ref/dotnet/ru/System.Diagnostics.Debug.xml", + "ref/dotnet/System.Diagnostics.Debug.dll", + "ref/dotnet/System.Diagnostics.Debug.xml", + "ref/dotnet/zh-hans/System.Diagnostics.Debug.xml", + "ref/dotnet/zh-hant/System.Diagnostics.Debug.xml", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net46/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "runtimes/win8-aot/lib/netcore50/System.Diagnostics.Debug.dll", + "System.Diagnostics.Debug.4.0.10.nupkg", + "System.Diagnostics.Debug.4.0.10.nupkg.sha512", + "System.Diagnostics.Debug.nuspec" + ] + }, + "System.Diagnostics.Tracing/4.0.20-beta-23109": { + "type": "package", + "sha512": "sNauLSBFewFLjULHeplEr7FCmmfPbcc1jfz1Mynjy445bYiP/IW1q9X14a//oV/uQG7p82S9qHyfPxoxmt3pxQ==", + "files": [ + "lib/DNXCore50/System.Diagnostics.Tracing.dll", + "lib/net46/_._", + "lib/netcore50/System.Diagnostics.Tracing.dll", + "ref/dotnet/de/System.Diagnostics.Tracing.xml", + "ref/dotnet/es/System.Diagnostics.Tracing.xml", + "ref/dotnet/fr/System.Diagnostics.Tracing.xml", + "ref/dotnet/it/System.Diagnostics.Tracing.xml", + "ref/dotnet/ja/System.Diagnostics.Tracing.xml", + "ref/dotnet/ko/System.Diagnostics.Tracing.xml", + "ref/dotnet/ru/System.Diagnostics.Tracing.xml", + "ref/dotnet/System.Diagnostics.Tracing.dll", + "ref/dotnet/System.Diagnostics.Tracing.xml", + "ref/dotnet/zh-hans/System.Diagnostics.Tracing.xml", + "ref/dotnet/zh-hant/System.Diagnostics.Tracing.xml", + "ref/net46/_._", + 
"runtimes/win8-aot/lib/netcore50/System.Diagnostics.Tracing.dll", + "System.Diagnostics.Tracing.4.0.20-beta-23109.nupkg", + "System.Diagnostics.Tracing.4.0.20-beta-23109.nupkg.sha512", + "System.Diagnostics.Tracing.nuspec" + ] + }, + "System.Dynamic.Runtime/4.0.11-beta-23225": { + "type": "package", + "sha512": "5XO8dWdYR10m0FcIH7XPo2QJs8wj7Rls9C4I93PY0MHm41VPgUOyQLBfgTrgdb3LAZeqFTjsUVkWEQimYdmzvg==", + "files": [ + "lib/DNXCore50/System.Dynamic.Runtime.dll", + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net46/_._", + "lib/netcore50/System.Dynamic.Runtime.dll", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "ref/dotnet/System.Dynamic.Runtime.dll", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net46/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "runtimes/win8-aot/lib/netcore50/System.Dynamic.Runtime.dll", + "System.Dynamic.Runtime.4.0.11-beta-23225.nupkg", + "System.Dynamic.Runtime.4.0.11-beta-23225.nupkg.sha512", + "System.Dynamic.Runtime.nuspec" + ] + }, + "System.Globalization/4.0.0": { + "type": "package", + "sha512": "IBJyTo1y7ZtzzoJUA60T1XPvNTyw/wfFmjFoBFtlYfkekIOtD/AzDDIg0YdUa7eNtFEfliED2R7HdppTdU4t5A==", + "files": [ + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net45/_._", + "lib/win8/_._", + "lib/wp80/_._", + "lib/wpa81/_._", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "License.rtf", + "ref/dotnet/de/System.Globalization.xml", + "ref/dotnet/es/System.Globalization.xml", + "ref/dotnet/fr/System.Globalization.xml", + "ref/dotnet/it/System.Globalization.xml", + "ref/dotnet/ja/System.Globalization.xml", + "ref/dotnet/ko/System.Globalization.xml", + "ref/dotnet/ru/System.Globalization.xml", + "ref/dotnet/System.Globalization.dll", + "ref/dotnet/System.Globalization.xml", + "ref/dotnet/zh-hans/System.Globalization.xml", + "ref/dotnet/zh-hant/System.Globalization.xml", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net45/_._", + "ref/netcore50/de/System.Globalization.xml", + 
"ref/netcore50/es/System.Globalization.xml", + "ref/netcore50/fr/System.Globalization.xml", + "ref/netcore50/it/System.Globalization.xml", + "ref/netcore50/ja/System.Globalization.xml", + "ref/netcore50/ko/System.Globalization.xml", + "ref/netcore50/ru/System.Globalization.xml", + "ref/netcore50/System.Globalization.dll", + "ref/netcore50/System.Globalization.xml", + "ref/netcore50/zh-hans/System.Globalization.xml", + "ref/netcore50/zh-hant/System.Globalization.xml", + "ref/win8/_._", + "ref/wp80/_._", + "ref/wpa81/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "System.Globalization.4.0.0.nupkg", + "System.Globalization.4.0.0.nupkg.sha512", + "System.Globalization.nuspec" + ] + }, + "System.Globalization/4.0.10": { + "type": "package", + "sha512": "kzRtbbCNAxdafFBDogcM36ehA3th8c1PGiz8QRkZn8O5yMBorDHSK8/TGJPYOaCS5zdsGk0u9qXHnW91nqy7fw==", + "files": [ + "lib/DNXCore50/System.Globalization.dll", + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net46/_._", + "lib/netcore50/System.Globalization.dll", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "ref/dotnet/de/System.Globalization.xml", + "ref/dotnet/es/System.Globalization.xml", + "ref/dotnet/fr/System.Globalization.xml", + "ref/dotnet/it/System.Globalization.xml", + "ref/dotnet/ja/System.Globalization.xml", + "ref/dotnet/ko/System.Globalization.xml", + "ref/dotnet/ru/System.Globalization.xml", + "ref/dotnet/System.Globalization.dll", + "ref/dotnet/System.Globalization.xml", + "ref/dotnet/zh-hans/System.Globalization.xml", + "ref/dotnet/zh-hant/System.Globalization.xml", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net46/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "runtimes/win8-aot/lib/netcore50/System.Globalization.dll", + "System.Globalization.4.0.10.nupkg", + "System.Globalization.4.0.10.nupkg.sha512", + "System.Globalization.nuspec" + ] + }, + "System.Globalization.Extensions/4.0.0-beta-23109": { + "type": "package", + "sha512": 
"Q0BD+jsPZFtcR5DMcyERM2TggHJm9fJNGIMypTTh/6i6jET/c0B1MPnOyOUrHIbLuYrEwyiKDNJz0yw7Ps3hbg==", + "files": [ + "lib/dotnet/System.Globalization.Extensions.dll", + "lib/net46/System.Globalization.Extensions.dll", + "ref/dotnet/de/System.Globalization.Extensions.xml", + "ref/dotnet/es/System.Globalization.Extensions.xml", + "ref/dotnet/fr/System.Globalization.Extensions.xml", + "ref/dotnet/it/System.Globalization.Extensions.xml", + "ref/dotnet/ja/System.Globalization.Extensions.xml", + "ref/dotnet/ko/System.Globalization.Extensions.xml", + "ref/dotnet/ru/System.Globalization.Extensions.xml", + "ref/dotnet/System.Globalization.Extensions.dll", + "ref/dotnet/System.Globalization.Extensions.xml", + "ref/dotnet/zh-hans/System.Globalization.Extensions.xml", + "ref/dotnet/zh-hant/System.Globalization.Extensions.xml", + "ref/net46/System.Globalization.Extensions.dll", + "System.Globalization.Extensions.4.0.0-beta-23109.nupkg", + "System.Globalization.Extensions.4.0.0-beta-23109.nupkg.sha512", + "System.Globalization.Extensions.nuspec" + ] + }, + "System.IO/4.0.0-beta-23109": { + "type": "package", + "sha512": "5hFYIKRlgbPyYa9NKvizmrCACEbzVRpgMf+lF6erYxa5P7lPcbbQEt9C3xBbvShBwqNNzkosB5kD5MeaNKAmww==", + "files": [ + "lib/net45/_._", + "lib/win8/_._", + "lib/wp80/_._", + "lib/wpa81/_._", + "License.rtf", + "ref/dotnet/de/System.IO.xml", + "ref/dotnet/es/System.IO.xml", + "ref/dotnet/fr/System.IO.xml", + "ref/dotnet/it/System.IO.xml", + "ref/dotnet/ja/System.IO.xml", + "ref/dotnet/ko/System.IO.xml", + "ref/dotnet/ru/System.IO.xml", + "ref/dotnet/System.IO.dll", + "ref/dotnet/System.IO.xml", + "ref/dotnet/zh-hans/System.IO.xml", + "ref/dotnet/zh-hant/System.IO.xml", + "ref/net45/_._", + "ref/netcore50/de/System.IO.xml", + "ref/netcore50/es/System.IO.xml", + "ref/netcore50/fr/System.IO.xml", + "ref/netcore50/it/System.IO.xml", + "ref/netcore50/ja/System.IO.xml", + "ref/netcore50/ko/System.IO.xml", + "ref/netcore50/ru/System.IO.xml", + "ref/netcore50/System.IO.dll", + 
"ref/netcore50/System.IO.xml", + "ref/netcore50/zh-hans/System.IO.xml", + "ref/netcore50/zh-hant/System.IO.xml", + "ref/win8/_._", + "ref/wp80/_._", + "ref/wpa81/_._", + "System.IO.4.0.0-beta-23109.nupkg", + "System.IO.4.0.0-beta-23109.nupkg.sha512", + "System.IO.nuspec" + ] + }, + "System.IO/4.0.0": { + "type": "package", + "sha512": "MoCHQ0u5n0OMwUS8OX4Gl48qKiQziSW5cXvt82d+MmAcsLq9OL90+ihnu/aJ1h6OOYcBswrZAEuApfZha9w2lg==", + "files": [ + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net45/_._", + "lib/win8/_._", + "lib/wp80/_._", + "lib/wpa81/_._", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "License.rtf", + "ref/dotnet/de/System.IO.xml", + "ref/dotnet/es/System.IO.xml", + "ref/dotnet/fr/System.IO.xml", + "ref/dotnet/it/System.IO.xml", + "ref/dotnet/ja/System.IO.xml", + "ref/dotnet/ko/System.IO.xml", + "ref/dotnet/ru/System.IO.xml", + "ref/dotnet/System.IO.dll", + "ref/dotnet/System.IO.xml", + "ref/dotnet/zh-hans/System.IO.xml", + "ref/dotnet/zh-hant/System.IO.xml", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net45/_._", + "ref/netcore50/de/System.IO.xml", + "ref/netcore50/es/System.IO.xml", + "ref/netcore50/fr/System.IO.xml", + "ref/netcore50/it/System.IO.xml", + "ref/netcore50/ja/System.IO.xml", + "ref/netcore50/ko/System.IO.xml", + "ref/netcore50/ru/System.IO.xml", + "ref/netcore50/System.IO.dll", + "ref/netcore50/System.IO.xml", + "ref/netcore50/zh-hans/System.IO.xml", + "ref/netcore50/zh-hant/System.IO.xml", + "ref/win8/_._", + "ref/wp80/_._", + "ref/wpa81/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "System.IO.4.0.0.nupkg", + "System.IO.4.0.0.nupkg.sha512", + "System.IO.nuspec" + ] + }, + "System.IO.Compression/4.0.0-beta-23109": { + "type": "package", + "sha512": "kxDCzu/6/xGkpDk1Zk/NqbxngQd7B1fv53KlWutVBKWD/TwZOtGJbfDuW2eQ13IaYJAoqubMln902wICe2yzKg==", + "files": [ + "lib/dotnet/System.IO.Compression.dll", + "lib/net45/_._", + "lib/netcore50/System.IO.Compression.dll", + "lib/win8/_._", + "lib/wpa81/_._", 
+ "ref/dotnet/de/System.IO.Compression.xml", + "ref/dotnet/es/System.IO.Compression.xml", + "ref/dotnet/fr/System.IO.Compression.xml", + "ref/dotnet/it/System.IO.Compression.xml", + "ref/dotnet/ja/System.IO.Compression.xml", + "ref/dotnet/ko/System.IO.Compression.xml", + "ref/dotnet/ru/System.IO.Compression.xml", + "ref/dotnet/System.IO.Compression.dll", + "ref/dotnet/System.IO.Compression.xml", + "ref/dotnet/zh-hans/System.IO.Compression.xml", + "ref/dotnet/zh-hant/System.IO.Compression.xml", + "ref/net45/_._", + "ref/netcore50/System.IO.Compression.dll", + "ref/netcore50/System.IO.Compression.xml", + "ref/win8/_._", + "ref/wpa81/_._", + "runtime.json", + "System.IO.Compression.4.0.0-beta-23109.nupkg", + "System.IO.Compression.4.0.0-beta-23109.nupkg.sha512", + "System.IO.Compression.nuspec" + ] + }, + "System.IO.Compression.clrcompression-x64/4.0.0-beta-23109": { + "type": "package", + "sha512": "1Stn+qndkQzL+Ej31ZTbYp0vShkyWelnQZBA9e0+sN/or0js5WiD0b8PXeyf6O7O/5JmFkZTFYYmiF0qotkexQ==", + "files": [ + "runtimes/win10-x64/native/ClrCompression.dll", + "runtimes/win7-x64/native/clrcompression.dll", + "System.IO.Compression.clrcompression-x64.4.0.0-beta-23109.nupkg", + "System.IO.Compression.clrcompression-x64.4.0.0-beta-23109.nupkg.sha512", + "System.IO.Compression.clrcompression-x64.nuspec" + ] + }, + "System.IO.Compression.clrcompression-x86/4.0.0-beta-23109": { + "type": "package", + "sha512": "qlNN1b/+L1u29uOdk/OsQ0SHcS82jORlVOuqXDLv0fZa3aeiakrqI8mkx+hLm+0Z9WtrLXz5aMdfDjGM1LUmuw==", + "files": [ + "runtimes/win10-x86/native/ClrCompression.dll", + "runtimes/win7-x86/native/clrcompression.dll", + "System.IO.Compression.clrcompression-x86.4.0.0-beta-23109.nupkg", + "System.IO.Compression.clrcompression-x86.4.0.0-beta-23109.nupkg.sha512", + "System.IO.Compression.clrcompression-x86.nuspec" + ] + }, + "System.IO.FileSystem/4.0.0-beta-23109": { + "type": "package", + "sha512": "OusDe4B1/Q3BYY4QzNfztnyPPK9a1OGsQVZBg41C1bk8F2S3AOtXCx0GUwAfKbVxxS4dOdIgxWw1JN5m+IlpAA==", + 
"files": [ + "lib/DNXCore50/System.IO.FileSystem.dll", + "lib/net46/System.IO.FileSystem.dll", + "lib/netcore50/System.IO.FileSystem.dll", + "ref/dotnet/de/System.IO.FileSystem.xml", + "ref/dotnet/es/System.IO.FileSystem.xml", + "ref/dotnet/fr/System.IO.FileSystem.xml", + "ref/dotnet/it/System.IO.FileSystem.xml", + "ref/dotnet/ja/System.IO.FileSystem.xml", + "ref/dotnet/ko/System.IO.FileSystem.xml", + "ref/dotnet/ru/System.IO.FileSystem.xml", + "ref/dotnet/System.IO.FileSystem.dll", + "ref/dotnet/System.IO.FileSystem.xml", + "ref/dotnet/zh-hans/System.IO.FileSystem.xml", + "ref/dotnet/zh-hant/System.IO.FileSystem.xml", + "ref/net46/System.IO.FileSystem.dll", + "System.IO.FileSystem.4.0.0-beta-23109.nupkg", + "System.IO.FileSystem.4.0.0-beta-23109.nupkg.sha512", + "System.IO.FileSystem.nuspec" + ] + }, + "System.IO.FileSystem.Primitives/4.0.0-beta-23109": { + "type": "package", + "sha512": "1JFc6+hsLnlYgACgluhMi19zFkNruPgWoLDq30z7qMKgs3FZqShvXDZLTQ1Hk+cnenUoUU/8P8yRdmbdQaf4yg==", + "files": [ + "lib/dotnet/System.IO.FileSystem.Primitives.dll", + "lib/net46/System.IO.FileSystem.Primitives.dll", + "ref/dotnet/de/System.IO.FileSystem.Primitives.xml", + "ref/dotnet/es/System.IO.FileSystem.Primitives.xml", + "ref/dotnet/fr/System.IO.FileSystem.Primitives.xml", + "ref/dotnet/it/System.IO.FileSystem.Primitives.xml", + "ref/dotnet/ja/System.IO.FileSystem.Primitives.xml", + "ref/dotnet/ko/System.IO.FileSystem.Primitives.xml", + "ref/dotnet/ru/System.IO.FileSystem.Primitives.xml", + "ref/dotnet/System.IO.FileSystem.Primitives.dll", + "ref/dotnet/System.IO.FileSystem.Primitives.xml", + "ref/dotnet/zh-hans/System.IO.FileSystem.Primitives.xml", + "ref/dotnet/zh-hant/System.IO.FileSystem.Primitives.xml", + "ref/net46/System.IO.FileSystem.Primitives.dll", + "System.IO.FileSystem.Primitives.4.0.0-beta-23109.nupkg", + "System.IO.FileSystem.Primitives.4.0.0-beta-23109.nupkg.sha512", + "System.IO.FileSystem.Primitives.nuspec" + ] + }, + "System.Linq/4.0.0-beta-23109": { + "type": 
"package", + "sha512": "NN0sQlEAjYDdMW5SU8p75niORiKwME2hRac30HB1QVUSuMtDs/easUBMJKGnXRHmxcrdwvHApfJOiH1tZ6eTbQ==", + "files": [ + "lib/dotnet/System.Linq.dll", + "lib/net45/_._", + "lib/netcore50/System.Linq.dll", + "lib/win8/_._", + "lib/wp80/_._", + "lib/wpa81/_._", + "ref/dotnet/de/System.Linq.xml", + "ref/dotnet/es/System.Linq.xml", + "ref/dotnet/fr/System.Linq.xml", + "ref/dotnet/it/System.Linq.xml", + "ref/dotnet/ja/System.Linq.xml", + "ref/dotnet/ko/System.Linq.xml", + "ref/dotnet/ru/System.Linq.xml", + "ref/dotnet/System.Linq.dll", + "ref/dotnet/System.Linq.xml", + "ref/dotnet/zh-hans/System.Linq.xml", + "ref/dotnet/zh-hant/System.Linq.xml", + "ref/net45/_._", + "ref/netcore50/System.Linq.dll", + "ref/netcore50/System.Linq.xml", + "ref/win8/_._", + "ref/wp80/_._", + "ref/wpa81/_._", + "System.Linq.4.0.0-beta-23109.nupkg", + "System.Linq.4.0.0-beta-23109.nupkg.sha512", + "System.Linq.nuspec" + ] + }, + "System.Linq/4.0.0": { + "type": "package", + "sha512": "r6Hlc+ytE6m/9UBr+nNRRdoJEWjoeQiT3L3lXYFDHoXk3VYsRBCDNXrawcexw7KPLaH0zamQLiAb6avhZ50cGg==", + "files": [ + "lib/dotnet/System.Linq.dll", + "lib/net45/_._", + "lib/netcore50/System.Linq.dll", + "lib/win8/_._", + "lib/wp80/_._", + "lib/wpa81/_._", + "ref/dotnet/de/System.Linq.xml", + "ref/dotnet/es/System.Linq.xml", + "ref/dotnet/fr/System.Linq.xml", + "ref/dotnet/it/System.Linq.xml", + "ref/dotnet/ja/System.Linq.xml", + "ref/dotnet/ko/System.Linq.xml", + "ref/dotnet/ru/System.Linq.xml", + "ref/dotnet/System.Linq.dll", + "ref/dotnet/System.Linq.xml", + "ref/dotnet/zh-hans/System.Linq.xml", + "ref/dotnet/zh-hant/System.Linq.xml", + "ref/net45/_._", + "ref/netcore50/System.Linq.dll", + "ref/netcore50/System.Linq.xml", + "ref/win8/_._", + "ref/wp80/_._", + "ref/wpa81/_._", + "System.Linq.4.0.0.nupkg", + "System.Linq.4.0.0.nupkg.sha512", + "System.Linq.nuspec" + ] + }, + "System.Linq.Expressions/4.0.10-beta-23109": { + "type": "package", + "sha512": 
"cy+sQJW809QnFHIXt5Y/If45u362gwiNqHldnJS70l7VCjyR4Bo3UgZMZl+bowSupWxqXk69cwCQhRUyFu2hVw==", + "files": [ + "lib/DNXCore50/System.Linq.Expressions.dll", + "lib/net46/_._", + "lib/netcore50/System.Linq.Expressions.dll", + "ref/dotnet/de/System.Linq.Expressions.xml", + "ref/dotnet/es/System.Linq.Expressions.xml", + "ref/dotnet/fr/System.Linq.Expressions.xml", + "ref/dotnet/it/System.Linq.Expressions.xml", + "ref/dotnet/ja/System.Linq.Expressions.xml", + "ref/dotnet/ko/System.Linq.Expressions.xml", + "ref/dotnet/ru/System.Linq.Expressions.xml", + "ref/dotnet/System.Linq.Expressions.dll", + "ref/dotnet/System.Linq.Expressions.xml", + "ref/dotnet/zh-hans/System.Linq.Expressions.xml", + "ref/dotnet/zh-hant/System.Linq.Expressions.xml", + "ref/net46/_._", + "runtime.json", + "runtimes/win8-aot/lib/netcore50/System.Linq.Expressions.dll", + "System.Linq.Expressions.4.0.10-beta-23109.nupkg", + "System.Linq.Expressions.4.0.10-beta-23109.nupkg.sha512", + "System.Linq.Expressions.nuspec" + ] + }, + "System.Net.Http/4.0.1-beta-23225": { + "type": "package", + "sha512": "vr7kg18C/a+p3MzPIqRn2v0lIcvppuBtjFTiMRC571LAdnmllwLqdNFhwgi19x4z4dOdPfVMylZz9FWWGyWddg==", + "files": [ + "lib/DNXCore50/System.Net.Http.dll", + "lib/net45/_._", + "lib/netcore50/System.Net.Http.dll", + "lib/win8/_._", + "lib/wpa81/_._", + "ref/dotnet/System.Net.Http.dll", + "ref/net45/_._", + "ref/netcore50/System.Net.Http.dll", + "ref/win8/_._", + "ref/wpa81/_._", + "System.Net.Http.4.0.1-beta-23225.nupkg", + "System.Net.Http.4.0.1-beta-23225.nupkg.sha512", + "System.Net.Http.nuspec" + ] + }, + "System.Net.Primitives/4.0.0": { + "type": "package", + "sha512": "RcWCfqEPIGdytI4grLSG6LFe270154kMvuOs/pU+VzlKbjnW+h2c6jWf4r/tqzAELiBhibGHE2MGn+SLtl+fZg==", + "files": [ + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net45/_._", + "lib/win8/_._", + "lib/wpa81/_._", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "License.rtf", + "ref/dotnet/de/System.Net.Primitives.xml", + 
"ref/dotnet/es/System.Net.Primitives.xml", + "ref/dotnet/fr/System.Net.Primitives.xml", + "ref/dotnet/it/System.Net.Primitives.xml", + "ref/dotnet/ja/System.Net.Primitives.xml", + "ref/dotnet/ko/System.Net.Primitives.xml", + "ref/dotnet/ru/System.Net.Primitives.xml", + "ref/dotnet/System.Net.Primitives.dll", + "ref/dotnet/System.Net.Primitives.xml", + "ref/dotnet/zh-hans/System.Net.Primitives.xml", + "ref/dotnet/zh-hant/System.Net.Primitives.xml", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net45/_._", + "ref/netcore50/de/System.Net.Primitives.xml", + "ref/netcore50/es/System.Net.Primitives.xml", + "ref/netcore50/fr/System.Net.Primitives.xml", + "ref/netcore50/it/System.Net.Primitives.xml", + "ref/netcore50/ja/System.Net.Primitives.xml", + "ref/netcore50/ko/System.Net.Primitives.xml", + "ref/netcore50/ru/System.Net.Primitives.xml", + "ref/netcore50/System.Net.Primitives.dll", + "ref/netcore50/System.Net.Primitives.xml", + "ref/netcore50/zh-hans/System.Net.Primitives.xml", + "ref/netcore50/zh-hant/System.Net.Primitives.xml", + "ref/win8/_._", + "ref/wpa81/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "System.Net.Primitives.4.0.0.nupkg", + "System.Net.Primitives.4.0.0.nupkg.sha512", + "System.Net.Primitives.nuspec" + ] + }, + "System.ObjectModel/4.0.10": { + "type": "package", + "sha512": "Djn1wb0vP662zxbe+c3mOhvC4vkQGicsFs1Wi0/GJJpp3Eqp+oxbJ+p2Sx3O0efYueggAI5SW+BqEoczjfr1cA==", + "files": [ + "lib/dotnet/System.ObjectModel.dll", + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net46/_._", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "ref/dotnet/de/System.ObjectModel.xml", + "ref/dotnet/es/System.ObjectModel.xml", + "ref/dotnet/fr/System.ObjectModel.xml", + "ref/dotnet/it/System.ObjectModel.xml", + "ref/dotnet/ja/System.ObjectModel.xml", + "ref/dotnet/ko/System.ObjectModel.xml", + "ref/dotnet/ru/System.ObjectModel.xml", + "ref/dotnet/System.ObjectModel.dll", + "ref/dotnet/System.ObjectModel.xml", + 
"ref/dotnet/zh-hans/System.ObjectModel.xml", + "ref/dotnet/zh-hant/System.ObjectModel.xml", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net46/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "System.ObjectModel.4.0.10.nupkg", + "System.ObjectModel.4.0.10.nupkg.sha512", + "System.ObjectModel.nuspec" + ] + }, + "System.Reflection/4.0.0": { + "type": "package", + "sha512": "g96Rn8XuG7y4VfxPj/jnXroRJdQ8L3iN3k3zqsuzk4k3Nq4KMXARYiIO4BLW4GwX06uQpuYwRMcAC/aF117knQ==", + "files": [ + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net45/_._", + "lib/win8/_._", + "lib/wp80/_._", + "lib/wpa81/_._", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "License.rtf", + "ref/dotnet/de/System.Reflection.xml", + "ref/dotnet/es/System.Reflection.xml", + "ref/dotnet/fr/System.Reflection.xml", + "ref/dotnet/it/System.Reflection.xml", + "ref/dotnet/ja/System.Reflection.xml", + "ref/dotnet/ko/System.Reflection.xml", + "ref/dotnet/ru/System.Reflection.xml", + "ref/dotnet/System.Reflection.dll", + "ref/dotnet/System.Reflection.xml", + "ref/dotnet/zh-hans/System.Reflection.xml", + "ref/dotnet/zh-hant/System.Reflection.xml", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net45/_._", + "ref/netcore50/de/System.Reflection.xml", + "ref/netcore50/es/System.Reflection.xml", + "ref/netcore50/fr/System.Reflection.xml", + "ref/netcore50/it/System.Reflection.xml", + "ref/netcore50/ja/System.Reflection.xml", + "ref/netcore50/ko/System.Reflection.xml", + "ref/netcore50/ru/System.Reflection.xml", + "ref/netcore50/System.Reflection.dll", + "ref/netcore50/System.Reflection.xml", + "ref/netcore50/zh-hans/System.Reflection.xml", + "ref/netcore50/zh-hant/System.Reflection.xml", + "ref/win8/_._", + "ref/wp80/_._", + "ref/wpa81/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "System.Reflection.4.0.0.nupkg", + "System.Reflection.4.0.0.nupkg.sha512", + "System.Reflection.nuspec" + ] + }, + "System.Reflection/4.1.0-beta-23225": { + "type": "package", + 
"sha512": "WbLtaCxoe5XdqEyZuGpemSQ8YBJ8cj11zx+yxOxJfHbNrmu7oMQ29+J50swaqg3soUc3BVBMqfIhb/7gocDHQA==", + "files": [ + "lib/DNXCore50/System.Reflection.dll", + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net46/_._", + "lib/netcore50/System.Reflection.dll", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "ref/dotnet/System.Reflection.dll", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net46/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "runtimes/win8-aot/lib/netcore50/System.Reflection.dll", + "System.Reflection.4.1.0-beta-23225.nupkg", + "System.Reflection.4.1.0-beta-23225.nupkg.sha512", + "System.Reflection.nuspec" + ] + }, + "System.Reflection.Emit.ILGeneration/4.0.0-beta-23109": { + "type": "package", + "sha512": "tJO27blFXyvYwDLbK7GvgFC94XjeY6MnRitBI4w8LHfEc/gVdRmCC8gzkE8KEmkecnIWIt9VTSJ3p4lGgafKKA==", + "files": [ + "lib/DNXCore50/System.Reflection.Emit.ILGeneration.dll", + "lib/net45/_._", + "lib/netcore50/System.Reflection.Emit.ILGeneration.dll", + "lib/wp80/_._", + "ref/dotnet/de/System.Reflection.Emit.ILGeneration.xml", + "ref/dotnet/es/System.Reflection.Emit.ILGeneration.xml", + "ref/dotnet/fr/System.Reflection.Emit.ILGeneration.xml", + "ref/dotnet/it/System.Reflection.Emit.ILGeneration.xml", + "ref/dotnet/ja/System.Reflection.Emit.ILGeneration.xml", + "ref/dotnet/ko/System.Reflection.Emit.ILGeneration.xml", + "ref/dotnet/ru/System.Reflection.Emit.ILGeneration.xml", + "ref/dotnet/System.Reflection.Emit.ILGeneration.dll", + "ref/dotnet/System.Reflection.Emit.ILGeneration.xml", + "ref/dotnet/zh-hans/System.Reflection.Emit.ILGeneration.xml", + "ref/dotnet/zh-hant/System.Reflection.Emit.ILGeneration.xml", + "ref/net45/_._", + "ref/wp80/_._", + "System.Reflection.Emit.ILGeneration.4.0.0-beta-23109.nupkg", + "System.Reflection.Emit.ILGeneration.4.0.0-beta-23109.nupkg.sha512", + "System.Reflection.Emit.ILGeneration.nuspec" + ] + }, + "System.Reflection.Emit.Lightweight/4.0.0-beta-23109": { + "type": "package", + 
"sha512": "Kbg03ijw7jZUswutleX/zmGgz6VYszTuyy4DciBn1PHFYAqAgZFZdK3cpYCsAU+cPliQ35UcOX828OsAG9NU4w==", + "files": [ + "lib/DNXCore50/System.Reflection.Emit.Lightweight.dll", + "lib/net45/_._", + "lib/netcore50/System.Reflection.Emit.Lightweight.dll", + "lib/wp80/_._", + "ref/dotnet/de/System.Reflection.Emit.Lightweight.xml", + "ref/dotnet/es/System.Reflection.Emit.Lightweight.xml", + "ref/dotnet/fr/System.Reflection.Emit.Lightweight.xml", + "ref/dotnet/it/System.Reflection.Emit.Lightweight.xml", + "ref/dotnet/ja/System.Reflection.Emit.Lightweight.xml", + "ref/dotnet/ko/System.Reflection.Emit.Lightweight.xml", + "ref/dotnet/ru/System.Reflection.Emit.Lightweight.xml", + "ref/dotnet/System.Reflection.Emit.Lightweight.dll", + "ref/dotnet/System.Reflection.Emit.Lightweight.xml", + "ref/dotnet/zh-hans/System.Reflection.Emit.Lightweight.xml", + "ref/dotnet/zh-hant/System.Reflection.Emit.Lightweight.xml", + "ref/net45/_._", + "ref/wp80/_._", + "System.Reflection.Emit.Lightweight.4.0.0-beta-23109.nupkg", + "System.Reflection.Emit.Lightweight.4.0.0-beta-23109.nupkg.sha512", + "System.Reflection.Emit.Lightweight.nuspec" + ] + }, + "System.Reflection.Extensions/4.0.0-beta-23109": { + "type": "package", + "sha512": "sXo3+qJ7TukCF/rDFYKjEog4iVb75dXJztu/lkvhMHgYnviPSbi9zg78FQUFhB6BOvntwnknrgNJhhtl3x9a7A==", + "files": [ + "lib/DNXCore50/System.Reflection.Extensions.dll", + "lib/net45/_._", + "lib/netcore50/System.Reflection.Extensions.dll", + "lib/win8/_._", + "lib/wp80/_._", + "lib/wpa81/_._", + "ref/dotnet/de/System.Reflection.Extensions.xml", + "ref/dotnet/es/System.Reflection.Extensions.xml", + "ref/dotnet/fr/System.Reflection.Extensions.xml", + "ref/dotnet/it/System.Reflection.Extensions.xml", + "ref/dotnet/ja/System.Reflection.Extensions.xml", + "ref/dotnet/ko/System.Reflection.Extensions.xml", + "ref/dotnet/ru/System.Reflection.Extensions.xml", + "ref/dotnet/System.Reflection.Extensions.dll", + "ref/dotnet/System.Reflection.Extensions.xml", + 
"ref/dotnet/zh-hans/System.Reflection.Extensions.xml", + "ref/dotnet/zh-hant/System.Reflection.Extensions.xml", + "ref/net45/_._", + "ref/netcore50/System.Reflection.Extensions.dll", + "ref/netcore50/System.Reflection.Extensions.xml", + "ref/win8/_._", + "ref/wp80/_._", + "ref/wpa81/_._", + "runtimes/win8-aot/lib/netcore50/System.Reflection.Extensions.dll", + "System.Reflection.Extensions.4.0.0-beta-23109.nupkg", + "System.Reflection.Extensions.4.0.0-beta-23109.nupkg.sha512", + "System.Reflection.Extensions.nuspec" + ] + }, + "System.Reflection.Extensions/4.0.0": { + "type": "package", + "sha512": "dbYaZWCyFAu1TGYUqR2n+Q+1casSHPR2vVW0WVNkXpZbrd2BXcZ7cpvpu9C98CTHtNmyfMWCLpCclDqly23t6A==", + "files": [ + "lib/DNXCore50/System.Reflection.Extensions.dll", + "lib/net45/_._", + "lib/netcore50/System.Reflection.Extensions.dll", + "lib/win8/_._", + "lib/wp80/_._", + "lib/wpa81/_._", + "ref/dotnet/de/System.Reflection.Extensions.xml", + "ref/dotnet/es/System.Reflection.Extensions.xml", + "ref/dotnet/fr/System.Reflection.Extensions.xml", + "ref/dotnet/it/System.Reflection.Extensions.xml", + "ref/dotnet/ja/System.Reflection.Extensions.xml", + "ref/dotnet/ko/System.Reflection.Extensions.xml", + "ref/dotnet/ru/System.Reflection.Extensions.xml", + "ref/dotnet/System.Reflection.Extensions.dll", + "ref/dotnet/System.Reflection.Extensions.xml", + "ref/dotnet/zh-hans/System.Reflection.Extensions.xml", + "ref/dotnet/zh-hant/System.Reflection.Extensions.xml", + "ref/net45/_._", + "ref/netcore50/System.Reflection.Extensions.dll", + "ref/netcore50/System.Reflection.Extensions.xml", + "ref/win8/_._", + "ref/wp80/_._", + "ref/wpa81/_._", + "runtimes/win8-aot/lib/netcore50/System.Reflection.Extensions.dll", + "System.Reflection.Extensions.4.0.0.nupkg", + "System.Reflection.Extensions.4.0.0.nupkg.sha512", + "System.Reflection.Extensions.nuspec" + ] + }, + "System.Reflection.Metadata/1.1.0-alpha-00009": { + "type": "package", + "sha512": 
"mYqM9/TzKM0HAWeo31bDadJe47VR3XSk7BmMRUcR2tNDPgKj3VPuKqcnFMquqbFaOd6ZApu4gQ4YWACnrcXcFQ==", + "files": [ + "lib/dotnet/System.Reflection.Metadata.dll", + "lib/dotnet/System.Reflection.Metadata.xml", + "lib/portable-net45+win8/System.Reflection.Metadata.dll", + "lib/portable-net45+win8/System.Reflection.Metadata.xml", + "System.Reflection.Metadata.1.1.0-alpha-00009.nupkg", + "System.Reflection.Metadata.1.1.0-alpha-00009.nupkg.sha512", + "System.Reflection.Metadata.nuspec" + ] + }, + "System.Reflection.Metadata/1.1.0": { + "type": "package", + "serviceable": true, + "sha512": "a8VsRm/B0Ik1o5FumSMWmpwbG7cvIIajAYhzTTy9VB9XItByJDQHGZkQTIAdsvVJ6MI5O3uH/lb0izgQDlDIWA==", + "files": [ + "lib/dotnet5.2/System.Reflection.Metadata.dll", + "lib/dotnet5.2/System.Reflection.Metadata.xml", + "lib/portable-net45+win8/System.Reflection.Metadata.dll", + "lib/portable-net45+win8/System.Reflection.Metadata.xml", + "System.Reflection.Metadata.1.1.0.nupkg", + "System.Reflection.Metadata.1.1.0.nupkg.sha512", + "System.Reflection.Metadata.nuspec" + ] + }, + "System.Reflection.Primitives/4.0.0-beta-23109": { + "type": "package", + "sha512": "R5oBDNgUtSV9KTPCxwHWiTyryX/9mngnPbLWwFOEF5xcNd3Qlu6/3LiauGsIzGtXV8vOpRl9tfyIFy8t4ix4Gw==", + "files": [ + "lib/DNXCore50/System.Reflection.Primitives.dll", + "lib/net45/_._", + "lib/netcore50/System.Reflection.Primitives.dll", + "lib/win8/_._", + "lib/wp80/_._", + "lib/wpa81/_._", + "ref/dotnet/de/System.Reflection.Primitives.xml", + "ref/dotnet/es/System.Reflection.Primitives.xml", + "ref/dotnet/fr/System.Reflection.Primitives.xml", + "ref/dotnet/it/System.Reflection.Primitives.xml", + "ref/dotnet/ja/System.Reflection.Primitives.xml", + "ref/dotnet/ko/System.Reflection.Primitives.xml", + "ref/dotnet/ru/System.Reflection.Primitives.xml", + "ref/dotnet/System.Reflection.Primitives.dll", + "ref/dotnet/System.Reflection.Primitives.xml", + "ref/dotnet/zh-hans/System.Reflection.Primitives.xml", + "ref/dotnet/zh-hant/System.Reflection.Primitives.xml", + 
"ref/net45/_._", + "ref/netcore50/System.Reflection.Primitives.dll", + "ref/netcore50/System.Reflection.Primitives.xml", + "ref/win8/_._", + "ref/wp80/_._", + "ref/wpa81/_._", + "runtimes/win8-aot/lib/netcore50/System.Reflection.Primitives.dll", + "System.Reflection.Primitives.4.0.0-beta-23109.nupkg", + "System.Reflection.Primitives.4.0.0-beta-23109.nupkg.sha512", + "System.Reflection.Primitives.nuspec" + ] + }, + "System.Reflection.Primitives/4.0.0": { + "type": "package", + "sha512": "n9S0XpKv2ruc17FSnaiX6nV47VfHTZ1wLjKZlAirUZCvDQCH71mVp+Ohabn0xXLh5pK2PKp45HCxkqu5Fxn/lA==", + "files": [ + "lib/DNXCore50/System.Reflection.Primitives.dll", + "lib/net45/_._", + "lib/netcore50/System.Reflection.Primitives.dll", + "lib/win8/_._", + "lib/wp80/_._", + "lib/wpa81/_._", + "ref/dotnet/de/System.Reflection.Primitives.xml", + "ref/dotnet/es/System.Reflection.Primitives.xml", + "ref/dotnet/fr/System.Reflection.Primitives.xml", + "ref/dotnet/it/System.Reflection.Primitives.xml", + "ref/dotnet/ja/System.Reflection.Primitives.xml", + "ref/dotnet/ko/System.Reflection.Primitives.xml", + "ref/dotnet/ru/System.Reflection.Primitives.xml", + "ref/dotnet/System.Reflection.Primitives.dll", + "ref/dotnet/System.Reflection.Primitives.xml", + "ref/dotnet/zh-hans/System.Reflection.Primitives.xml", + "ref/dotnet/zh-hant/System.Reflection.Primitives.xml", + "ref/net45/_._", + "ref/netcore50/System.Reflection.Primitives.dll", + "ref/netcore50/System.Reflection.Primitives.xml", + "ref/win8/_._", + "ref/wp80/_._", + "ref/wpa81/_._", + "runtimes/win8-aot/lib/netcore50/System.Reflection.Primitives.dll", + "System.Reflection.Primitives.4.0.0.nupkg", + "System.Reflection.Primitives.4.0.0.nupkg.sha512", + "System.Reflection.Primitives.nuspec" + ] + }, + "System.Reflection.TypeExtensions/4.0.0-beta-23109": { + "type": "package", + "sha512": "lTuPMxeoT/Jqb1QtsIuSomLiJpQP3Y4DqqxLPncrMvLrejwdF25QSbhXdx5twgvBDqwSVL8wYtqiOqKe02u7FA==", + "files": [ + "lib/DNXCore50/System.Reflection.TypeExtensions.dll", + 
"lib/net46/System.Reflection.TypeExtensions.dll", + "lib/netcore50/System.Reflection.TypeExtensions.dll", + "ref/dotnet/de/System.Reflection.TypeExtensions.xml", + "ref/dotnet/es/System.Reflection.TypeExtensions.xml", + "ref/dotnet/fr/System.Reflection.TypeExtensions.xml", + "ref/dotnet/it/System.Reflection.TypeExtensions.xml", + "ref/dotnet/ja/System.Reflection.TypeExtensions.xml", + "ref/dotnet/ko/System.Reflection.TypeExtensions.xml", + "ref/dotnet/ru/System.Reflection.TypeExtensions.xml", + "ref/dotnet/System.Reflection.TypeExtensions.dll", + "ref/dotnet/System.Reflection.TypeExtensions.xml", + "ref/dotnet/zh-hans/System.Reflection.TypeExtensions.xml", + "ref/dotnet/zh-hant/System.Reflection.TypeExtensions.xml", + "ref/net46/System.Reflection.TypeExtensions.dll", + "runtimes/win8-aot/lib/netcore50/System.Reflection.TypeExtensions.dll", + "System.Reflection.TypeExtensions.4.0.0-beta-23109.nupkg", + "System.Reflection.TypeExtensions.4.0.0-beta-23109.nupkg.sha512", + "System.Reflection.TypeExtensions.nuspec" + ] + }, + "System.Resources.ResourceManager/4.0.0": { + "type": "package", + "sha512": "qmqeZ4BJgjfU+G2JbrZt4Dk1LsMxO4t+f/9HarNY6w8pBgweO6jT+cknUH7c3qIrGvyUqraBhU45Eo6UtA0fAw==", + "files": [ + "lib/DNXCore50/System.Resources.ResourceManager.dll", + "lib/net45/_._", + "lib/netcore50/System.Resources.ResourceManager.dll", + "lib/win8/_._", + "lib/wp80/_._", + "lib/wpa81/_._", + "ref/dotnet/de/System.Resources.ResourceManager.xml", + "ref/dotnet/es/System.Resources.ResourceManager.xml", + "ref/dotnet/fr/System.Resources.ResourceManager.xml", + "ref/dotnet/it/System.Resources.ResourceManager.xml", + "ref/dotnet/ja/System.Resources.ResourceManager.xml", + "ref/dotnet/ko/System.Resources.ResourceManager.xml", + "ref/dotnet/ru/System.Resources.ResourceManager.xml", + "ref/dotnet/System.Resources.ResourceManager.dll", + "ref/dotnet/System.Resources.ResourceManager.xml", + "ref/dotnet/zh-hans/System.Resources.ResourceManager.xml", + 
"ref/dotnet/zh-hant/System.Resources.ResourceManager.xml", + "ref/net45/_._", + "ref/netcore50/System.Resources.ResourceManager.dll", + "ref/netcore50/System.Resources.ResourceManager.xml", + "ref/win8/_._", + "ref/wp80/_._", + "ref/wpa81/_._", + "runtimes/win8-aot/lib/netcore50/System.Resources.ResourceManager.dll", + "System.Resources.ResourceManager.4.0.0.nupkg", + "System.Resources.ResourceManager.4.0.0.nupkg.sha512", + "System.Resources.ResourceManager.nuspec" + ] + }, + "System.Runtime/4.0.0": { + "type": "package", + "sha512": "Uq9epame8hEqJlj4KaWb67dDJvj4IM37jRFGVeFbugRdPz48bR0voyBhrbf3iSa2tAmlkg4lsa6BUOL9iwlMew==", + "files": [ + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net45/_._", + "lib/win8/_._", + "lib/wp80/_._", + "lib/wpa81/_._", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "License.rtf", + "ref/dotnet/de/System.Runtime.xml", + "ref/dotnet/es/System.Runtime.xml", + "ref/dotnet/fr/System.Runtime.xml", + "ref/dotnet/it/System.Runtime.xml", + "ref/dotnet/ja/System.Runtime.xml", + "ref/dotnet/ko/System.Runtime.xml", + "ref/dotnet/ru/System.Runtime.xml", + "ref/dotnet/System.Runtime.dll", + "ref/dotnet/System.Runtime.xml", + "ref/dotnet/zh-hans/System.Runtime.xml", + "ref/dotnet/zh-hant/System.Runtime.xml", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net45/_._", + "ref/netcore50/de/System.Runtime.xml", + "ref/netcore50/es/System.Runtime.xml", + "ref/netcore50/fr/System.Runtime.xml", + "ref/netcore50/it/System.Runtime.xml", + "ref/netcore50/ja/System.Runtime.xml", + "ref/netcore50/ko/System.Runtime.xml", + "ref/netcore50/ru/System.Runtime.xml", + "ref/netcore50/System.Runtime.dll", + "ref/netcore50/System.Runtime.xml", + "ref/netcore50/zh-hans/System.Runtime.xml", + "ref/netcore50/zh-hant/System.Runtime.xml", + "ref/win8/_._", + "ref/wp80/_._", + "ref/wpa81/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "System.Runtime.4.0.0.nupkg", + "System.Runtime.4.0.0.nupkg.sha512", + "System.Runtime.nuspec" + ] + 
}, + "System.Runtime/4.0.21-beta-23225": { + "type": "package", + "sha512": "3YYoQMbbiX6ayLsynZtnA+x7KIMier2NivCOxNzlfDb/ZmpVFCQ2XyYd33JYAVM7TnfFbFMzKZxD1kC+zo8wYA==", + "files": [ + "lib/DNXCore50/System.Runtime.dll", + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net46/_._", + "lib/netcore50/System.Runtime.dll", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "ref/dotnet/System.Runtime.dll", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net46/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "runtimes/win8-aot/lib/netcore50/System.Runtime.dll", + "System.Runtime.4.0.21-beta-23225.nupkg", + "System.Runtime.4.0.21-beta-23225.nupkg.sha512", + "System.Runtime.nuspec" + ] + }, + "System.Runtime.Extensions/4.0.0": { + "type": "package", + "sha512": "zPzwoJcA7qar/b5Ihhzfcdr3vBOR8FIg7u//Qc5mqyAriasXuMFVraBZ5vOQq5asfun9ryNEL8Z2BOlUK5QRqA==", + "files": [ + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net45/_._", + "lib/win8/_._", + "lib/wp80/_._", + "lib/wpa81/_._", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "License.rtf", + "ref/dotnet/de/System.Runtime.Extensions.xml", + "ref/dotnet/es/System.Runtime.Extensions.xml", + "ref/dotnet/fr/System.Runtime.Extensions.xml", + "ref/dotnet/it/System.Runtime.Extensions.xml", + "ref/dotnet/ja/System.Runtime.Extensions.xml", + "ref/dotnet/ko/System.Runtime.Extensions.xml", + "ref/dotnet/ru/System.Runtime.Extensions.xml", + "ref/dotnet/System.Runtime.Extensions.dll", + "ref/dotnet/System.Runtime.Extensions.xml", + "ref/dotnet/zh-hans/System.Runtime.Extensions.xml", + "ref/dotnet/zh-hant/System.Runtime.Extensions.xml", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net45/_._", + "ref/netcore50/de/System.Runtime.Extensions.xml", + "ref/netcore50/es/System.Runtime.Extensions.xml", + "ref/netcore50/fr/System.Runtime.Extensions.xml", + "ref/netcore50/it/System.Runtime.Extensions.xml", + "ref/netcore50/ja/System.Runtime.Extensions.xml", + 
"ref/netcore50/ko/System.Runtime.Extensions.xml", + "ref/netcore50/ru/System.Runtime.Extensions.xml", + "ref/netcore50/System.Runtime.Extensions.dll", + "ref/netcore50/System.Runtime.Extensions.xml", + "ref/netcore50/zh-hans/System.Runtime.Extensions.xml", + "ref/netcore50/zh-hant/System.Runtime.Extensions.xml", + "ref/win8/_._", + "ref/wp80/_._", + "ref/wpa81/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "System.Runtime.Extensions.4.0.0.nupkg", + "System.Runtime.Extensions.4.0.0.nupkg.sha512", + "System.Runtime.Extensions.nuspec" + ] + }, + "System.Runtime.Extensions/4.0.10": { + "type": "package", + "sha512": "5dsEwf3Iml7d5OZeT20iyOjT+r+okWpN7xI2v+R4cgd3WSj4DeRPTvPFjDpacbVW4skCAZ8B9hxXJYgkCFKJ1A==", + "files": [ + "lib/DNXCore50/System.Runtime.Extensions.dll", + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net46/_._", + "lib/netcore50/System.Runtime.Extensions.dll", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "ref/dotnet/de/System.Runtime.Extensions.xml", + "ref/dotnet/es/System.Runtime.Extensions.xml", + "ref/dotnet/fr/System.Runtime.Extensions.xml", + "ref/dotnet/it/System.Runtime.Extensions.xml", + "ref/dotnet/ja/System.Runtime.Extensions.xml", + "ref/dotnet/ko/System.Runtime.Extensions.xml", + "ref/dotnet/ru/System.Runtime.Extensions.xml", + "ref/dotnet/System.Runtime.Extensions.dll", + "ref/dotnet/System.Runtime.Extensions.xml", + "ref/dotnet/zh-hans/System.Runtime.Extensions.xml", + "ref/dotnet/zh-hant/System.Runtime.Extensions.xml", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net46/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "runtimes/win8-aot/lib/netcore50/System.Runtime.Extensions.dll", + "System.Runtime.Extensions.4.0.10.nupkg", + "System.Runtime.Extensions.4.0.10.nupkg.sha512", + "System.Runtime.Extensions.nuspec" + ] + }, + "System.Runtime.Handles/4.0.0": { + "type": "package", + "sha512": "638VhpRq63tVcQ6HDb3um3R/J2BtR1Sa96toHo6PcJGPXEPEsleCuqhBgX2gFCz0y0qkutANwW6VPPY5wQu1XQ==", + 
"files": [ + "lib/DNXCore50/System.Runtime.Handles.dll", + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net46/_._", + "lib/netcore50/System.Runtime.Handles.dll", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "ref/dotnet/de/System.Runtime.Handles.xml", + "ref/dotnet/es/System.Runtime.Handles.xml", + "ref/dotnet/fr/System.Runtime.Handles.xml", + "ref/dotnet/it/System.Runtime.Handles.xml", + "ref/dotnet/ja/System.Runtime.Handles.xml", + "ref/dotnet/ko/System.Runtime.Handles.xml", + "ref/dotnet/ru/System.Runtime.Handles.xml", + "ref/dotnet/System.Runtime.Handles.dll", + "ref/dotnet/System.Runtime.Handles.xml", + "ref/dotnet/zh-hans/System.Runtime.Handles.xml", + "ref/dotnet/zh-hant/System.Runtime.Handles.xml", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net46/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "runtimes/win8-aot/lib/netcore50/System.Runtime.Handles.dll", + "System.Runtime.Handles.4.0.0.nupkg", + "System.Runtime.Handles.4.0.0.nupkg.sha512", + "System.Runtime.Handles.nuspec" + ] + }, + "System.Runtime.InteropServices/4.0.0": { + "type": "package", + "sha512": "J8GBB0OsVuKJXR412x6uZdoyNi4y9OMjjJRHPutRHjqujuvthus6Xdxn/i8J1lL2PK+2jWCLpZp72h8x73hkLg==", + "files": [ + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net45/_._", + "lib/win8/_._", + "lib/wpa81/_._", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "License.rtf", + "ref/dotnet/de/System.Runtime.InteropServices.xml", + "ref/dotnet/es/System.Runtime.InteropServices.xml", + "ref/dotnet/fr/System.Runtime.InteropServices.xml", + "ref/dotnet/it/System.Runtime.InteropServices.xml", + "ref/dotnet/ja/System.Runtime.InteropServices.xml", + "ref/dotnet/ko/System.Runtime.InteropServices.xml", + "ref/dotnet/ru/System.Runtime.InteropServices.xml", + "ref/dotnet/System.Runtime.InteropServices.dll", + "ref/dotnet/System.Runtime.InteropServices.xml", + "ref/dotnet/zh-hans/System.Runtime.InteropServices.xml", + 
"ref/dotnet/zh-hant/System.Runtime.InteropServices.xml", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net45/_._", + "ref/netcore50/de/System.Runtime.InteropServices.xml", + "ref/netcore50/es/System.Runtime.InteropServices.xml", + "ref/netcore50/fr/System.Runtime.InteropServices.xml", + "ref/netcore50/it/System.Runtime.InteropServices.xml", + "ref/netcore50/ja/System.Runtime.InteropServices.xml", + "ref/netcore50/ko/System.Runtime.InteropServices.xml", + "ref/netcore50/ru/System.Runtime.InteropServices.xml", + "ref/netcore50/System.Runtime.InteropServices.dll", + "ref/netcore50/System.Runtime.InteropServices.xml", + "ref/netcore50/zh-hans/System.Runtime.InteropServices.xml", + "ref/netcore50/zh-hant/System.Runtime.InteropServices.xml", + "ref/win8/_._", + "ref/wpa81/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "System.Runtime.InteropServices.4.0.0.nupkg", + "System.Runtime.InteropServices.4.0.0.nupkg.sha512", + "System.Runtime.InteropServices.nuspec" + ] + }, + "System.Runtime.InteropServices/4.0.20": { + "type": "package", + "sha512": "ZgDyBYfEnjWoz/viS6VOswA6XOkDSH2DzgbpczbW50RywhnCgTl+w3JEvtAiOGyIh8cyx1NJq80jsNBSUr8Pig==", + "files": [ + "lib/DNXCore50/System.Runtime.InteropServices.dll", + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net46/_._", + "lib/netcore50/System.Runtime.InteropServices.dll", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "ref/dotnet/de/System.Runtime.InteropServices.xml", + "ref/dotnet/es/System.Runtime.InteropServices.xml", + "ref/dotnet/fr/System.Runtime.InteropServices.xml", + "ref/dotnet/it/System.Runtime.InteropServices.xml", + "ref/dotnet/ja/System.Runtime.InteropServices.xml", + "ref/dotnet/ko/System.Runtime.InteropServices.xml", + "ref/dotnet/ru/System.Runtime.InteropServices.xml", + "ref/dotnet/System.Runtime.InteropServices.dll", + "ref/dotnet/System.Runtime.InteropServices.xml", + "ref/dotnet/zh-hans/System.Runtime.InteropServices.xml", + 
"ref/dotnet/zh-hant/System.Runtime.InteropServices.xml", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net46/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "runtimes/win8-aot/lib/netcore50/System.Runtime.InteropServices.dll", + "System.Runtime.InteropServices.4.0.20.nupkg", + "System.Runtime.InteropServices.4.0.20.nupkg.sha512", + "System.Runtime.InteropServices.nuspec" + ] + }, + "System.Runtime.Serialization.Primitives/4.0.10-beta-23109": { + "type": "package", + "sha512": "Tr4N2JNBj0sM+apa0tMBXTI0yg8wlrWsiDHFtuPFQL+yiutGCgS8fyUcAYNUp4HccsD6leGT8D1N8+0/eMJQSQ==", + "files": [ + "lib/dotnet/System.Runtime.Serialization.Primitives.dll", + "lib/net46/_._", + "ref/dotnet/de/System.Runtime.Serialization.Primitives.xml", + "ref/dotnet/es/System.Runtime.Serialization.Primitives.xml", + "ref/dotnet/fr/System.Runtime.Serialization.Primitives.xml", + "ref/dotnet/it/System.Runtime.Serialization.Primitives.xml", + "ref/dotnet/ja/System.Runtime.Serialization.Primitives.xml", + "ref/dotnet/ko/System.Runtime.Serialization.Primitives.xml", + "ref/dotnet/ru/System.Runtime.Serialization.Primitives.xml", + "ref/dotnet/System.Runtime.Serialization.Primitives.dll", + "ref/dotnet/System.Runtime.Serialization.Primitives.xml", + "ref/dotnet/zh-hans/System.Runtime.Serialization.Primitives.xml", + "ref/dotnet/zh-hant/System.Runtime.Serialization.Primitives.xml", + "ref/net46/_._", + "System.Runtime.Serialization.Primitives.4.0.10-beta-23109.nupkg", + "System.Runtime.Serialization.Primitives.4.0.10-beta-23109.nupkg.sha512", + "System.Runtime.Serialization.Primitives.nuspec" + ] + }, + "System.Security.Cryptography.Algorithms/4.0.0-beta-23225": { + "type": "package", + "sha512": "GjeU8uLxWigoEEtfD2aBtPLmskJNnanmUwNSNKdHHzIuGr69hwmSsaVA1IDJSQ1dX5WQY32v1O1MROCfEq/Seg==", + "files": [ + "lib/DNXCore50/System.Security.Cryptography.Algorithms.dll", + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net46/System.Security.Cryptography.Algorithms.dll", + 
"lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "ref/dotnet/System.Security.Cryptography.Algorithms.dll", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net46/System.Security.Cryptography.Algorithms.dll", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "System.Security.Cryptography.Algorithms.4.0.0-beta-23225.nupkg", + "System.Security.Cryptography.Algorithms.4.0.0-beta-23225.nupkg.sha512", + "System.Security.Cryptography.Algorithms.nuspec" + ] + }, + "System.Security.Cryptography.Encoding/4.0.0-beta-23225": { + "type": "package", + "sha512": "cnSypPb7Xhng+D/VRZyu+bPvQ4+ShNt6epFmFRfYRPqmRcVHdYxuIOVTPSvbqkIz56IeYW8xOwkuOU7YdRUzbg==", + "files": [ + "lib/DNXCore50/System.Security.Cryptography.Encoding.dll", + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net46/System.Security.Cryptography.Encoding.dll", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "ref/dotnet/System.Security.Cryptography.Encoding.dll", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net46/System.Security.Cryptography.Encoding.dll", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "System.Security.Cryptography.Encoding.4.0.0-beta-23225.nupkg", + "System.Security.Cryptography.Encoding.4.0.0-beta-23225.nupkg.sha512", + "System.Security.Cryptography.Encoding.nuspec" + ] + }, + "System.Security.Cryptography.Primitives/4.0.0-beta-23225": { + "type": "package", + "sha512": "acOPCfkrkOFr/NAnA+hIOnY8yQZr94JzJ02heQDIqE0sFKyBITLbgQBoO+gTBVRxGr1o+oiYbFnXY0Q30+SACg==", + "files": [ + "lib/DNXCore50/System.Security.Cryptography.Primitives.dll", + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net46/System.Security.Cryptography.Primitives.dll", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "ref/dotnet/System.Security.Cryptography.Primitives.dll", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net46/System.Security.Cryptography.Primitives.dll", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + 
"System.Security.Cryptography.Primitives.4.0.0-beta-23225.nupkg", + "System.Security.Cryptography.Primitives.4.0.0-beta-23225.nupkg.sha512", + "System.Security.Cryptography.Primitives.nuspec" + ] + }, + "System.Security.Cryptography.X509Certificates/4.0.0-beta-23225": { + "type": "package", + "sha512": "B1IFymfZHmgOprkMBkFQBdU7ctcG4IZe07v6p2HjVJMwWoRULJtpFAosDOhWpi5cNbjW6SvuiJAfxApQ0t99YA==", + "files": [ + "lib/DNXCore50/System.Security.Cryptography.X509Certificates.dll", + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net46/System.Security.Cryptography.X509Certificates.dll", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "ref/dotnet/System.Security.Cryptography.X509Certificates.dll", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net46/System.Security.Cryptography.X509Certificates.dll", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "System.Security.Cryptography.X509Certificates.4.0.0-beta-23225.nupkg", + "System.Security.Cryptography.X509Certificates.4.0.0-beta-23225.nupkg.sha512", + "System.Security.Cryptography.X509Certificates.nuspec" + ] + }, + "System.Text.Encoding/4.0.0": { + "type": "package", + "sha512": "AMxFNOXpA6Ab8swULbXuJmoT2K5w6TnV3ObF5wsmEcIHQUJghoZtDVfVHb08O2wW15mOSI1i9Wg0Dx0pY13o8g==", + "files": [ + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net45/_._", + "lib/win8/_._", + "lib/wp80/_._", + "lib/wpa81/_._", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "License.rtf", + "ref/dotnet/de/System.Text.Encoding.xml", + "ref/dotnet/es/System.Text.Encoding.xml", + "ref/dotnet/fr/System.Text.Encoding.xml", + "ref/dotnet/it/System.Text.Encoding.xml", + "ref/dotnet/ja/System.Text.Encoding.xml", + "ref/dotnet/ko/System.Text.Encoding.xml", + "ref/dotnet/ru/System.Text.Encoding.xml", + "ref/dotnet/System.Text.Encoding.dll", + "ref/dotnet/System.Text.Encoding.xml", + "ref/dotnet/zh-hans/System.Text.Encoding.xml", + "ref/dotnet/zh-hant/System.Text.Encoding.xml", + "ref/MonoAndroid10/_._", + 
"ref/MonoTouch10/_._", + "ref/net45/_._", + "ref/netcore50/de/System.Text.Encoding.xml", + "ref/netcore50/es/System.Text.Encoding.xml", + "ref/netcore50/fr/System.Text.Encoding.xml", + "ref/netcore50/it/System.Text.Encoding.xml", + "ref/netcore50/ja/System.Text.Encoding.xml", + "ref/netcore50/ko/System.Text.Encoding.xml", + "ref/netcore50/ru/System.Text.Encoding.xml", + "ref/netcore50/System.Text.Encoding.dll", + "ref/netcore50/System.Text.Encoding.xml", + "ref/netcore50/zh-hans/System.Text.Encoding.xml", + "ref/netcore50/zh-hant/System.Text.Encoding.xml", + "ref/win8/_._", + "ref/wp80/_._", + "ref/wpa81/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "System.Text.Encoding.4.0.0.nupkg", + "System.Text.Encoding.4.0.0.nupkg.sha512", + "System.Text.Encoding.nuspec" + ] + }, + "System.Text.Encoding.Extensions/4.0.0": { + "type": "package", + "sha512": "FktA77+2DC0S5oRhgM569pbzFrcA45iQpYiI7+YKl68B6TfI2N5TQbXqSWlh2YXKoFXHi2RFwPMha2lxiFJZ6A==", + "files": [ + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net45/_._", + "lib/win8/_._", + "lib/wp80/_._", + "lib/wpa81/_._", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "License.rtf", + "ref/dotnet/de/System.Text.Encoding.Extensions.xml", + "ref/dotnet/es/System.Text.Encoding.Extensions.xml", + "ref/dotnet/fr/System.Text.Encoding.Extensions.xml", + "ref/dotnet/it/System.Text.Encoding.Extensions.xml", + "ref/dotnet/ja/System.Text.Encoding.Extensions.xml", + "ref/dotnet/ko/System.Text.Encoding.Extensions.xml", + "ref/dotnet/ru/System.Text.Encoding.Extensions.xml", + "ref/dotnet/System.Text.Encoding.Extensions.dll", + "ref/dotnet/System.Text.Encoding.Extensions.xml", + "ref/dotnet/zh-hans/System.Text.Encoding.Extensions.xml", + "ref/dotnet/zh-hant/System.Text.Encoding.Extensions.xml", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net45/_._", + "ref/netcore50/de/System.Text.Encoding.Extensions.xml", + "ref/netcore50/es/System.Text.Encoding.Extensions.xml", + 
"ref/netcore50/fr/System.Text.Encoding.Extensions.xml", + "ref/netcore50/it/System.Text.Encoding.Extensions.xml", + "ref/netcore50/ja/System.Text.Encoding.Extensions.xml", + "ref/netcore50/ko/System.Text.Encoding.Extensions.xml", + "ref/netcore50/ru/System.Text.Encoding.Extensions.xml", + "ref/netcore50/System.Text.Encoding.Extensions.dll", + "ref/netcore50/System.Text.Encoding.Extensions.xml", + "ref/netcore50/zh-hans/System.Text.Encoding.Extensions.xml", + "ref/netcore50/zh-hant/System.Text.Encoding.Extensions.xml", + "ref/win8/_._", + "ref/wp80/_._", + "ref/wpa81/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "System.Text.Encoding.Extensions.4.0.0.nupkg", + "System.Text.Encoding.Extensions.4.0.0.nupkg.sha512", + "System.Text.Encoding.Extensions.nuspec" + ] + }, + "System.Text.RegularExpressions/4.0.10-beta-23109": { + "type": "package", + "sha512": "F+tfzB34KhZnjS3ActdL57CQVD9oYGhAZm8AREL+KvbJTh1Y8/tlyCyI2xoH6KdJzIPcXC50yu4OkHNbpcutEw==", + "files": [ + "lib/dotnet/System.Text.RegularExpressions.dll", + "lib/net46/_._", + "ref/dotnet/de/System.Text.RegularExpressions.xml", + "ref/dotnet/es/System.Text.RegularExpressions.xml", + "ref/dotnet/fr/System.Text.RegularExpressions.xml", + "ref/dotnet/it/System.Text.RegularExpressions.xml", + "ref/dotnet/ja/System.Text.RegularExpressions.xml", + "ref/dotnet/ko/System.Text.RegularExpressions.xml", + "ref/dotnet/ru/System.Text.RegularExpressions.xml", + "ref/dotnet/System.Text.RegularExpressions.dll", + "ref/dotnet/System.Text.RegularExpressions.xml", + "ref/dotnet/zh-hans/System.Text.RegularExpressions.xml", + "ref/dotnet/zh-hant/System.Text.RegularExpressions.xml", + "ref/net46/_._", + "System.Text.RegularExpressions.4.0.10-beta-23109.nupkg", + "System.Text.RegularExpressions.4.0.10-beta-23109.nupkg.sha512", + "System.Text.RegularExpressions.nuspec" + ] + }, + "System.Threading/4.0.0": { + "type": "package", + "sha512": 
"H6O/9gUrjPDNYanh/7OFGAZHjVXvEuITD0RcnjfvIV04HOGrOPqUBU0kmz9RIX/7YGgCQn1o1S2DX6Cuv8kVGQ==", + "files": [ + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net45/_._", + "lib/win8/_._", + "lib/wp80/_._", + "lib/wpa81/_._", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "License.rtf", + "ref/dotnet/de/System.Threading.xml", + "ref/dotnet/es/System.Threading.xml", + "ref/dotnet/fr/System.Threading.xml", + "ref/dotnet/it/System.Threading.xml", + "ref/dotnet/ja/System.Threading.xml", + "ref/dotnet/ko/System.Threading.xml", + "ref/dotnet/ru/System.Threading.xml", + "ref/dotnet/System.Threading.dll", + "ref/dotnet/System.Threading.xml", + "ref/dotnet/zh-hans/System.Threading.xml", + "ref/dotnet/zh-hant/System.Threading.xml", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net45/_._", + "ref/netcore50/de/System.Threading.xml", + "ref/netcore50/es/System.Threading.xml", + "ref/netcore50/fr/System.Threading.xml", + "ref/netcore50/it/System.Threading.xml", + "ref/netcore50/ja/System.Threading.xml", + "ref/netcore50/ko/System.Threading.xml", + "ref/netcore50/ru/System.Threading.xml", + "ref/netcore50/System.Threading.dll", + "ref/netcore50/System.Threading.xml", + "ref/netcore50/zh-hans/System.Threading.xml", + "ref/netcore50/zh-hant/System.Threading.xml", + "ref/win8/_._", + "ref/wp80/_._", + "ref/wpa81/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "System.Threading.4.0.0.nupkg", + "System.Threading.4.0.0.nupkg.sha512", + "System.Threading.nuspec" + ] + }, + "System.Threading/4.0.10": { + "type": "package", + "sha512": "0w6pRxIEE7wuiOJeKabkDgeIKmqf4ER1VNrs6qFwHnooEE78yHwi/bKkg5Jo8/pzGLm0xQJw0nEmPXt1QBAIUA==", + "files": [ + "lib/DNXCore50/System.Threading.dll", + "lib/MonoAndroid10/_._", + "lib/MonoTouch10/_._", + "lib/net46/_._", + "lib/netcore50/System.Threading.dll", + "lib/xamarinios10/_._", + "lib/xamarinmac20/_._", + "ref/dotnet/de/System.Threading.xml", + "ref/dotnet/es/System.Threading.xml", + 
"ref/dotnet/fr/System.Threading.xml", + "ref/dotnet/it/System.Threading.xml", + "ref/dotnet/ja/System.Threading.xml", + "ref/dotnet/ko/System.Threading.xml", + "ref/dotnet/ru/System.Threading.xml", + "ref/dotnet/System.Threading.dll", + "ref/dotnet/System.Threading.xml", + "ref/dotnet/zh-hans/System.Threading.xml", + "ref/dotnet/zh-hant/System.Threading.xml", + "ref/MonoAndroid10/_._", + "ref/MonoTouch10/_._", + "ref/net46/_._", + "ref/xamarinios10/_._", + "ref/xamarinmac20/_._", + "runtimes/win8-aot/lib/netcore50/System.Threading.dll", + "System.Threading.4.0.10.nupkg", + "System.Threading.4.0.10.nupkg.sha512", + "System.Threading.nuspec" + ] + }, + "System.Threading.Tasks/4.0.10-beta-23109": { + "type": "package", + "sha512": "J7lBqLLy1A85KZUgFz0fS8u6BO+pnWun2OAkrNqcUQbgCiKDC9v92u9q6a6AB9znum+GCdzH3cyEftNtIVNd1Q==", + "files": [ + "lib/DNXCore50/System.Threading.Tasks.dll", + "lib/net46/_._", + "lib/netcore50/System.Threading.Tasks.dll", + "ref/dotnet/de/System.Threading.Tasks.xml", + "ref/dotnet/es/System.Threading.Tasks.xml", + "ref/dotnet/fr/System.Threading.Tasks.xml", + "ref/dotnet/it/System.Threading.Tasks.xml", + "ref/dotnet/ja/System.Threading.Tasks.xml", + "ref/dotnet/ko/System.Threading.Tasks.xml", + "ref/dotnet/ru/System.Threading.Tasks.xml", + "ref/dotnet/System.Threading.Tasks.dll", + "ref/dotnet/System.Threading.Tasks.xml", + "ref/dotnet/zh-hans/System.Threading.Tasks.xml", + "ref/dotnet/zh-hant/System.Threading.Tasks.xml", + "ref/net46/_._", + "runtimes/win8-aot/lib/netcore50/System.Threading.Tasks.dll", + "System.Threading.Tasks.4.0.10-beta-23109.nupkg", + "System.Threading.Tasks.4.0.10-beta-23109.nupkg.sha512", + "System.Threading.Tasks.nuspec" + ] + } + }, + "projectFileDependencyGroups": { + "": [ + "Newtonsoft.Json >= 8.0.2" + ], + "DNX,Version=v4.5.1": [ + "Microsoft.CSharp >= 4.0.1-beta-23409", + "Microsoft.CodeAnalysis >= 1.1.1", + "AsciiDoc >= 1.0.0-*", + "fx/System.Runtime ", + "fx/System.Runtime.Serialization ", + 
"fx/System.Threading.Tasks ", + "fx/System.Text.Encoding ", + "fx/System.IO " + ], + ".NETPlatform,Version=v5.1": [ + "System.Runtime >= 4.0.21-beta-23225", + "System.Collections >= 4.0.11-beta-23225", + "System.Reflection >= 4.1.0-beta-23225", + "System.Collections.Specialized >= 4.0.0-beta-23109", + "System.Linq >= 4.0.0-beta-23109", + "System.IO >= 4.0.0-beta-23109", + "System.IO.FileSystem >= 4.0.0-beta-23109", + "System.IO.Compression >= 4.0.0-beta-23109", + "System.Runtime.Serialization.Primitives >= 4.0.10-beta-23109", + "System.Text.RegularExpressions >= 4.0.10-beta-23109", + "System.Collections.Concurrent >= 4.0.10-beta-23109", + "System.Reflection.Extensions >= 4.0.0-beta-23109", + "System.Reflection.TypeExtensions >= 4.0.0-beta-23109", + "System.Reflection.Metadata >= 1.1.0-alpha-00009", + "System.Reflection.Primitives >= 4.0.0-beta-23109", + "System.Linq.Expressions >= 4.0.10-beta-23109", + "System.Dynamic.Runtime >= 4.0.11-beta-23225", + "Microsoft.CSharp >= 4.0.1-beta-23409", + "Microsoft.CodeAnalysis >= 1.1.1", + "System.Security.Cryptography.Encoding >= 4.0.0-beta-23225", + "System.Security.Cryptography.X509Certificates >= 4.0.0-beta-23225", + "System.ComponentModel.TypeConverter >= 4.0.0-beta-23109", + "System.Net.Http >= 4.0.1-beta-23225" + ] + } +} \ No newline at end of file diff --git a/src/Tests/Aggregations/Bucket/Children/ChildrenAggregationMapping.doc.cs b/src/Tests/Aggregations/Bucket/Children/ChildrenAggregationMapping.doc.cs index c17558c70db..10a5b6ac3b7 100644 --- a/src/Tests/Aggregations/Bucket/Children/ChildrenAggregationMapping.doc.cs +++ b/src/Tests/Aggregations/Bucket/Children/ChildrenAggregationMapping.doc.cs @@ -3,6 +3,7 @@ namespace Tests.Aggregations.Bucket.Children { + /** == Child Aggregation Mapping */ public class ChildrenAggregationMapping { private void MappingExample() @@ -12,10 +13,10 @@ private void MappingExample() * index with two mapped types, `project` and `commitactivity` and * we add a `_parent` mapping from 
`commitactivity` to `parent` */ var createProjectIndex = TestClient.GetClient().CreateIndex(typeof(Project), c => c - .Mappings(map=>map - .Map(m=>m.AutoMap()) - .Map(m=>m - .Parent() + .Mappings(map => map + .Map(tm => tm.AutoMap()) + .Map(tm => tm + .Parent() //<1> Set the parent of `CommitActivity` to the `Project` type ) ) ); diff --git a/src/Tests/Aggregations/Bucket/Children/ChildrenAggregationUsageTests.cs b/src/Tests/Aggregations/Bucket/Children/ChildrenAggregationUsageTests.cs index 29dc4b93ef0..4d053f39840 100644 --- a/src/Tests/Aggregations/Bucket/Children/ChildrenAggregationUsageTests.cs +++ b/src/Tests/Aggregations/Bucket/Children/ChildrenAggregationUsageTests.cs @@ -10,9 +10,8 @@ namespace Tests.Aggregations.Bucket.Children * A special single bucket aggregation that enables aggregating from buckets on parent document types to * buckets on child documents. * - * Be sure to read the elasticsearch documentation {ref}/search-aggregations-bucket-children-aggregation.html[on this subject here] + * Be sure to read {ref_current}/search-aggregations-bucket-children-aggregation.html[the elasticsearch documentation on Children Aggregation] */ - public class ChildrenAggregationUsageTests : AggregationUsageTestBase { public ChildrenAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : base(i, usage) { } @@ -60,24 +59,4 @@ public ChildrenAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : b } }; } - - - // TODO : move this to a general documentation test explaining how to - // combine aggregations using boolean operators? 
- - public class ChildrenAggregationDslUsage : ChildrenAggregationUsageTests - { - public ChildrenAggregationDslUsage(ReadOnlyCluster i, EndpointUsage usage) : base(i, usage) { } - - protected override SearchRequest Initializer => - new SearchRequest - { - Aggregations = new ChildrenAggregation("name_of_child_agg", typeof(CommitActivity)) - { - Aggregations = - new AverageAggregation("average_per_child", Field(p => p.ConfidenceFactor)) - && new MaxAggregation("max_per_child", Field(p => p.ConfidenceFactor)) - } - }; - } } diff --git a/src/Tests/Aggregations/Bucket/DateHistogram/DateHistogramAggregationUsageTests.cs b/src/Tests/Aggregations/Bucket/DateHistogram/DateHistogramAggregationUsageTests.cs index c160bbea132..81eb5c1c0f3 100644 --- a/src/Tests/Aggregations/Bucket/DateHistogram/DateHistogramAggregationUsageTests.cs +++ b/src/Tests/Aggregations/Bucket/DateHistogram/DateHistogramAggregationUsageTests.cs @@ -13,11 +13,11 @@ namespace Tests.Aggregations.Bucket.DateHistogram * From a functionality perspective, this histogram supports the same features as the normal histogram. * The main difference is that the interval can be specified by date/time expressions. * - * When both format and extended_bounds are specified, the `date_optional_time` format is included - * as part of the format value so that Elasticsearch to be able to parse - * the serialized DateTimes of extended_bounds correctly. + * NOTE: When specifying a `format` **and** `extended_bounds`, in order for Elasticsearch to be able to parse + * the serialized ``DateTime`` of `extended_bounds` correctly, the `date_optional_time` format is included + * as part of the `format` value. * - * Be sure to read the elasticsearch documentation {ref}/search-aggregations-bucket-datehistogram-aggregation.html[on this subject here] + * Be sure to read the elasticsearch documentation on {ref_current}/search-aggregations-bucket-datehistogram-aggregation.html[Date Histogram Aggregation]. 
*/ public class DateHistogramAggregationUsageTests : AggregationUsageTestBase { @@ -35,7 +35,7 @@ public DateHistogramAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage field = "startedOn", interval = "month", min_doc_count = 2, - format = "yyyy-MM-dd'T'HH:mm:ss||date_optional_time", + format = "yyyy-MM-dd'T'HH:mm:ss||date_optional_time", //<1> Note the inclusion of `date_optional_time` to `format` order = new { _count = "asc" }, extended_bounds = new { @@ -117,12 +117,12 @@ public DateHistogramAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage protected override void ExpectResponse(ISearchResponse response) { + /** === Handling responses + * Using the `.Aggs` aggregation helper on `ISearchResponse`, we can fetch our aggregation results easily + * in the correct type. <> + */ response.IsValid.Should().BeTrue(); - /** - * Using the `.Agg` aggregation helper we can fetch our aggregation results easily - * in the correct type. [Be sure to read more about `.Agg` vs `.Aggregations` on the response here]() - */ var dateHistogram = response.Aggs.DateHistogram("projects_started_per_month"); dateHistogram.Should().NotBeNull(); dateHistogram.Buckets.Should().NotBeNull(); diff --git a/src/Tests/Aggregations/Bucket/DateRange/DateRangeAggregationUsageTests.cs b/src/Tests/Aggregations/Bucket/DateRange/DateRangeAggregationUsageTests.cs index ea3ad958bae..9fb785b9f55 100644 --- a/src/Tests/Aggregations/Bucket/DateRange/DateRangeAggregationUsageTests.cs +++ b/src/Tests/Aggregations/Bucket/DateRange/DateRangeAggregationUsageTests.cs @@ -10,10 +10,12 @@ namespace Tests.Aggregations.Bucket.DateRange { /** * A range aggregation that is dedicated for date values. The main difference between this aggregation and the normal range aggregation is that the `from` - * and `to` values can be expressed in Date Math expressions, and it is also possible to specify a date format by which the from and to response fields will be returned. 
- * Note that this aggregation includes the from value and excludes the to value for each range. + * and `to` values can be expressed in `DateMath` expressions, and it is also possible to specify a date format by which the from and + * to response fields will be returned. * - * Be sure to read the elasticsearch documentation {ref}/search-aggregations-bucket-daterange-aggregation.html[on this subject here] + * IMPORTANT: this aggregation includes the `from` value and excludes the `to` value for each range. + * + * Be sure to read {ref_current}/search-aggregations-bucket-daterange-aggregation.html[the elasticsearch documentation on Date Range Aggregation] */ public class DateRangeAggregationUsageTests : AggregationUsageTestBase { @@ -30,9 +32,9 @@ public DateRangeAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : field = "startedOn", ranges = new object[] { - new { to = "now", from = "2015-06-06T12:01:02.123||+2d" }, - new { to = "now+1d-30m/h" }, - new { from = "2012-05-05||+1d-1m" }, + new { to = "now", from = "2015-06-06T12:01:02.123||+2d" }, + new { to = "now+1d-30m/h" }, + new { from = "2012-05-05||+1d-1m" }, } }, aggs = new @@ -66,9 +68,9 @@ public DateRangeAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : Field = Field(p => p.StartedOn), Ranges = new List { - {new DateRangeExpression { From = DateMath.Anchored(FixedDate).Add("2d"), To = DateMath.Now} }, - {new DateRangeExpression { To = DateMath.Now.Add(TimeSpan.FromDays(1)).Subtract("30m").RoundTo(TimeUnit.Hour) } }, - {new DateRangeExpression { From = DateMath.Anchored("2012-05-05").Add(TimeSpan.FromDays(1)).Subtract("1m") } } + new DateRangeExpression { From = DateMath.Anchored(FixedDate).Add("2d"), To = DateMath.Now}, + new DateRangeExpression { To = DateMath.Now.Add(TimeSpan.FromDays(1)).Subtract("30m").RoundTo(TimeUnit.Hour) }, + new DateRangeExpression { From = DateMath.Anchored("2012-05-05").Add(TimeSpan.FromDays(1)).Subtract("1m") } }, Aggregations = new 
TermsAggregation("project_tags") { Field = Field(p => p.Tags) } @@ -77,17 +79,17 @@ public DateRangeAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : protected override void ExpectResponse(ISearchResponse response) { - response.IsValid.Should().BeTrue(); - - /** + /** === Handling Responses * Using the `.Agg` aggregation helper we can fetch our aggregation results easily - * in the correct type. [Be sure to read more about `.Agg` vs `.Aggregations` on the response here]() + * in the correct type. <> */ + response.IsValid.Should().BeTrue(); + var dateHistogram = response.Aggs.DateRange("projects_date_ranges"); dateHistogram.Should().NotBeNull(); dateHistogram.Buckets.Should().NotBeNull(); - /** We specified three ranges so we expect to three of them in the response */ + /** We specified three ranges so we expect to have three of them in the response */ dateHistogram.Buckets.Count.Should().Be(3); foreach (var item in dateHistogram.Buckets) { diff --git a/src/Tests/Aggregations/Bucket/Filter/FilterAggregationUsageTests.cs b/src/Tests/Aggregations/Bucket/Filter/FilterAggregationUsageTests.cs index bd491dff198..15d0731c5ea 100644 --- a/src/Tests/Aggregations/Bucket/Filter/FilterAggregationUsageTests.cs +++ b/src/Tests/Aggregations/Bucket/Filter/FilterAggregationUsageTests.cs @@ -13,9 +13,8 @@ namespace Tests.Aggregations.Bucket.Filter * Defines a single bucket of all the documents in the current document set context that match a specified filter. * Often this will be used to narrow down the current aggregation context to a specific set of documents. 
* - * Be sure to read the elasticsearch documentation {ref}/search-aggregations-bucket-filter-aggregation.html[on this subject here] + * Be sure to read the Elasticsearch documentation on {ref_current}/search-aggregations-bucket-filter-aggregation.html[Filter Aggregation] */ - public class FilterAggregationUsageTests : AggregationUsageTestBase { public FilterAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : base(i, usage) @@ -68,12 +67,12 @@ public FilterAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : bas protected override void ExpectResponse(ISearchResponse response) { + /** === Handling Responses + * Using the `.Aggs` aggregation helper we can fetch our aggregation results easily + * in the correct type. <> + */ response.IsValid.Should().BeTrue(); - /** - * Using the `.Agg` aggregation helper we can fetch our aggregation results easily - * in the correct type. [Be sure to read more about `.Agg` vs `.Aggregations` on the response here]() - */ var filterAgg = response.Aggs.Filter("bethels_projects"); filterAgg.Should().NotBeNull(); filterAgg.DocCount.Should().BeGreaterThan(0); @@ -83,6 +82,11 @@ protected override void ExpectResponse(ISearchResponse response) } } + /**[float] + * == Empty Filter + * When the collection of filters is empty or all are conditionless, NEST will serialize them + * to an empty object. 
+ */ public class EmptyFilterAggregationUsageTests : AggregationUsageTestBase { public EmptyFilterAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : base(i, usage) { } @@ -125,10 +129,13 @@ protected override void ExpectResponse(ISearchResponse response) } } + /**[float] + * == Inline Script Filter + */ //reproduce of https://github.com/elastic/elasticsearch-net/issues/1931 public class InlineScriptFilterAggregationUsageTests : AggregationUsageTestBase { - private string _ctxNumberofcommits = "_source.numberOfCommits > 0"; + private string _ctxNumberofCommits = "_source.numberOfCommits > 0"; private string _aggName = "script_filter"; public InlineScriptFilterAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : base(i, usage) { } @@ -140,7 +147,7 @@ public InlineScriptFilterAggregationUsageTests(ReadOnlyCluster i, EndpointUsage filter = new { script = new { script = new { - inline = _ctxNumberofcommits + inline = _ctxNumberofCommits } } } @@ -153,7 +160,7 @@ public InlineScriptFilterAggregationUsageTests(ReadOnlyCluster i, EndpointUsage .Filter(_aggName, date => date .Filter(f => f .Script(b => b - .Inline(_ctxNumberofcommits) + .Inline(_ctxNumberofCommits) ) ) ) @@ -166,7 +173,7 @@ public InlineScriptFilterAggregationUsageTests(ReadOnlyCluster i, EndpointUsage { Filter = new ScriptQuery { - Inline = _ctxNumberofcommits + Inline = _ctxNumberofCommits } } }; diff --git a/src/Tests/Aggregations/Bucket/Filters/FiltersAggregationUsageTests.cs b/src/Tests/Aggregations/Bucket/Filters/FiltersAggregationUsageTests.cs index efa0fd0fa9d..327b92657f9 100644 --- a/src/Tests/Aggregations/Bucket/Filters/FiltersAggregationUsageTests.cs +++ b/src/Tests/Aggregations/Bucket/Filters/FiltersAggregationUsageTests.cs @@ -9,13 +9,15 @@ namespace Tests.Aggregations.Bucket.Filters { - /** == Named filters - * + /** * Defines a multi bucket aggregations where each bucket is associated with a filter. 
* Each bucket will collect all documents that match its associated filter. For documents - * that do not match any filter, these will be collected in the other bucket. + * that do not match any filter, these will be collected in the _other bucket_. * - * Be sure to read the elasticsearch documentation {ref}/search-aggregations-bucket-filters-aggregation.html[on this subject here] + * Be sure to read the Elasticsearch documentation on {ref_current}/search-aggregations-bucket-filters-aggregation.html[Filters Aggregation]. + */ + /**[float] + * == Named filters */ public class FiltersAggregationUsageTests : AggregationUsageTestBase { @@ -71,9 +73,9 @@ public FiltersAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : ba OtherBucketKey = "other_states_of_being", Filters = new NamedFiltersContainer { - { "belly_up", Query.Term(p=>p.State, StateOfBeing.BellyUp) }, - { "stable", Query.Term(p=>p.State, StateOfBeing.Stable) }, - { "very_active", Query.Term(p=>p.State, StateOfBeing.VeryActive) } + { "belly_up", Query.Term(p=>p.State, StateOfBeing.BellyUp) }, + { "stable", Query.Term(p=>p.State, StateOfBeing.Stable) }, + { "very_active", Query.Term(p=>p.State, StateOfBeing.VeryActive) } }, Aggregations = new TermsAggregation("project_tags") { Field = Field(p => p.CuratedTags.First().Name) } @@ -82,12 +84,12 @@ public FiltersAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : ba protected override void ExpectResponse(ISearchResponse response) { - response.IsValid.Should().BeTrue(); - - /** + /** === Handling Responses * Using the `.Agg` aggregation helper we can fetch our aggregation results easily - * in the correct type. [Be sure to read more about `.Agg` vs `.Aggregations` on the response here]() + * in the correct type. 
<> */ + response.IsValid.Should().BeTrue(); + var filterAgg = response.Aggs.Filters("projects_by_state"); filterAgg.Should().NotBeNull(); @@ -109,7 +111,9 @@ protected override void ExpectResponse(ISearchResponse response) } } - /** == Anonymous filters **/ + /**[float] + *== Anonymous filters + */ public class AnonymousUsage : AggregationUsageTestBase { public AnonymousUsage(ReadOnlyCluster i, EndpointUsage usage) : base(i, usage) { } @@ -160,23 +164,26 @@ public AnonymousUsage(ReadOnlyCluster i, EndpointUsage usage) : base(i, usage) { OtherBucket = true, Filters = new List { - Query.Term(p=>p.State, StateOfBeing.BellyUp) , - Query.Term(p=>p.State, StateOfBeing.Stable) , - Query.Term(p=>p.State, StateOfBeing.VeryActive) + Query.Term(p=>p.State, StateOfBeing.BellyUp) , + Query.Term(p=>p.State, StateOfBeing.Stable) , + Query.Term(p=>p.State, StateOfBeing.VeryActive) }, Aggregations = - new TermsAggregation("project_tags") { Field = Field(p => p.CuratedTags.First().Name) } + new TermsAggregation("project_tags") + { + Field = Field(p => p.CuratedTags.First().Name) + } } }; protected override void ExpectResponse(ISearchResponse response) { - response.IsValid.Should().BeTrue(); - - /** + /** === Handling Responses * Using the `.Agg` aggregation helper we can fetch our aggregation results easily - * in the correct type. [Be sure to read more about `.Agg` vs `.Aggregations` on the response here]() + * in the correct type. 
<> */ + response.IsValid.Should().BeTrue(); + var filterAgg = response.Aggs.Filters("projects_by_state"); filterAgg.Should().NotBeNull(); var results = filterAgg.AnonymousBuckets(); @@ -187,10 +194,13 @@ protected override void ExpectResponse(ISearchResponse response) singleBucket.DocCount.Should().BeGreaterThan(0); } - results.Last().DocCount.Should().Be(0); + results.Last().DocCount.Should().Be(0); // <1> The last bucket is the _other bucket_ } } + /**[float] + * == Empty Filters + */ public class EmptyFiltersAggregationUsageTests : AggregationUsageTestBase { public EmptyFiltersAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : base(i, usage) { } @@ -232,6 +242,8 @@ protected override void ExpectResponse(ISearchResponse response) } } + /**[float] + * == Conditionless Filters */ public class ConditionlessFiltersAggregationUsageTests : AggregationUsageTestBase { public ConditionlessFiltersAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : base(i, usage) { } @@ -277,4 +289,4 @@ protected override void ExpectResponse(ISearchResponse response) response.Aggs.Filters("conditionless_filters").Buckets.Should().BeEmpty(); } } -} \ No newline at end of file +} diff --git a/src/Tests/Aggregations/Bucket/IpRange/IpRangeAggregationUsageTests.cs b/src/Tests/Aggregations/Bucket/IpRange/IpRangeAggregationUsageTests.cs index 5d0ade378c2..dce2f25f00b 100644 --- a/src/Tests/Aggregations/Bucket/IpRange/IpRangeAggregationUsageTests.cs +++ b/src/Tests/Aggregations/Bucket/IpRange/IpRangeAggregationUsageTests.cs @@ -20,7 +20,7 @@ public IpRangeAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : ba { ip_range = new { - field = Field(p => p.LeadDeveloper.IPAddress), + field = "leadDeveloper.iPAddress", ranges = new object[] { new { to = "10.0.0.5" }, diff --git a/src/Tests/Aggregations/Bucket/Range/RangeAggregationUsageTests.cs b/src/Tests/Aggregations/Bucket/Range/RangeAggregationUsageTests.cs index 3d5a405572c..491b95ed639 100644 --- 
a/src/Tests/Aggregations/Bucket/Range/RangeAggregationUsageTests.cs +++ b/src/Tests/Aggregations/Bucket/Range/RangeAggregationUsageTests.cs @@ -21,7 +21,7 @@ public RangeAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : base { range = new { - field = Field(p => p.NumberOfCommits), + field = "numberOfCommits", ranges = new object[] { new { to = 100.0 }, diff --git a/src/Tests/Aggregations/Bucket/ReverseNested/ReverseNestedAggregationUsageTests.cs b/src/Tests/Aggregations/Bucket/ReverseNested/ReverseNestedAggregationUsageTests.cs index c848cd88e7b..b375feaf0d7 100644 --- a/src/Tests/Aggregations/Bucket/ReverseNested/ReverseNestedAggregationUsageTests.cs +++ b/src/Tests/Aggregations/Bucket/ReverseNested/ReverseNestedAggregationUsageTests.cs @@ -40,7 +40,7 @@ public ReverseNestedAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage { terms = new { - field = Field(p => p.Name) + field = "name" } } } diff --git a/src/Tests/Aggregations/Bucket/SignificantTerms/SignificantTermsAggregationUsageTests.cs b/src/Tests/Aggregations/Bucket/SignificantTerms/SignificantTermsAggregationUsageTests.cs index 9c56525d42e..9ad94445552 100644 --- a/src/Tests/Aggregations/Bucket/SignificantTerms/SignificantTermsAggregationUsageTests.cs +++ b/src/Tests/Aggregations/Bucket/SignificantTerms/SignificantTermsAggregationUsageTests.cs @@ -19,7 +19,7 @@ public SignificantTermsAggregationUsageTests(ReadOnlyCluster i, EndpointUsage us { significant_terms = new { - field = Field(p => p.Name), + field = "name", min_doc_count = 10, mutual_information = new { diff --git a/src/Tests/Aggregations/Bucket/Terms/TermsAggregationUsageTests.cs b/src/Tests/Aggregations/Bucket/Terms/TermsAggregationUsageTests.cs index cdb7bb5082a..227917ddf8d 100644 --- a/src/Tests/Aggregations/Bucket/Terms/TermsAggregationUsageTests.cs +++ b/src/Tests/Aggregations/Bucket/Terms/TermsAggregationUsageTests.cs @@ -24,7 +24,7 @@ public TermsAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : base }, 
terms = new { - field = Field(p => p.State), + field = "state", min_doc_count = 2, size = 5, shard_size = 100, diff --git a/src/Tests/Aggregations/Metric/Average/AverageAggregationUsageTests.cs b/src/Tests/Aggregations/Metric/Average/AverageAggregationUsageTests.cs index 4f7d4f29670..bf7f41a07a8 100644 --- a/src/Tests/Aggregations/Metric/Average/AverageAggregationUsageTests.cs +++ b/src/Tests/Aggregations/Metric/Average/AverageAggregationUsageTests.cs @@ -12,6 +12,9 @@ public class AverageAggregationUsageTests : AggregationUsageTestBase { public AverageAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : base(i, usage) { } + /// + /// + /// protected override object ExpectJson => new { aggs = new @@ -24,7 +27,7 @@ public AverageAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : ba }, avg = new { - field = Field(p => p.NumberOfCommits), + field = "numberOfCommits", missing = 10.0, script = new { diff --git a/src/Tests/Aggregations/Metric/Cardinality/CardinalityAggregationUsageTests.cs b/src/Tests/Aggregations/Metric/Cardinality/CardinalityAggregationUsageTests.cs index ccdd2b391ac..b77cf6816b3 100644 --- a/src/Tests/Aggregations/Metric/Cardinality/CardinalityAggregationUsageTests.cs +++ b/src/Tests/Aggregations/Metric/Cardinality/CardinalityAggregationUsageTests.cs @@ -19,7 +19,7 @@ public CardinalityAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) { cardinality = new { - field = Field(p => p.State), + field = "state", precision_threshold = 100 } } diff --git a/src/Tests/Aggregations/Metric/ExtendedStats/ExtendedStatsAggregationUsageTests.cs b/src/Tests/Aggregations/Metric/ExtendedStats/ExtendedStatsAggregationUsageTests.cs index eddd5140909..f20c7de7d0b 100644 --- a/src/Tests/Aggregations/Metric/ExtendedStats/ExtendedStatsAggregationUsageTests.cs +++ b/src/Tests/Aggregations/Metric/ExtendedStats/ExtendedStatsAggregationUsageTests.cs @@ -19,7 +19,7 @@ public ExtendedStatsAggregationUsageTests(ReadOnlyCluster i, EndpointUsage 
usage { extended_stats = new { - field = Field(p => p.NumberOfCommits) + field = "numberOfCommits" } } } diff --git a/src/Tests/Aggregations/Metric/GeoBounds/GeoBoundsAggregationUsageTests.cs b/src/Tests/Aggregations/Metric/GeoBounds/GeoBoundsAggregationUsageTests.cs index f5c90d1b911..03062bb15e9 100644 --- a/src/Tests/Aggregations/Metric/GeoBounds/GeoBoundsAggregationUsageTests.cs +++ b/src/Tests/Aggregations/Metric/GeoBounds/GeoBoundsAggregationUsageTests.cs @@ -19,7 +19,7 @@ public GeoBoundsAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : { geo_bounds = new { - field = Field(p => p.Location), + field = "location", wrap_longitude = true } } diff --git a/src/Tests/Aggregations/Metric/Max/MaxAggregationUsageTests.cs b/src/Tests/Aggregations/Metric/Max/MaxAggregationUsageTests.cs index 750d2b0b375..eebc4719a86 100644 --- a/src/Tests/Aggregations/Metric/Max/MaxAggregationUsageTests.cs +++ b/src/Tests/Aggregations/Metric/Max/MaxAggregationUsageTests.cs @@ -19,7 +19,7 @@ public MaxAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : base(i { max = new { - field = Field(p => p.NumberOfCommits) + field = "numberOfCommits" } } } diff --git a/src/Tests/Aggregations/Metric/Min/MinAggregationUsageTests.cs b/src/Tests/Aggregations/Metric/Min/MinAggregationUsageTests.cs index effd8911f3d..f915e938ff6 100644 --- a/src/Tests/Aggregations/Metric/Min/MinAggregationUsageTests.cs +++ b/src/Tests/Aggregations/Metric/Min/MinAggregationUsageTests.cs @@ -19,7 +19,7 @@ public MinAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : base(i { min = new { - field = Field(p => p.NumberOfCommits) + field = "numberOfCommits" } } } diff --git a/src/Tests/Aggregations/Metric/PercentileRanks/PercentileRanksAggregationUsageTests.cs b/src/Tests/Aggregations/Metric/PercentileRanks/PercentileRanksAggregationUsageTests.cs index 0fc35aa96ef..1137a591914 100644 --- a/src/Tests/Aggregations/Metric/PercentileRanks/PercentileRanksAggregationUsageTests.cs +++ 
b/src/Tests/Aggregations/Metric/PercentileRanks/PercentileRanksAggregationUsageTests.cs @@ -20,7 +20,7 @@ public PercentileRanksAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usa { percentile_ranks = new { - field = Field(p => p.NumberOfCommits), + field = "numberOfCommits", values = new [] { 15.0, 30.0 }, tdigest = new { diff --git a/src/Tests/Aggregations/Metric/Percentiles/PercentilesAggregationUsageTests.cs b/src/Tests/Aggregations/Metric/Percentiles/PercentilesAggregationUsageTests.cs index b5084517d02..36e886201fd 100644 --- a/src/Tests/Aggregations/Metric/Percentiles/PercentilesAggregationUsageTests.cs +++ b/src/Tests/Aggregations/Metric/Percentiles/PercentilesAggregationUsageTests.cs @@ -19,7 +19,7 @@ public PercentilesAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) { percentiles = new { - field = Field(p => p.NumberOfCommits), + field = "numberOfCommits", percents = new[] { 95.0, 99.0, 99.9 }, hdr = new { diff --git a/src/Tests/Aggregations/Metric/Stats/StatsAggregationUsageTests.cs b/src/Tests/Aggregations/Metric/Stats/StatsAggregationUsageTests.cs index bab79f5e989..dfd29d1694c 100644 --- a/src/Tests/Aggregations/Metric/Stats/StatsAggregationUsageTests.cs +++ b/src/Tests/Aggregations/Metric/Stats/StatsAggregationUsageTests.cs @@ -19,7 +19,7 @@ public StatsAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : base { stats = new { - field = Field(p => p.NumberOfCommits) + field = "numberOfCommits" } } } diff --git a/src/Tests/Aggregations/Metric/Sum/SumAggregationUsageTests.cs b/src/Tests/Aggregations/Metric/Sum/SumAggregationUsageTests.cs index 706d3a7e54e..de2bfd84d57 100644 --- a/src/Tests/Aggregations/Metric/Sum/SumAggregationUsageTests.cs +++ b/src/Tests/Aggregations/Metric/Sum/SumAggregationUsageTests.cs @@ -19,7 +19,7 @@ public SumAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : base(i { sum = new { - field = Field(p => p.NumberOfCommits) + field = "numberOfCommits" } } } diff --git 
a/src/Tests/Aggregations/Metric/TopHits/TopHitsAggregationUsageTests.cs b/src/Tests/Aggregations/Metric/TopHits/TopHitsAggregationUsageTests.cs index d6113ea5837..9c3c59a60cf 100644 --- a/src/Tests/Aggregations/Metric/TopHits/TopHitsAggregationUsageTests.cs +++ b/src/Tests/Aggregations/Metric/TopHits/TopHitsAggregationUsageTests.cs @@ -21,7 +21,7 @@ public TopHitsAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : ba { terms = new { - field = Field(p => p.State), + field = "state", }, aggs = new { @@ -41,12 +41,12 @@ public TopHitsAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : ba }, _source = new { - include = new string[] { "name", "startedOn" } + include = new [] { "name", "startedOn" } }, size = 1, version = true, explain = true, - fielddata_fields = new string[] { "state", "numberOfCommits" }, + fielddata_fields = new [] { "state", "numberOfCommits" }, highlight = new { fields = new diff --git a/src/Tests/Aggregations/Metric/ValueCount/ValueCountAggregationUsageTests.cs b/src/Tests/Aggregations/Metric/ValueCount/ValueCountAggregationUsageTests.cs index a646b488260..675b6a61b51 100644 --- a/src/Tests/Aggregations/Metric/ValueCount/ValueCountAggregationUsageTests.cs +++ b/src/Tests/Aggregations/Metric/ValueCount/ValueCountAggregationUsageTests.cs @@ -19,7 +19,7 @@ public ValueCountAggregationUsageTests(ReadOnlyCluster i, EndpointUsage usage) : { value_count = new { - field = Field(p => p.NumberOfCommits) + field = "numberOfCommits" } } } diff --git a/src/Tests/Aggregations/WritingAggregations.doc.cs b/src/Tests/Aggregations/WritingAggregations.doc.cs index 44b61ec73dc..4765ca181b9 100644 --- a/src/Tests/Aggregations/WritingAggregations.doc.cs +++ b/src/Tests/Aggregations/WritingAggregations.doc.cs @@ -5,121 +5,183 @@ using static Nest.Infer; using System.Collections.Generic; using System.Linq; +using Tests.Aggregations.Bucket.Children; +using Tests.Framework.Integration; +using FluentAssertions; namespace Tests.Aggregations { - public 
class WritingAggregations + /** + *== Writing Aggregations + * NEST allows you to write your aggregations using + * + * - a strict fluent DSL + * - a verbatim object initializer syntax that maps verbatim to the elasticsearch API + * - a more terse object initializer aggregation DSL + * + * Three different ways, yikes that's a lot to take in! Lets go over them one by one and explain when you might + * want to use each. + */ + public class Usage : UsageTestBase, SearchRequest> { /** - * Aggregations are arguably one of the most powerful features of Elasticsearch. - * NEST allows you to write your aggregations using a strict fluent dsl, a verbatim object initializer - * syntax that maps verbatim to the elasticsearch API & a more terse object initializer aggregation DSL. - * - * Three different ways, yikes thats a lot to take in! Lets go over them one by one and explain when you might - * want to use which one. - */ - public class Usage : UsageTestBase, SearchRequest> + * This is the json output for each example + **/ + protected override object ExpectJson => new { - protected override object ExpectJson => new + aggs = new { - aggs = new + name_of_child_agg = new { - name_of_child_agg = new + children = new { - children = new { type = "commits" }, - aggs = new { - average_per_child = new + type = "commits" + }, + aggs = new + { + average_per_child = new + { + avg = new { - avg = new { field = "confidenceFactor" } - }, - max_per_child = new + field = "confidenceFactor" + } + }, + max_per_child = new + { + max = new { - max = new { field = "confidenceFactor" } + field = "confidenceFactor" } } } } - }; - /** - * The fluent lambda syntax is the most terse way to write aggregations. 
- * It benefits from types that are carried over to sub aggregations - */ - protected override Func, ISearchRequest> Fluent => s => s - .Aggregations(aggs => aggs - .Children("name_of_child_agg", child => child - .Aggregations(childAggs => childAggs - .Average("average_per_child", avg => avg.Field(p => p.ConfidenceFactor)) - .Max("max_per_child", avg => avg.Field(p => p.ConfidenceFactor)) - ) + } + }; + + /** === Fluent DSL + * The fluent lambda syntax is the most terse way to write aggregations. + * It benefits from types that are carried over to sub aggregations + */ + protected override Func, ISearchRequest> Fluent => s => s + .Aggregations(aggs => aggs + .Children("name_of_child_agg", child => child + .Aggregations(childAggs => childAggs + .Average("average_per_child", avg => avg.Field(p => p.ConfidenceFactor)) + .Max("max_per_child", avg => avg.Field(p => p.ConfidenceFactor)) ) - ); + ) + ); - /** - * The object initializer syntax (OIS) is a one-to-one mapping with how aggregations - * have to be represented in the Elasticsearch API. While it has the benefit of being a one-to-one - * mapping, being dictionary based in C# means it can grow exponentially in complexity rather quickly. - */ - protected override SearchRequest Initializer => - new SearchRequest + /** === Object Initializer syntax + * The object initializer syntax (OIS) is a one-to-one mapping with how aggregations + * have to be represented in the Elasticsearch API. While it has the benefit of being a one-to-one + * mapping, being dictionary based in C# means it can grow exponentially in complexity rather quickly. 
+ */ + protected override SearchRequest Initializer => + new SearchRequest + { + Aggregations = new ChildrenAggregation("name_of_child_agg", typeof(CommitActivity)) { - Aggregations = new ChildrenAggregation("name_of_child_agg", typeof(CommitActivity)) - { - Aggregations = - new AverageAggregation("average_per_child", "confidenceFactor") - && new MaxAggregation("max_per_child", "confidenceFactor") - } - }; - } + Aggregations = + new AverageAggregation("average_per_child", "confidenceFactor") + && new MaxAggregation("max_per_child", "confidenceFactor") + } + }; + } - public class AggregationDslUsage : Usage - { - /** - * For this reason the OIS syntax can be shortened dramatically by using `*Agg` related family, - * These allow you to forego introducing intermediary Dictionaries to represent the aggregation DSL. - * It also allows you to combine multiple aggregations using bitwise AND (`&&`) operator. - * - * Compare the following example with the previous vanilla OIS syntax - */ - protected override SearchRequest Initializer => - new SearchRequest + public class AggregationDslUsage : Usage + { + /** === Terse Object Initializer DSL + * For this reason the OIS syntax can be shortened dramatically by using `*Agg` related family, + * These allow you to forego introducing intermediary Dictionaries to represent the aggregation DSL. + * It also allows you to combine multiple aggregations using bitwise AND (`&&`) operator. 
+ * + * Compare the following example with the previous vanilla OIS syntax + */ + protected override SearchRequest Initializer => + new SearchRequest + { + Aggregations = new ChildrenAggregation("name_of_child_agg", typeof(CommitActivity)) { - Aggregations = new ChildrenAggregation("name_of_child_agg", typeof(CommitActivity)) - { - Aggregations = - new AverageAggregation("average_per_child", Field(p => p.ConfidenceFactor)) - && new MaxAggregation("max_per_child", Field(p => p.ConfidenceFactor)) - } - }; - } + Aggregations = + new AverageAggregation("average_per_child", Field(p => p.ConfidenceFactor)) + && new MaxAggregation("max_per_child", Field(p => p.ConfidenceFactor)) + } + }; + } - public class AdvancedAggregationDslUsage : Usage - { - /** - * An advanced scenario may involve an existing collection of aggregation functions that should be set as aggregations - * on the request. Using LINQ's `.Aggregate()` method, each function can be applied to the aggregation descriptor - * (`childAggs` below) in turn, returning the descriptor after each function application. - * - */ - protected override Func, ISearchRequest> Fluent - { - get + public class AdvancedAggregationDslUsage : Usage + { + /** === Aggregating over a collection of aggregations + * An advanced scenario may involve an existing collection of aggregation functions that should be set as aggregations + * on the request. Using LINQ's `.Aggregate()` method, each function can be applied to the aggregation descriptor + * (`childAggs` below) in turn, returning the descriptor after each function application. 
+ * + */ + protected override Func, ISearchRequest> Fluent + { + get + { + var aggregations = new List, IAggregationContainer>> //<1> a list of aggregation functions to apply { - var aggregations = new List, IAggregationContainer>> - { - a => a.Average("average_per_child", avg => avg.Field(p => p.ConfidenceFactor)), - a => a.Max("max_per_child", avg => avg.Field(p => p.ConfidenceFactor)) - }; + a => a.Average("average_per_child", avg => avg.Field(p => p.ConfidenceFactor)), + a => a.Max("max_per_child", avg => avg.Field(p => p.ConfidenceFactor)) + }; - return s => s - .Aggregations(aggs => aggs - .Children("name_of_child_agg", child => child - .Aggregations(childAggs => - aggregations.Aggregate(childAggs, (acc, agg) => { agg(acc); return acc; }) - ) + return s => s + .Aggregations(aggs => aggs + .Children("name_of_child_agg", child => child + .Aggregations(childAggs => + aggregations.Aggregate(childAggs, (acc, agg) => { agg(acc); return acc; }) // <2> Using LINQ's `Aggregate()` function to accumulate/apply all of the aggregation functions ) - ); - } + ) + ); } } } + + /**[[aggs-vs-aggregations]] + *=== Aggs vs. Aggregations + * + * The response exposesboth `.Aggregations` and `.Aggs` properties for handling aggregations. Why two properties you ask? + * Well, the former is a dictionary of aggregation names to `IAggregate` types, a common interface for + * aggregation responses (termed __Aggregates__ in NEST), and the latter, is a convenience helper to get the right type + * of aggregation response out of the dictionary based on a key name. + * + * This is better illustrated with an example + */ + public class AggsUsage : ChildrenAggregationUsageTests + { + public AggsUsage(ReadOnlyCluster i, EndpointUsage usage) : base(i, usage) { } + + /** Let's imagine we make the following request. 
*/ + protected override Func, ISearchRequest> Fluent => s => s + .Aggregations(aggs => aggs + .Children("name_of_child_agg", child => child + .Aggregations(childAggs => childAggs + .Average("average_per_child", avg => avg.Field(p => p.ConfidenceFactor)) + .Max("max_per_child", avg => avg.Field(p => p.ConfidenceFactor)) + ) + ) + ); + + /**=== Aggs usage + * Now, using `.Aggs`, we can easily get the `Children` aggregation response out and from that, + * the `Average` and `Max` sub aggregations. + */ + protected override void ExpectResponse(ISearchResponse response) + { + response.IsValid.Should().BeTrue(); + + var childAggregation = response.Aggs.Children("name_of_child_agg"); + + var averagePerChild = childAggregation.Average("average_per_child"); + + averagePerChild.Should().NotBeNull(); //<1> Do something with the average per child. Here we just assert it's not null + + var maxPerChild = childAggregation.Max("max_per_child"); + + maxPerChild.Should().NotBeNull(); //<2> Do something with the max per child. Here we just assert it's not null + } + } } diff --git a/src/Tests/ClientConcepts/ConnectionPooling/BuildingBlocks/ConnectionPooling.Doc.cs b/src/Tests/ClientConcepts/ConnectionPooling/BuildingBlocks/ConnectionPooling.Doc.cs index 8b783966feb..476cacc948b 100644 --- a/src/Tests/ClientConcepts/ConnectionPooling/BuildingBlocks/ConnectionPooling.Doc.cs +++ b/src/Tests/ClientConcepts/ConnectionPooling/BuildingBlocks/ConnectionPooling.Doc.cs @@ -9,12 +9,18 @@ namespace Tests.ClientConcepts.ConnectionPooling.BuildingBlocks { public class ConnectionPooling { - /** = Connection Pooling + /**== Connection Pooling * Connection pooling is the internal mechanism that takes care of registering what nodes there are in the cluster and which - * we can use to issue client calls on. + * NEST can use to issue client calls on. 
There are 3 types of connection pool + * + * - <> + * - <> + * - <> */ - /** == SingleNodeConnectionPool + /** + * [[single-node-connection-pool]] + * === SingleNodeConnectionPool * The simplest of all connection pools, this takes a single `Uri` and uses that to connect to elasticsearch for all the calls * It doesn't opt in to sniffing and pinging behavior, and will never mark nodes dead or alive. The one `Uri` it holds is always * ready to go. @@ -32,8 +38,8 @@ [U] public void SingleNode() /** and pinging */ pool.SupportsPinging.Should().BeFalse(); - /** When you use the low ceremony ElasticClient constructor that takes a single Uri, - * We default to this SingleNodeConnectionPool */ + /** When you use the low ceremony `ElasticClient` constructor that takes a single `Uri`, + * We default to using `SingleNodeConnectionPool` */ var client = new ElasticClient(uri); client.ConnectionSettings.ConnectionPool.Should().BeOfType(); @@ -46,7 +52,8 @@ [U] public void SingleNode() client.ConnectionSettings.ConnectionPool.Should().BeOfType(); } - /** == StaticConnectionPool + /**[[static-connection-pool]] + * === StaticConnectionPool * The static connection pool is great if you have a known small sized cluster and do no want to enable * sniffing to find out the cluster topology. 
*/ @@ -57,8 +64,8 @@ [U] public void Static() /** a connection pool can be seeded using an enumerable of `Uri`s */ var pool = new StaticConnectionPool(uris); - /** Or using an enumerable of `Node` */ - var nodes = uris.Select(u=>new Node(u)); + /** Or using an enumerable of `Node`s */ + var nodes = uris.Select(u => new Node(u)); pool = new StaticConnectionPool(nodes); /** This type of pool is hardwired to opt out of sniffing*/ @@ -67,15 +74,16 @@ [U] public void Static() /** but supports pinging when enabled */ pool.SupportsPinging.Should().BeTrue(); - /** To create a client using this static connection pool pass - * the connection pool to the connectionsettings you pass to ElasticClient + /** To create a client using this static connection pool, pass + * the connection pool to the `ConnectionSettings` you pass to `ElasticClient` */ var client = new ElasticClient(new ConnectionSettings(pool)); client.ConnectionSettings.ConnectionPool.Should().BeOfType(); } - /** == SniffingConnectionPool - * A subclass of StaticConnectionPool that allows itself to be reseeded at run time. + /**[[sniffing-connection-pool]] + * === SniffingConnectionPool + * A subclass of `StaticConnectionPool` that allows itself to be reseeded at run time. * It comes with a very minor overhead of a `ReaderWriterLockSlim` to ensure thread safety. */ [U] public void Sniffing() @@ -85,8 +93,8 @@ [U] public void Sniffing() /** a connection pool can be seeded using an enumerable of `Uri` */ var pool = new SniffingConnectionPool(uris); - /** Or using an enumerable of `Node` - * A major benefit here is you can include known node roles when seeding + /** Or using an enumerable of `Node`s. + * A major benefit here is you can include known node roles when seeding and * NEST can use this information to favour sniffing on master eligible nodes first * and take master only nodes out of rotation for issuing client calls on. 
*/ @@ -100,7 +108,7 @@ [U] public void Sniffing() pool.SupportsPinging.Should().BeTrue(); /** To create a client using the sniffing connection pool pass - * the connection pool to the connectionsettings you pass to ElasticClient + * the connection pool to the `ConnectionSettings` you pass to `ElasticClient` */ var client = new ElasticClient(new ConnectionSettings(pool)); client.ConnectionSettings.ConnectionPool.Should().BeOfType(); diff --git a/src/Tests/ClientConcepts/ConnectionPooling/BuildingBlocks/DateTimeProviders.Doc.cs b/src/Tests/ClientConcepts/ConnectionPooling/BuildingBlocks/DateTimeProviders.Doc.cs index 472e0af5659..0871b5980ec 100644 --- a/src/Tests/ClientConcepts/ConnectionPooling/BuildingBlocks/DateTimeProviders.Doc.cs +++ b/src/Tests/ClientConcepts/ConnectionPooling/BuildingBlocks/DateTimeProviders.Doc.cs @@ -8,12 +8,12 @@ namespace Tests.ClientConcepts.ConnectionPooling.BuildingBlocks { public class DateTimeProviders { - - /** = Date time providers + /** :section-number: 4.5 + * == Date time providers * * Not typically something you'll have to pass to the client but all calls to `System.DateTime.UtcNow` - * in the client have been abstracted by `IDateTimeProvider`. This allows us to unit test timeouts and clusterfailover - * in run time not being bound to wall clock time. + * in the client have been abstracted by `IDateTimeProvider`. This allows us to unit test timeouts and cluster failover + * without being bound to wall clock time as calculated by using `System.DateTime.UtcNow` directly. 
*/ [U] public void DefaultNowBehaviour() { @@ -31,8 +31,8 @@ [U] public void DeadTimeoutCalculation() { var dateTimeProvider = DateTimeProvider.Default; /** - * The default timeout calculation is: `min(timeout * 2 ^ (attempts * 0.5 -1), maxTimeout)` - * The default values for `timeout` and `maxTimeout` are + * The default timeout calculation is: `min(timeout * 2 ^ (attempts * 0.5 -1), maxTimeout)`, where the + * default values for `timeout` and `maxTimeout` are */ var timeout = TimeSpan.FromMinutes(1); var maxTimeout = TimeSpan.FromMinutes(30); diff --git a/src/Tests/ClientConcepts/ConnectionPooling/BuildingBlocks/KeepingTrackOfNodes.Doc.cs b/src/Tests/ClientConcepts/ConnectionPooling/BuildingBlocks/KeepingTrackOfNodes.Doc.cs index 31419eb4e5a..2bc8899333f 100644 --- a/src/Tests/ClientConcepts/ConnectionPooling/BuildingBlocks/KeepingTrackOfNodes.Doc.cs +++ b/src/Tests/ClientConcepts/ConnectionPooling/BuildingBlocks/KeepingTrackOfNodes.Doc.cs @@ -8,12 +8,16 @@ namespace Tests.ClientConcepts.ConnectionPooling.BuildingBlocks public class KeepingTrackOfNodes { - /** = Keeping track of nodes + /** :section-number: 4.4 + * == Keeping track of nodes * */ [U] public void Creating() { + /** === Creating a Node + * A `Node` can be instantiated by passing it a `Uri` + */ var node = new Node(new Uri("http://localhost:9200")); node.Uri.Should().NotBeNull(); node.Uri.Port.Should().Be(9200); @@ -21,18 +25,25 @@ [U] public void Creating() /** By default master eligible and holds data is presumed to be true **/ node.MasterEligible.Should().BeTrue(); node.HoldsData.Should().BeTrue(); + /** Is resurrected is true on first usage, hints to the transport that a ping might be useful */ node.IsResurrected.Should().BeTrue(); - /** When instantiating your connection pool you could switch these to false to initialize the client to + /** + * When instantiating your connection pool you could switch these to false to initialize the client to * a known cluster topology. 
*/ } [U] public void BuildingPaths() { - /** passing a node with a path should be preserved. Sometimes an elasticsearch node lives behind a proxy */ + /** === Building a Node path + * passing a node with a path should be preserved. + * Sometimes an elasticsearch node lives behind a proxy + */ var node = new Node(new Uri("http://test.example/elasticsearch")); + node.Uri.Port.Should().Be(80); node.Uri.AbsolutePath.Should().Be("/elasticsearch/"); + /** We force paths to end with a forward slash so that they can later be safely combined */ var combinedPath = new Uri(node.Uri, "index/type/_search"); combinedPath.AbsolutePath.Should().Be("/elasticsearch/index/type/_search"); @@ -42,6 +53,7 @@ [U] public void BuildingPaths() combinedPath.AbsolutePath.Should().Be("/elasticsearch/index/type/_search"); } + /** === Marking Nodes */ [U] public void MarkNodes() { var node = new Node(new Uri("http://localhost:9200")); @@ -59,7 +71,7 @@ [U] public void MarkNodes() node.IsAlive.Should().BeFalse(); node.DeadUntil.Should().Be(deadUntil); } - /** however when marking a node alive deaduntil should be reset and attempts reset to 0*/ + /** however when marking a node alive, the `DeadUntil` property should be reset and `FailedAttempts` reset to 0*/ node.MarkAlive(); node.FailedAttempts.Should().Be(0); node.DeadUntil.Should().Be(default(DateTime)); @@ -68,15 +80,20 @@ [U] public void MarkNodes() [U] public void Equality() { - /** Nodes are considered equal if they have the same endpoint no matter what other metadata is associated */ + /** === Node Equality + * Nodes are considered equal if they have the same endpoint, no matter what other metadata is associated */ var node = new Node(new Uri("http://localhost:9200")) { MasterEligible = false }; var nodeAsMaster = new Node(new Uri("http://localhost:9200")) { MasterEligible = true }; + (node == nodeAsMaster).Should().BeTrue(); (node != nodeAsMaster).Should().BeFalse(); + var uri = new Uri("http://localhost:9200"); (node == 
uri).Should().BeTrue(); + var differentUri = new Uri("http://localhost:9201"); (node != differentUri).Should().BeTrue(); + node.Should().Be(nodeAsMaster); } } diff --git a/src/Tests/ClientConcepts/ConnectionPooling/BuildingBlocks/RequestPipelines.doc.cs b/src/Tests/ClientConcepts/ConnectionPooling/BuildingBlocks/RequestPipelines.doc.cs index 5ce62fa6731..55e0fcf2a2e 100644 --- a/src/Tests/ClientConcepts/ConnectionPooling/BuildingBlocks/RequestPipelines.doc.cs +++ b/src/Tests/ClientConcepts/ConnectionPooling/BuildingBlocks/RequestPipelines.doc.cs @@ -7,33 +7,40 @@ using Tests.Framework; namespace Tests.ClientConcepts.ConnectionPooling.BuildingBlocks -{ - public class RequestPipelines - { - /** = Request pipeline - * Every request is executed in the context of `RequestPipeline` when using the default `ITransport` implementation. - * - */ - +{ + /** :section-number: 4.2 + * == Request Pipeline + * Every request is executed in the context of a `RequestPipeline` when using the + * default `ITransport` implementation. + */ + public class RequestPipelines + { [U] public void RequestPipeline() { var settings = TestClient.CreateSettings(); - /** When calling Request(Async) on Transport the whole coordination of the request is deferred to a new instance in a `using` block. */ + /** When calling Request/RequestAsync on Transport the whole coordination of the request is deferred to a new instance in a `using` block. 
*/ var pipeline = new RequestPipeline(settings, DateTimeProvider.Default, new MemoryStreamFactory(), new SearchRequestParameters()); pipeline.GetType().Should().Implement(); - /** However the transport does not instantiate RequestPipeline directly, it uses a pluggable `IRequestPipelineFactory`*/ + /** However the transport does not instantiate `RequestPipeline` directly; it uses a pluggable `IRequestPipelineFactory` + * to create it + */ var requestPipelineFactory = new RequestPipelineFactory(); - var requestPipeline = requestPipelineFactory.Create(settings, DateTimeProvider.Default, new MemoryStreamFactory(), new SearchRequestParameters()); + var requestPipeline = requestPipelineFactory.Create( + settings, + DateTimeProvider.Default, //<1> An <> + new MemoryStreamFactory(), + new SearchRequestParameters()); + requestPipeline.Should().BeOfType(); requestPipeline.GetType().Should().Implement(); - /** which can be passed to the transport when instantiating a client */ + /** you can pass your own `IRequestPipeline` implementation to the transport when instantiating a client + * allowing you to have requests executed on your own custom request pipeline + */ var transport = new Transport(settings, requestPipelineFactory, DateTimeProvider.Default, new MemoryStreamFactory()); - - /** this allows you to have requests executed on your own custom request pipeline */ } private IRequestPipeline CreatePipeline( @@ -110,8 +117,8 @@ public void SniffsOnStaleCluster() } - /** A request pipeline also checks whether the overall time across multiple retries exceeds the request timeout - * See the maxretry documentation for more details, here we assert that our request pipeline exposes this propertly + /** A request pipeline also checks whether the overall time across multiple retries exceeds the request timeout. 
+ * See the <> for more details, here we assert that our request pipeline exposes this propertly */ [U] public void IsTakingTooLong() diff --git a/src/Tests/ClientConcepts/ConnectionPooling/BuildingBlocks/Transports.Doc.cs b/src/Tests/ClientConcepts/ConnectionPooling/BuildingBlocks/Transports.Doc.cs index a399c373213..359df954292 100644 --- a/src/Tests/ClientConcepts/ConnectionPooling/BuildingBlocks/Transports.Doc.cs +++ b/src/Tests/ClientConcepts/ConnectionPooling/BuildingBlocks/Transports.Doc.cs @@ -8,24 +8,26 @@ namespace Tests.ClientConcepts.ConnectionPooling.BuildingBlocks { public class Transports { - /** = Transports + /** :section-number: 4.3 + * == Transports * - * The `ITransport` interface can be seen as the motor block of the client. It's interface is deceitfully simple. - * It's ultimately responsible from translating a client call to a response. If for some reason you do not agree with the way we wrote - * the internals of the client, by implementing a custom `ITransport`, you can circumvent all of it and introduce your own. + * The `ITransport` interface can be seen as the motor block of the client. It's interface is deceitfully simple and + * it's ultimately responsible from translating a client call to a response. + * + * If for some reason you do not agree with the way we wrote the internals of the client, + * by implementing a custom `ITransport`, you can circumvent all of it and introduce your own. */ - public async Task InterfaceExplained() { /** - * Transport is generically typed to a type that implements IConnectionConfigurationValues - * This is the minimum ITransport needs to report back for the client to function. + * Transport is generically typed to a type that implements `IConnectionConfigurationValues` + * This is the minimum `ITransport` needs to report back for the client to function. 
* - * e.g in the low level client, transport is instantiated like this: + * e.g in the low level client, Elasticsearch.Net, transport is instantiated like this: */ var lowLevelTransport = new Transport(new ConnectionConfiguration()); - /** In the high level client like this: */ + /** and in the high level client, NEST, like this: */ var highlevelTransport = new Transport(new ConnectionSettings()); var connectionPool = new SingleNodeConnectionPool(new Uri("http://localhost:9200")); @@ -34,10 +36,13 @@ public async Task InterfaceExplained() /** * The only two methods on `ITransport` are `Request()` and `RequestAsync()`, the default `ITransport` implementation is responsible for introducing * many of the building blocks in the client, if these do not work for you can swap them out for your own custom `ITransport` implementation. - * If you feel this need, please let us know as we'd love to learn why you've go down this route! + * If you feel this need, {github}/issues[please let us know] as we'd love to learn why you've go down this route! 
*/ var response = inMemoryTransport.Request>(HttpMethod.GET, "/_search", new { query = new { match_all = new { } } }); - response = await inMemoryTransport.RequestAsync>(HttpMethod.GET, "/_search", new { query = new { match_all = new { } } }); + response = await inMemoryTransport.RequestAsync>( + HttpMethod.GET, + "/_search", + new { query = new { match_all = new { } } }); } } } diff --git a/src/Tests/ClientConcepts/ConnectionPooling/Exceptions/UnrecoverableExceptions.doc.cs b/src/Tests/ClientConcepts/ConnectionPooling/Exceptions/UnrecoverableExceptions.doc.cs index 4cb492e30aa..fcb06d7eb2a 100644 --- a/src/Tests/ClientConcepts/ConnectionPooling/Exceptions/UnrecoverableExceptions.doc.cs +++ b/src/Tests/ClientConcepts/ConnectionPooling/Exceptions/UnrecoverableExceptions.doc.cs @@ -13,22 +13,26 @@ namespace Tests.ClientConcepts.ConnectionPooling.Exceptions { public class UnrecoverableExceptions { - /** == Unrecoverable exceptions - * Unrecoverable exceptions are excepted exceptions that are grounds to exit the client pipeline immediately. - * By default the client won't throw on any ElasticsearchClientException but return an invalid response. - * You can configure the client to throw using ThrowExceptions() on ConnectionSettings. The following test - * both a client that throws and one that returns an invalid response with an `.OriginalException` exposed + /** == Unrecoverable exceptions + * Unrecoverable exceptions are _excepted_ exceptions that are grounds to exit the client pipeline immediately. + * By default, the client won't throw on any `ElasticsearchClientException` but instead return an invalid response which + * can be detected by checking `.IsValid` on the response + * You can configure the client to throw using `ThrowExceptions()` on `ConnectionSettings`. 
The following test + * both a client that throws and one that returns an invalid response with an `.OriginalException` exposed */ [U] public void SomePipelineFailuresAreRecoverable() { + /** The following are recoverable exceptions */ var recoverablExceptions = new[] { new PipelineException(PipelineFailure.BadResponse), new PipelineException(PipelineFailure.PingFailure), }; + recoverablExceptions.Should().OnlyContain(e => e.Recoverable); + /** and the unrecoverable exceptions */ var unrecoverableExceptions = new[] { new PipelineException(PipelineFailure.CouldNotStartSniffOnStartup), @@ -38,9 +42,13 @@ [U] public void SomePipelineFailuresAreRecoverable() new PipelineException(PipelineFailure.MaxRetriesReached), new PipelineException(PipelineFailure.MaxTimeoutReached) }; + unrecoverableExceptions.Should().OnlyContain(e => !e.Recoverable); } + /** As an example, let's set up a 10 node cluster that will always succeed when pinged but + will fail with a 401 response when making client calls + */ [U] public async Task BadAuthenticationIsUnrecoverable() { var audit = new Auditor(() => Framework.Cluster @@ -51,6 +59,9 @@ [U] public async Task BadAuthenticationIsUnrecoverable() .AllDefaults() ); + /** Here we make a client call and determine that the first audit event was a successful ping, + * followed by a bad response as a result of a bad authentication response + */ audit = await audit.TraceElasticsearchException( new ClientCall { { AuditEvent.PingSuccess, 9200 }, diff --git a/src/Tests/ClientConcepts/ConnectionPooling/Failover/FallingOver.doc.cs b/src/Tests/ClientConcepts/ConnectionPooling/Failover/FallingOver.doc.cs index a7d23a0873b..effd5401fba 100644 --- a/src/Tests/ClientConcepts/ConnectionPooling/Failover/FallingOver.doc.cs +++ b/src/Tests/ClientConcepts/ConnectionPooling/Failover/FallingOver.doc.cs @@ -9,8 +9,9 @@ namespace Tests.ClientConcepts.ConnectionPooling.FailOver { public class FallingOver { - /** == Fail over - * When using connection pooling and the 
pool has sufficient nodes a request will be retried if + /**[[falling-over]] + * == Fail over + * When using connection pooling and the pool has sufficient nodes a request will be retried if * the call to a node throws an exception or returns a 502 or 503 */ @@ -33,8 +34,9 @@ public async Task ExceptionFallsOverToNextNode() ); } - /** 502 Bad Gateway - * Will be treated as an error that requires retrying + /** === 502 Bad Gateway + * + * Will be treated as an error that requires retrying */ [U] public async Task Http502FallsOver() @@ -55,8 +57,9 @@ public async Task Http502FallsOver() ); } - /** 503 Service Unavailable - * Will be treated as an error that requires retrying + /** === 503 Service Unavailable + * + * Will be treated as an error that requires retrying */ [U] public async Task Http503FallsOver() @@ -78,7 +81,7 @@ public async Task Http503FallsOver() } /** - * If a call returns a valid http status code other then 502/503 the request won't be retried. + * If a call returns a valid (_for the request*_) http status code other then 502/503. the request won't be retried. */ [U] public async Task HttpTeapotDoesNotFallOver() diff --git a/src/Tests/ClientConcepts/ConnectionPooling/MaxRetries/RespectsMaxRetry.doc.cs b/src/Tests/ClientConcepts/ConnectionPooling/MaxRetries/RespectsMaxRetry.doc.cs index 27b11b32ebe..5e7e2a1c380 100644 --- a/src/Tests/ClientConcepts/ConnectionPooling/MaxRetries/RespectsMaxRetry.doc.cs +++ b/src/Tests/ClientConcepts/ConnectionPooling/MaxRetries/RespectsMaxRetry.doc.cs @@ -8,10 +8,12 @@ namespace Tests.ClientConcepts.ConnectionPooling.MaxRetries { public class RespectsMaxRetry { - /** == MaxRetries - * By default retry as many times as we have nodes. However retries still respect the request timeout. 
- * Meaning if you have a 100 node cluster and a request timeout of 20 seconds we will retry as many times as we can - * but give up after 20 seconds + /**[[max-retries]] + *== Max Retries + * By default, NEST will retry as many times as there are nodes in the cluster that the client knows about. + * Retries still respects the request timeout however, + * meaning if you have a 100 node cluster and a request timeout of 20 seconds, + * the client will retry as many times as it before giving up at the request timeout of 20 seconds. */ [U] @@ -42,8 +44,9 @@ public async Task DefaultMaxIsNumberOfNodes() } /** - * When you have a 100 node cluster you might want to ensure a fixed number of retries. - * Remember that the actual number of requests is initial attempt + set number of retries + * When you have a 100 node cluster, you might want to ensure a fixed number of retries. + * + * IMPORTANT: the actual number of requests is **initial attempt + set number of retries** */ [U] @@ -68,9 +71,10 @@ public async Task FixedMaximumNumberOfRetries() ); } /** - * In our previous test we simulated very fast failures, in the real world a call might take upwards of a second - * Here we simulate a particular heavy search that takes 10 seconds to fail, our Request timeout is set to 20 seconds. - * In this case it does not make sense to retry our 10 second query on 10 nodes. We should try it twice and give up before a third call is attempted + * In our previous test we simulated very fast failures, but in the real world a call might take upwards of a second. + * In this next example, we simulate a particular heavy search that takes 10 seconds to fail, and set a request timeout of 20 seconds. + * We see that the request is tried twice and gives up before a third call is attempted, since the call takes 10 seconds and thus can be + * tried twice (initial call and one retry) before the request timeout. 
*/ [U] public async Task RespectsOveralRequestTimeout() @@ -93,10 +97,11 @@ public async Task RespectsOveralRequestTimeout() } /** - * If you set smaller request time outs you might not want it to also affect the retry timeout, therefor you can configure these separately too. - * Here we simulate calls taking 3 seconds, a request time out of 2 and an overall retry timeout of 10 seconds. - * We should see 5 attempts to perform this query, testing that our request timeout cuts the query off short and that our max retry timeout of 10 - * wins over the configured request timeout + * If you set a smaller request timeout you might not want it to also affect the retry timeout. + * In cases like this, you can configure the `MaxRetryTimeout` separately. + * Here we simulate calls taking 3 seconds, a request timeout of 2 seconds and a max retry timeout of 10 seconds. + * We should see 5 attempts to perform this query, testing that our request timeout cuts the query off short and that + * our max retry timeout of 10 seconds wins over the configured request timeout */ [U] public async Task RespectsMaxRetryTimeoutOverRequestTimeout() @@ -122,7 +127,7 @@ public async Task RespectsMaxRetryTimeoutOverRequestTimeout() } /** - * If your retry policy expands beyond available nodes we won't retry the same node twice + * If your retry policy expands beyond the number of available nodes, the client **won't** retry the same node twice */ [U] public async Task RetriesAreLimitedByNodesInPool() @@ -145,9 +150,9 @@ public async Task RetriesAreLimitedByNodesInPool() } /** - * This makes setting any retry setting on a single node connection pool a NOOP, this is by design! - * Connection pooling and connection failover is about trying to fail sanely whilst still utilizing available resources and - * not giving up on the fail fast principle. It's *NOT* a mechanism for forcing requests to succeed. + * This makes setting any retry setting on a single node connection pool a no-op by design! 
+ * Connection pooling and failover is all about trying to fail sanely whilst still utilizing the available resources and
+ * not giving up on the fail fast principle; **It is NOT a mechanism for forcing requests to succeed.**
*/
[U]
public async Task DoesNotRetryOnSingleNodeConnectionPool()
@@ -165,7 +170,6 @@ public async Task DoesNotRetryOnSingleNodeConnectionPool()
{ BadResponse, 9200 }
}
);
-
}
}
}
diff --git a/src/Tests/ClientConcepts/ConnectionPooling/Pinging/FirstUsage.doc.cs b/src/Tests/ClientConcepts/ConnectionPooling/Pinging/FirstUsage.doc.cs
index 7339fd73400..c07486835fd 100644
--- a/src/Tests/ClientConcepts/ConnectionPooling/Pinging/FirstUsage.doc.cs
+++ b/src/Tests/ClientConcepts/ConnectionPooling/Pinging/FirstUsage.doc.cs
@@ -11,17 +11,18 @@ namespace Tests.ClientConcepts.ConnectionPooling.Pinging
{
public class FirstUsage
{
- /** == Pinging
- *
- * Pinging is enabled by default for the Static & Sniffing connection pool.
- * This means that the first time a node is used or resurrected we issue a ping with a smaller (configurable) timeout.
- * This allows us to fail and fallover to a healthy node faster
+ /** :section-number: 5.1
+ * == Pinging - First Usage
+ *
+ * Pinging is enabled by default for the <> and <> connection pools.
+ * This means that the first time a node is used or resurrected, a ping is issued with a small (configurable) timeout,
+ * allowing the client to fail and fallover to a healthy node much faster than attempting a request that may be heavier than a ping. 
*/ [U, SuppressMessage("AsyncUsage", "AsyncFixer001:Unnecessary async/await usage", Justification = "Its a test")] public async Task PingFailsFallsOverToHealthyNodeWithoutPing() { - /** A cluster with 2 nodes where the second node fails on ping */ + /** Here's an example with a cluster with 2 nodes where the second node fails on ping */ var audit = new Auditor(() => Framework.Cluster .Nodes(2) .Ping(p => p.Succeeds(Always)) @@ -30,9 +31,15 @@ public async Task PingFailsFallsOverToHealthyNodeWithoutPing() .AllDefaults() ); + /** When making the calls, the first call goes to 9200 which succeeds, + * and the 2nd call does a ping on 9201 because it's used for the first time. + * The ping fails so we wrap over to node 9200 which we've already pinged. + * + * Finally we assert that the connectionpool has one node that is marked as dead + */ await audit.TraceCalls( - /** The first call goes to 9200 which succeeds */ - new ClientCall { + + new ClientCall { { PingSuccess, 9200}, { HealthyResponse, 9200}, { pool => @@ -40,12 +47,9 @@ await audit.TraceCalls( pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(0); } } }, - /** The 2nd call does a ping on 9201 because its used for the first time. - * It fails so we wrap over to node 9200 which we've already pinged */ - new ClientCall { + new ClientCall { { PingFailure, 9201}, { HealthyResponse, 9200}, - /** Finally we assert that the connectionpool has one node that is marked as dead */ { pool => pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(1) } } ); @@ -65,7 +69,7 @@ public async Task PingFailsFallsOverMultipleTimesToHealthyNode() await audit.TraceCalls( /** The first call goes to 9200 which succeeds */ - new ClientCall { + new ClientCall { { PingSuccess, 9200}, { HealthyResponse, 9200}, { pool => @@ -73,10 +77,10 @@ await audit.TraceCalls( pool.Nodes.Where(n=>!n.IsAlive).Should().HaveCount(0); } } }, - /** The 2nd call does a ping on 9201 because its used for the first time. 
- * It fails and so we ping 9202 which also fails. We then ping 9203 becuase + /** The 2nd call does a ping on 9201 because its used for the first time. + * It fails and so we ping 9202 which also fails. We then ping 9203 becuase * we haven't used it before and it succeeds */ - new ClientCall { + new ClientCall { { PingFailure, 9201}, { PingFailure, 9202}, { PingSuccess, 9203}, @@ -89,7 +93,7 @@ await audit.TraceCalls( [U, SuppressMessage("AsyncUsage", "AsyncFixer001:Unnecessary async/await usage", Justification = "Its a test")] public async Task AllNodesArePingedOnlyOnFirstUseProvidedTheyAreHealthy() { - /** A healthy cluster of 4 (min master nodes of 3 of course!) */ + /**A healthy cluster of 4 (min master nodes of 3 of course!) */ var audit = new Auditor(() => Framework.Cluster .Nodes(4) .Ping(p => p.SucceedAlways()) diff --git a/src/Tests/ClientConcepts/ConnectionPooling/Pinging/Revival.doc.cs b/src/Tests/ClientConcepts/ConnectionPooling/Pinging/Revival.doc.cs index 3a2fce7d519..da080504367 100644 --- a/src/Tests/ClientConcepts/ConnectionPooling/Pinging/Revival.doc.cs +++ b/src/Tests/ClientConcepts/ConnectionPooling/Pinging/Revival.doc.cs @@ -11,7 +11,8 @@ namespace Tests.ClientConcepts.ConnectionPooling.Pinging { public class Revival { - /** == Pinging + /** :section-number: 5.2 + * == Pinging - Revival * * When a node is marked dead it will only be put in the dog house for a certain amount of time. Once it comes out of the dog house, or revived, we schedule a ping * before the actual call to make sure its up and running. If its still down we put it back in the dog house a little longer. 
For an explanation on these timeouts see: TODO LINK diff --git a/src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/DisableSniffPingPerRequest.cs b/src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/DisableSniffPingPerRequest.doc.cs similarity index 59% rename from src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/DisableSniffPingPerRequest.cs rename to src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/DisableSniffPingPerRequest.doc.cs index 427ff1156b3..41178df046e 100644 --- a/src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/DisableSniffPingPerRequest.cs +++ b/src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/DisableSniffPingPerRequest.doc.cs @@ -1,92 +1,96 @@ -using System; -using System.Threading.Tasks; -using Elasticsearch.Net; -using Tests.Framework; -using static Elasticsearch.Net.AuditEvent; - -namespace Tests.ClientConcepts.ConnectionPooling.RequestOverrides -{ - public class DisableSniffPingPerRequest - { - /** == Disabling sniffing and pinging on a request basis - * Even if you are using a sniffing connection pool thats set up to sniff on start/failure - * and pinging enabled, you can opt out of this behaviour on a per request basis - * - * In our first test we set up a cluster that pings and sniffs on startup - * but we disable the sniffing on our first request so we only see the ping and the response - */ - - [U] public async Task DisableSniff() - { - var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .ClientCalls(r => r.SucceedAlways()) - .SniffingConnectionPool() - .Settings(s => s.SniffOnStartup()) - ); - - audit = await audit.TraceCalls( - /** - * We disable sniffing so eventhoug its our first call we do not want to sniff on startup - */ - new ClientCall(r=>r.DisableSniffing()) { - { PingSuccess, 9200 }, - { HealthyResponse, 9200 } - }, - /** - * Instead the sniff on startup is deffered to the second call into the cluster that - * does not disable sniffing on a per request basis 
- */ - new ClientCall() - { - { SniffOnStartup }, - { SniffSuccess, 9200 }, - { PingSuccess, 9200 }, - { HealthyResponse, 9200 } - }, - /** - * And after that no sniff on startup will happen again - */ - new ClientCall() - { - { PingSuccess, 9201 }, - { HealthyResponse, 9201 } - } - ); - } - - [U] public async Task DisablePing() - { - var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .ClientCalls(r => r.SucceedAlways()) - .SniffingConnectionPool() - .Settings(s => s.SniffOnStartup()) - ); - - audit = await audit.TraceCall( - new ClientCall(r=>r.DisablePing()) { - { SniffOnStartup }, - { SniffSuccess, 9200 }, - { HealthyResponse, 9200 } - } - ); - } - - [U] public async Task DisableSniffAndPing() - { - var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .ClientCalls(r => r.SucceedAlways()) - .SniffingConnectionPool() - .Settings(s => s.SniffOnStartup()) - ); - - audit = await audit.TraceCall( - new ClientCall(r=>r.DisableSniffing().DisablePing()) { - { HealthyResponse, 9200 } - } - ); - } - } -} +using System; +using System.Threading.Tasks; +using Elasticsearch.Net; +using Tests.Framework; +using static Elasticsearch.Net.AuditEvent; + +namespace Tests.ClientConcepts.ConnectionPooling.RequestOverrides +{ + public class DisableSniffPingPerRequest + { + /** :section-number: 6.1 + * == Disabling sniffing and pinging on a request basis + * + * Even if you are using a sniffing connection pool thats set up to sniff on start/failure + * and pinging enabled, you can opt out of this behaviour on a _per request_ basis. 
+ * + * In our first test we set up a cluster that pings and sniffs on startup + * but we disable the sniffing on our first request so we only see the ping and the response + */ + + [U] public async Task DisableSniff() + { + /** Let's set up the cluster and configure clients to **always** sniff on startup */ + var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .ClientCalls(r => r.SucceedAlways()) + .SniffingConnectionPool() + .Settings(s => s.SniffOnStartup()) // <1> sniff on startup + ); + + + audit = await audit.TraceCalls( + /** Now We disable sniffing on the request so even though it's our first call, we do not want to sniff on startup */ + new ClientCall(r => r.DisableSniffing()) // <1> disable sniffing + { + { PingSuccess, 9200 }, // <2> first call is a successful ping + { HealthyResponse, 9200 } + }, + /** Instead, the sniff on startup is deferred to the second call into the cluster that + * does not disable sniffing on a per request basis + */ + new ClientCall() + { + { SniffOnStartup }, // <3> sniff on startup call happens here, on the second call + { SniffSuccess, 9200 }, + { PingSuccess, 9200 }, + { HealthyResponse, 9200 } + }, + /** And after that no sniff on startup will happen again */ + new ClientCall() + { + { PingSuccess, 9201 }, + { HealthyResponse, 9201 } + } + ); + } + + /** Now, let's disable pinging on the request */ + [U] public async Task DisablePing() + { + var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .ClientCalls(r => r.SucceedAlways()) + .SniffingConnectionPool() + .Settings(s => s.SniffOnStartup()) + ); + + audit = await audit.TraceCall( + new ClientCall(r => r.DisablePing()) // <1> disable ping + { + { SniffOnStartup }, + { SniffSuccess, 9200 }, // <2> No ping after sniffing + { HealthyResponse, 9200 } + } + ); + } + + /** Finally, let's demonstrate disabling both sniff and ping on the request */ + [U] public async Task DisableSniffAndPing() + { + var audit = new Auditor(() => Framework.Cluster + .Nodes(10) 
+ .ClientCalls(r => r.SucceedAlways())
+ .SniffingConnectionPool()
+ .Settings(s => s.SniffOnStartup())
+ );
+
+ audit = await audit.TraceCall(
+ new ClientCall(r=>r.DisableSniffing().DisablePing()) // <1> disable ping and sniff
+ {
+ { HealthyResponse, 9200 } // <2> no ping or sniff before the call
+ }
+ );
+ }
+ }
+}
diff --git a/src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/RequestTimeoutsOverrides.cs b/src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/RequestTimeoutsOverrides.doc.cs
similarity index 97%
rename from src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/RequestTimeoutsOverrides.cs
rename to src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/RequestTimeoutsOverrides.doc.cs
index 703726dd4dd..cd1ca1fdec1 100644
--- a/src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/RequestTimeoutsOverrides.cs
+++ b/src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/RequestTimeoutsOverrides.doc.cs
@@ -6,19 +6,21 @@ namespace Tests.ClientConcepts.ConnectionPooling.RequestOverrides
{
public class RequestTimeoutsOverrides
- {
- /** == Request Timeouts
+ {
+ /** :section-number: 6.2
+ * == Request Timeouts
+ *
* While you can specify Request time out globally you can override this per request too
- */
-
- [U]
+ */
+
+ [U]
public async Task RespectsRequestTimeoutOverride()
- {
-
+ {
+
/** we set up a 10 node cluster with a global time out of 20 seconds.
* Each call on a node takes 10 seconds. So we can only try this call on 2 nodes
* before the max request time out kills the client call. 
- */ + */ var audit = new Auditor(() => Framework.Cluster .Nodes(10) .ClientCalls(r => r.FailAlways().Takes(TimeSpan.FromSeconds(10))) @@ -30,70 +32,70 @@ public async Task RespectsRequestTimeoutOverride() audit = await audit.TraceCalls( new ClientCall { { BadResponse, 9200 }, - { BadResponse, 9201 }, + { BadResponse, 9201 }, { MaxTimeoutReached } - }, + }, /** * On the second request we specify a request timeout override to 60 seconds * We should now see more nodes being tried. - */ - new ClientCall(r => r.RequestTimeout(TimeSpan.FromSeconds(80))) - { - { BadResponse, 9203 }, - { BadResponse, 9204 }, - { BadResponse, 9205 }, - { BadResponse, 9206 }, - { BadResponse, 9207 }, - { BadResponse, 9208 }, - { HealthyResponse, 9209 }, - } + */ + new ClientCall(r => r.RequestTimeout(TimeSpan.FromSeconds(80))) + { + { BadResponse, 9203 }, + { BadResponse, 9204 }, + { BadResponse, 9205 }, + { BadResponse, 9206 }, + { BadResponse, 9207 }, + { BadResponse, 9208 }, + { HealthyResponse, 9209 }, + } + ); + + } + + /** == Connect Timeouts + * Connect timeouts can be overridden, webrequest/httpclient can not distinguish connect and retry timeouts however + * we use this separate configuration value for ping requests. + */ + [U] + public async Task RespectsConnectTimeoutOverride() + { + /** we set up a 10 node cluster with a global time out of 20 seconds. + * Each call on a node takes 10 seconds. So we can only try this call on 2 nodes + * before the max request time out kills the client call. 
+ */ + var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .Ping(p => p.SucceedAlways().Takes(TimeSpan.FromSeconds(20))) + .ClientCalls(r => r.SucceedAlways()) + .StaticConnectionPool() + .Settings(s => s.RequestTimeout(TimeSpan.FromSeconds(10)).PingTimeout(TimeSpan.FromSeconds(10))) + ); + + audit = await audit.TraceCalls( + /** + * The first call uses the configured global settings, request times out after 10 seconds and ping + * calls always take 20, so we should see a single ping failure + */ + new ClientCall { + { PingFailure, 9200 }, + { MaxTimeoutReached } + }, + /** + * On the second request we set a request ping timeout override of 2seconds + * We should now see more nodes being tried before the request timeout is hit. + */ + new ClientCall(r => r.PingTimeout(TimeSpan.FromSeconds(2))) + { + { PingFailure, 9202 }, + { PingFailure, 9203 }, + { PingFailure, 9204 }, + { PingFailure, 9205 }, + { PingFailure, 9206 }, + { MaxTimeoutReached } + } ); - } - - /** == Connect Timeouts - * Connect timeouts can be overridden, webrequest/httpclient can not distinguish connect and retry timeouts however - * we use this separate configuration value for ping requests. - */ - [U] - public async Task RespectsConnectTimeoutOverride() - { - /** we set up a 10 node cluster with a global time out of 20 seconds. - * Each call on a node takes 10 seconds. So we can only try this call on 2 nodes - * before the max request time out kills the client call. 
- */ - var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .Ping(p => p.SucceedAlways().Takes(TimeSpan.FromSeconds(20))) - .ClientCalls(r => r.SucceedAlways()) - .StaticConnectionPool() - .Settings(s => s.RequestTimeout(TimeSpan.FromSeconds(10)).PingTimeout(TimeSpan.FromSeconds(10))) - ); - - audit = await audit.TraceCalls( - /** - * The first call uses the configured global settings, request times out after 10 seconds and ping - * calls always take 20, so we should see a single ping failure - */ - new ClientCall { - { PingFailure, 9200 }, - { MaxTimeoutReached } - }, - /** - * On the second request we set a request ping timeout override of 2seconds - * We should now see more nodes being tried before the request timeout is hit. - */ - new ClientCall(r => r.PingTimeout(TimeSpan.FromSeconds(2))) - { - { PingFailure, 9202 }, - { PingFailure, 9203 }, - { PingFailure, 9204 }, - { PingFailure, 9205 }, - { PingFailure, 9206 }, - { MaxTimeoutReached } - } - ); - } } } diff --git a/src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/RespectsAllowedStatusCode.cs b/src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/RespectsAllowedStatusCode.doc.cs similarity index 91% rename from src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/RespectsAllowedStatusCode.cs rename to src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/RespectsAllowedStatusCode.doc.cs index eca019f4184..d327c0f6bea 100644 --- a/src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/RespectsAllowedStatusCode.cs +++ b/src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/RespectsAllowedStatusCode.doc.cs @@ -1,34 +1,35 @@ -using System; -using System.Threading.Tasks; -using Elasticsearch.Net; -using Tests.Framework; -using static Elasticsearch.Net.AuditEvent; - -namespace Tests.ClientConcepts.ConnectionPooling.RequestOverrides -{ - public class RespectsAllowedStatusCode - { - /** == Allowed status codes - */ - - [U] - public async Task 
CanOverrideBadResponse() - { - var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .ClientCalls(r => r.FailAlways(400)) - .StaticConnectionPool() - .Settings(s => s.DisablePing().MaximumRetries(0)) - ); - - audit = await audit.TraceCalls( - new ClientCall() { - { BadResponse, 9200 } - }, - new ClientCall(r => r.AllowedStatusCodes(400)) { - { HealthyResponse, 9201 } - } - ); - } - } -} +using System; +using System.Threading.Tasks; +using Elasticsearch.Net; +using Tests.Framework; +using static Elasticsearch.Net.AuditEvent; + +namespace Tests.ClientConcepts.ConnectionPooling.RequestOverrides +{ + public class RespectsAllowedStatusCode + { + /** :section-number: 6.3 + * == Allowed status codes + */ + + [U] + public async Task CanOverrideBadResponse() + { + var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .ClientCalls(r => r.FailAlways(400)) + .StaticConnectionPool() + .Settings(s => s.DisablePing().MaximumRetries(0)) + ); + + audit = await audit.TraceCalls( + new ClientCall() { + { BadResponse, 9200 } + }, + new ClientCall(r => r.AllowedStatusCodes(400)) { + { HealthyResponse, 9201 } + } + ); + } + } +} diff --git a/src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/RespectsForceNode.cs b/src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/RespectsForceNode.doc.cs similarity index 93% rename from src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/RespectsForceNode.cs rename to src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/RespectsForceNode.doc.cs index 4f7d0bdd40e..ee59976b003 100644 --- a/src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/RespectsForceNode.cs +++ b/src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/RespectsForceNode.doc.cs @@ -1,34 +1,35 @@ -using System; -using System.Threading.Tasks; -using Elasticsearch.Net; -using Tests.Framework; -using static Elasticsearch.Net.AuditEvent; - -namespace Tests.ClientConcepts.ConnectionPooling.RequestOverrides -{ - public class 
RespectsForceNode - { - /** == Forcing nodes - * Sometimes you might want to fire a single request to a specific node. You can do so using the `ForceNode` - * request configuration. This will ignore the pool and not retry. - */ - - [U] - public async Task OnlyCallsForcedNode() - { - var audit = new Auditor(() => Framework.Cluster - .Nodes(10) - .ClientCalls(r => r.SucceedAlways()) - .ClientCalls(r => r.OnPort(9208).FailAlways()) - .StaticConnectionPool() - .Settings(s => s.DisablePing()) - ); - - audit = await audit.TraceCall( - new ClientCall(r => r.ForceNode(new Uri("http://localhost:9208"))) { - { BadResponse, 9208 } - } - ); - } - } -} +using System; +using System.Threading.Tasks; +using Elasticsearch.Net; +using Tests.Framework; +using static Elasticsearch.Net.AuditEvent; + +namespace Tests.ClientConcepts.ConnectionPooling.RequestOverrides +{ + public class RespectsForceNode + { + /** :section-number: 6.4 + * == Forcing nodes + * Sometimes you might want to fire a single request to a specific node. You can do so using the `ForceNode` + * request configuration. This will ignore the pool and not retry. 
+ */ + + [U] + public async Task OnlyCallsForcedNode() + { + var audit = new Auditor(() => Framework.Cluster + .Nodes(10) + .ClientCalls(r => r.SucceedAlways()) + .ClientCalls(r => r.OnPort(9208).FailAlways()) + .StaticConnectionPool() + .Settings(s => s.DisablePing()) + ); + + audit = await audit.TraceCall( + new ClientCall(r => r.ForceNode(new Uri("http://localhost:9208"))) { + { BadResponse, 9208 } + } + ); + } + } +} diff --git a/src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/RespectsMaxRetryOverrides.doc.cs b/src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/RespectsMaxRetryOverrides.doc.cs index f419cfacc34..82a58527b94 100644 --- a/src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/RespectsMaxRetryOverrides.doc.cs +++ b/src/Tests/ClientConcepts/ConnectionPooling/RequestOverrides/RespectsMaxRetryOverrides.doc.cs @@ -8,7 +8,9 @@ namespace Tests.ClientConcepts.ConnectionPooling.RequestOverrides { public class RespectsMaxRetryOverrides { - /** == MaxRetries + /** :section-number: 6.5 + * == Maximum Retries + * * By default retry as many times as we have nodes. However retries still respect the request timeout. * Meaning if you have a 100 node cluster and a request timeout of 20 seconds we will retry as many times as we can * but give up after 20 seconds diff --git a/src/Tests/ClientConcepts/ConnectionPooling/RoundRobin/RoundRobin.doc.cs b/src/Tests/ClientConcepts/ConnectionPooling/RoundRobin/RoundRobin.doc.cs index 7f20bcca692..8483672b142 100644 --- a/src/Tests/ClientConcepts/ConnectionPooling/RoundRobin/RoundRobin.doc.cs +++ b/src/Tests/ClientConcepts/ConnectionPooling/RoundRobin/RoundRobin.doc.cs @@ -11,16 +11,15 @@ namespace Tests.ClientConcepts.ConnectionPooling.RoundRobin { public class RoundRobin { - /** Round Robin + /** == Round Robin * Each connection pool round robins over the `live` nodes, to evenly distribute the load over all known nodes. 
*/ - /** == GetNext - * GetNext is implemented in a lock free thread safe fashion, meaning each callee gets returned its own cursor to advance + /** === GetNext + * `GetNext` is implemented in a lock free thread safe fashion, meaning each callee gets returned its own cursor to advance * over the internal list of nodes. This to guarantee each request that needs to fall over tries all the nodes without * suffering from noisy neighboors advancing a global cursor. */ - protected int NumberOfNodes = 10; [U] public void EachViewStartsAtNexPositionAndWrapsOver() @@ -36,7 +35,7 @@ [U] public void EachViewStartsAtNexPositionAndWrapsOver() public void AssertCreateView(IConnectionPool pool) { /** - * Here we have setup a static connection pool seeded with 10 nodes. We force randomizationOnStartup to false + * Here we have setup a static connection pool seeded with 10 nodes. We force randomization OnStartup to false * so that we can test the nodes being returned are int the order we expect them to. * So what order we expect? Imagine the following: * diff --git a/src/Tests/ClientConcepts/ConnectionPooling/RoundRobin/SkipDeadNodes.doc.cs b/src/Tests/ClientConcepts/ConnectionPooling/RoundRobin/SkipDeadNodes.doc.cs index 18624805b34..2429ad96fbf 100644 --- a/src/Tests/ClientConcepts/ConnectionPooling/RoundRobin/SkipDeadNodes.doc.cs +++ b/src/Tests/ClientConcepts/ConnectionPooling/RoundRobin/SkipDeadNodes.doc.cs @@ -12,11 +12,12 @@ namespace Tests.ClientConcepts.ConnectionPooling.RoundRobin { public class SkippingDeadNodes { - /** Round Robin - Skipping Dead Nodes + /** == Round Robin - Skipping Dead Nodes + * * When selecting nodes the connection pool will try and skip all the nodes that are marked dead. */ - /** == GetNext + /** === GetNext * GetNext is implemented in a lock free thread safe fashion, meaning each callee gets returned its own cursor to advance * over the internal list of nodes. 
This to guarantee each request that needs to fall over tries all the nodes without * suffering from noisy neighboors advancing a global cursor. @@ -51,7 +52,7 @@ [U] public void EachViewSeesNextButSkipsTheDeadNode() node = pool.CreateView().First(); node.Uri.Port.Should().Be(9202); } - /** After we marke the first node alive again we expect it to be hit again*/ + /** After we marked the first node alive again, we expect it to be hit again*/ seeds.First().MarkAlive(); for (var i = 0; i < 20; i++) { @@ -76,7 +77,7 @@ [U] public void ViewSeesResurrectedNodes() node = pool.CreateView().First(); node.Uri.Port.Should().Be(9202); } - /** If we forward our clock 2 days the node that was marked dead until tomorrow (or yesterday!) should be resurrected */ + /** If we roll the clock forward two days, the node that was marked dead until tomorrow (or yesterday!) should be resurrected */ dateTimeProvider.ChangeTime(d => d.AddDays(2)); var n = pool.CreateView().First(); n.Uri.Port.Should().Be(9201); diff --git a/src/Tests/ClientConcepts/ConnectionPooling/Sniffing/OnConnectionFailure.doc.cs b/src/Tests/ClientConcepts/ConnectionPooling/Sniffing/OnConnectionFailure.doc.cs index c2ed84ad5ba..017709e6bd6 100644 --- a/src/Tests/ClientConcepts/ConnectionPooling/Sniffing/OnConnectionFailure.doc.cs +++ b/src/Tests/ClientConcepts/ConnectionPooling/Sniffing/OnConnectionFailure.doc.cs @@ -9,7 +9,9 @@ namespace Tests.ClientConcepts.ConnectionPooling.Sniffing { public class OnConnectionFailure { - /** == Sniffing on connection failure + /** :section-number: 7.1 + * == Sniffing on connection failure + * * Sniffing on connection is enabled by default when using a connection pool that allows reseeding. * The only IConnectionPool we ship that allows this is the SniffingConnectionPool. 
* diff --git a/src/Tests/ClientConcepts/ConnectionPooling/Sniffing/OnStaleClusterState.doc.cs b/src/Tests/ClientConcepts/ConnectionPooling/Sniffing/OnStaleClusterState.doc.cs index 3076e6f1aa1..9eb9a53c2ad 100644 --- a/src/Tests/ClientConcepts/ConnectionPooling/Sniffing/OnStaleClusterState.doc.cs +++ b/src/Tests/ClientConcepts/ConnectionPooling/Sniffing/OnStaleClusterState.doc.cs @@ -10,7 +10,8 @@ namespace Tests.ClientConcepts.ConnectionPooling.Sniffing { public class OnStaleClusterState { - /** == Sniffing periodically + /** :section-number: 7.2 + * == Sniffing periodically * * Connection pools that return true for `SupportsReseeding` can be configured to sniff periodically. * In addition to sniffing on startup and sniffing on failures, sniffing periodically can benefit scenerio's where diff --git a/src/Tests/ClientConcepts/ConnectionPooling/Sniffing/OnStartup.doc.cs b/src/Tests/ClientConcepts/ConnectionPooling/Sniffing/OnStartup.doc.cs index 6223fa14a91..7abb85d5c1e 100644 --- a/src/Tests/ClientConcepts/ConnectionPooling/Sniffing/OnStartup.doc.cs +++ b/src/Tests/ClientConcepts/ConnectionPooling/Sniffing/OnStartup.doc.cs @@ -10,7 +10,8 @@ namespace Tests.ClientConcepts.ConnectionPooling.Sniffing { public class OnStartupSniffing { - /** == Sniffing on startup + /** :section-number: 7.3 + * == Sniffing on startup * * Connection pools that return true for `SupportsReseeding` by default sniff on startup. 
*/ diff --git a/src/Tests/ClientConcepts/ConnectionPooling/Sniffing/RoleDetection.doc.cs b/src/Tests/ClientConcepts/ConnectionPooling/Sniffing/RoleDetection.doc.cs index c5e0a99df11..720d4a512be 100644 --- a/src/Tests/ClientConcepts/ConnectionPooling/Sniffing/RoleDetection.doc.cs +++ b/src/Tests/ClientConcepts/ConnectionPooling/Sniffing/RoleDetection.doc.cs @@ -13,12 +13,13 @@ namespace Tests.ClientConcepts.ConnectionPooling.Sniffing { public class RoleDetection - { - /** == Sniffing role detection + { + /** :section-number: 7.4 + * == Sniffing role detection * * When we sniff the custer state we detect the role of the node whether its master eligible and holds data * We use this information when selecting a node to perform an API call on. - */ + */ [U, SuppressMessage("AsyncUsage", "AsyncFixer001:Unnecessary async/await usage", Justification = "Its a test")] public async Task DetectsMasterNodes() { diff --git a/src/Tests/ClientConcepts/HighLevel/CovariantHits/CovariantSearchResults.doc.cs b/src/Tests/ClientConcepts/HighLevel/CovariantHits/CovariantSearchResults.doc.cs index 2a0ff0f766e..e6a1480b6ab 100644 --- a/src/Tests/ClientConcepts/HighLevel/CovariantHits/CovariantSearchResults.doc.cs +++ b/src/Tests/ClientConcepts/HighLevel/CovariantHits/CovariantSearchResults.doc.cs @@ -6,59 +6,59 @@ namespace Tests.ClientConcepts.HighLevel.CovariantHits { - /** # Covariant Search Results - * - * NEST directly supports returning covariant result sets. - * Meaning a result can be typed to an interface or baseclass - * but the actual instance type of the result can be that of the subclass directly - * - * Let look at an example, imagine we want to search over multiple types that all implement - * `ISearchResult` - * - */ - public interface ISearchResult + public class CovariantSearchResults { - string Name { get; set; } - } + /**== Covariant Search Results + * + * NEST directly supports returning covariant result sets. 
+ * Meaning a result can be typed to an interface or base class + * but the actual instance type of the result can be that of the subclass directly + * + * Let's look at an example; Imagine we want to search over multiple types that all implement + * `ISearchResult` + */ + public interface ISearchResult + { + string Name { get; set; } + } - /** - * We have three implementations of `ISearchResult` namely `A`, `B` and `C` - */ + /** + * We have three implementations of `ISearchResult` namely `A`, `B` and `C` + */ + public class A : ISearchResult + { + public string Name { get; set; } + public int PropertyOnA { get; set; } + } - public class A : ISearchResult - { - public string Name { get; set; } - public int PropertyOnA { get; set; } - } + public class B : ISearchResult + { + public string Name { get; set; } + public int PropertyOnB { get; set; } + } - public class B : ISearchResult - { - public string Name { get; set; } - public int PropertyOnB { get; set; } - } + public class C : ISearchResult + { + public string Name { get; set; } + public int PropertyOnC { get; set; } + } - public class C : ISearchResult - { - public string Name { get; set; } - public int PropertyOnC { get; set; } - } - public class CovariantSearchResults - { - private IElasticClient _client = TestClient.GetFixedReturnClient(CovariantSearchResultMock.Json); + private readonly IElasticClient _client = TestClient.GetFixedReturnClient(CovariantSearchResultMock.Json); + [U] public void UsingTypes() { - /** + /** === Using Types * The most straightforward way to search over multiple types is to * type the response to the parent interface or base class - * and pass the actual types we want to search over using `.Types()` + * and pass the actual types we want to search over using `.Type()` */ var result = this._client.Search(s => s .Type(Types.Type(typeof(A), typeof(B), typeof(C))) .Size(100) ); /** - * Nest will translate this to a search over /index/a,b,c/_search. 
+ * NEST will translate this to a search over `/index/a,b,c/_search`; * hits that have `"_type" : "a"` will be serialized to `A` and so forth */ @@ -91,7 +91,7 @@ [U] public void UsingTypes() [U] public void UsingConcreteTypeSelector() { - /** + /** === Using ConcreteTypeSelector * A more low level approach is to inspect the hit yourself and determine the CLR type to deserialize to */ var result = this._client.Search(s => s @@ -100,8 +100,9 @@ [U] public void UsingConcreteTypeSelector() ); /** - * here for each hit we'll call the delegate with `d` which a dynamic representation of the `_source` - * and a typed `h` which represents the encapsulating hit. + * here for each hit we'll call the delegate passed to `ConcreteTypeSelector where + * - `d` is a representation of the `_source` exposed as a `dynamic` type + * - a typed `h` which represents the encapsulating hit of the source i.e. `Hit` */ /** @@ -137,14 +138,14 @@ [U] public void UsingConcreteTypeSelector() [U] public void UsingCovariantTypesOnScroll() { /** - * Scroll() is a continuation of a previous Search() so Types() are lost. - * You can hint the type types again using CovariantTypes() + * The Scroll API is a continuation of the previous Search example so Types() are lost. + * You can hint at the types using `.CovariantTypes()` */ var result = this._client.Scroll(TimeSpan.FromMinutes(60), "scrollId", s => s .CovariantTypes(Types.Type(typeof(A), typeof(B), typeof(C))) ); /** - * Nest will translate this to a search over /index/a,b,c/_search. + * NEST will translate this to a search over `/index/a,b,c/_search`; * hits that have `"_type" : "a"` will be serialized to `A` and so forth */ @@ -185,8 +186,9 @@ [U] public void UsingConcreteTypeSelectorOnScroll() ); /** - * here for each hit we'll call the delegate with `d` which a dynamic representation of the `_source` - * and a typed `h` which represents the encapsulating hit. 
+ * As before, within the delegate passed to `.ConcreteTypeSelector` + * - `d` is the `_source` typed as `dynamic` + * - `h` is the encapsulating typed hit */ /** diff --git a/src/Tests/ClientConcepts/HighLevel/Inferrence/DocumentPaths.doc.cs b/src/Tests/ClientConcepts/HighLevel/Inference/DocumentPaths.doc.cs similarity index 64% rename from src/Tests/ClientConcepts/HighLevel/Inferrence/DocumentPaths.doc.cs rename to src/Tests/ClientConcepts/HighLevel/Inference/DocumentPaths.doc.cs index 8b770364865..7d7ee3eaae8 100644 --- a/src/Tests/ClientConcepts/HighLevel/Inferrence/DocumentPaths.doc.cs +++ b/src/Tests/ClientConcepts/HighLevel/Inference/DocumentPaths.doc.cs @@ -3,17 +3,18 @@ using Tests.Framework.MockData; using static Tests.Framework.RoundTripper; -namespace Tests.ClientConcepts.HighLevel.Inferrence +namespace Tests.ClientConcepts.HighLevel.Inference { public class DocumentPaths { - /** # DocumentPaths - * Many API's in elasticsearch describe a path to a document. In NEST besides generating a constructor that takes - * and Index, Type and Id seperately we also generate a constructor taking a DocumentPath that allows you to describe the path - * to your document more succintly + /**== Document Paths + * + * Many API's in Elasticsearch describe a path to a document. 
In NEST, besides generating a constructor that takes + * and Index, Type and Id seperately, we also generate a constructor taking a `DocumentPath` that allows you to describe the path + * to your document more succintly */ - /** Manually newing */ + /** === Creating new instances */ [U] public void FromId() { /** here we create a new document path based on Project with the id 1 */ @@ -28,20 +29,22 @@ [U] public void FromId() path = new DocumentPath(1).Index("project1"); Expect("project1").WhenSerializing(path.Index); - - /** there is also a static way to describe such paths */ - path = DocumentPath.Id(1); + + /** and there is also a static way to describe such paths */ + path = DocumentPath.Id(1); Expect("project").WhenSerializing(path.Index); Expect("project").WhenSerializing(path.Type); Expect(1).WhenSerializing(path.Id); } - //** if you have an instance of your document you can use it as well generate document paths */ + /** === Creating from a document type instance + * if you have an instance of your document you can use it as well generate document paths + */ [U] public void FromObject() { var project = new Project { Name = "hello-world" }; - /** here we create a new document path based on a Project */ + /** here we create a new document path based on the instance of `Project`, project */ IDocumentPath path = new DocumentPath(project); Expect("project").WhenSerializing(path.Index); Expect("project").WhenSerializing(path.Type); @@ -53,9 +56,9 @@ [U] public void FromObject() path = new DocumentPath(project).Index("project1"); Expect("project1").WhenSerializing(path.Index); - - /** there is also a static way to describe such paths */ - path = DocumentPath.Id(project); + + /** and again, there is also a static way to describe such paths */ + path = DocumentPath.Id(project); Expect("project").WhenSerializing(path.Index); Expect("project").WhenSerializing(path.Type); Expect("hello-world").WhenSerializing(path.Id); @@ -63,16 +66,18 @@ [U] public void FromObject() 
DocumentPath p = project; } + /** === An example with requests */ [U] public void UsingWithRequests() { + /* Given the following CLR type that describes a document */ var project = new Project { Name = "hello-world" }; - /** Here we can see and example how DocumentPath helps your describe your requests more tersely */ + /** we can see an example of how `DocumentPath` helps your describe your requests more tersely */ var request = new IndexRequest(2) { Document = project }; request = new IndexRequest(project) { }; - - /** when comparing with the full blown constructor and passing document manually - * DocumentPath<T>'s benefits become apparent. + + /** when comparing with the full blown constructor and passing document manually, + * `DocumentPath`'s benefits become apparent. */ request = new IndexRequest(IndexName.From(), TypeName.From(), 2) { diff --git a/src/Tests/ClientConcepts/HighLevel/Inference/FeaturesInference.doc.cs b/src/Tests/ClientConcepts/HighLevel/Inference/FeaturesInference.doc.cs new file mode 100644 index 00000000000..2f305f52de4 --- /dev/null +++ b/src/Tests/ClientConcepts/HighLevel/Inference/FeaturesInference.doc.cs @@ -0,0 +1,38 @@ +using Elasticsearch.Net; +using Nest; +using Tests.Framework; +using static Nest.Indices; +using static Tests.Framework.RoundTripper; + +namespace Tests.ClientConcepts.HighLevel.Inference +{ + public class FeaturesInference + { + /**[[features-inference]] + * == Features Inference + * Some URIs in Elasticsearch take a `Feature` enum. + * Within NEST, route values on the URI are represented as classes that implement an interface, `IUrlParameter`. + * Since enums _cannot_ implement interfaces in C#, a route parameter that would be of type `Feature` is represented using the `Features` class that + * the `Feature` enum implicitly converts to. 
+ */ + + /**=== Constructor + * Using the `Features` constructor directly is possible but rather involved */ + [U] public void Serializes() + { + Features fieldString = Feature.Mappings | Feature.Aliases; + Expect("_mappings,_aliases") + .WhenSerializing(fieldString); + } + + [U] + public void ImplicitConversion() + { + /** === Implicit conversion + * Here we instantiate a GET index request whichs takes two features, settings and warmers. + * Notice how we can use the `Feature` enum directly. + */ + var request = new GetIndexRequest(All, Feature.Settings | Feature.Warmers); + } + } +} diff --git a/src/Tests/ClientConcepts/HighLevel/Inferrence/FieldInference.doc.cs b/src/Tests/ClientConcepts/HighLevel/Inference/FieldInference.doc.cs similarity index 66% rename from src/Tests/ClientConcepts/HighLevel/Inferrence/FieldInference.doc.cs rename to src/Tests/ClientConcepts/HighLevel/Inference/FieldInference.doc.cs index 5c2978ca3e9..cd9110ef09e 100644 --- a/src/Tests/ClientConcepts/HighLevel/Inferrence/FieldInference.doc.cs +++ b/src/Tests/ClientConcepts/HighLevel/Inference/FieldInference.doc.cs @@ -15,25 +15,26 @@ using Field = Nest.Field; using Xunit; -namespace Tests.ClientConcepts.HighLevel.Inferrence +namespace Tests.ClientConcepts.HighLevel.Inference { public class FieldInferrence { - /** # Strongly typed field access - * - * Several places in the elasticsearch API expect the path to a field from your original source document as a string. - * NEST allows you to use C# expressions to strongly type these field path strings. + /**== Field Inference * - * These expressions are assigned to a type called `Field` and there are several ways to create a instance of that type + * Several places in the Elasticsearch API expect the path to a field from your original source document as a string. + * NEST allows you to use C# expressions to strongly type these field path strings. 
+ * + * These expressions are assigned to a type called `Field` and there are several ways to create an instance of one: */ - /** Using the constructor directly is possible but rather involved */ + /**=== Constructor + * Using the constructor directly is possible but rather involved */ [U] public void UsingConstructors() { var fieldString = new Field { Name = "name" }; - /** especially when using C# expressions since these can not be simply new'ed*/ + /** This is more cumbersome when using C# expressions since they cannot be instantiated easily*/ Expression> expression = p => p.Name; var fieldExpression = Field.Create(expression); @@ -42,13 +43,14 @@ public void UsingConstructors() .WhenSerializing(fieldString); } - /** Therefore you can also implicitly convert strings and expressions to Field's */ + /**=== Implicit Conversion + * Therefore you can also implicitly convert strings and expressions to ``Field``s */ [U] public void ImplicitConversion() { Field fieldString = "name"; - /** but for expressions this is still rather involved */ + /** but for expressions this is _still_ rather involved */ Expression> expression = p => p.Name; Field fieldExpression = expression; @@ -57,7 +59,9 @@ public void ImplicitConversion() .WhenSerializing(fieldString); } - /** to ease creating Field's from expressions there is a static Property class you can use */ + /**=== ``Nest.Infer`` + * to ease creating ``Field``s from expressions there is a static `Infer` class you can use + */ [U] public void UsingStaticPropertyField() { @@ -66,11 +70,11 @@ public void UsingStaticPropertyField() /** but for expressions this is still rather involved */ var fieldExpression = Infer.Field(p => p.Name); - /** Using static imports in c# 6 this can be even shortened: - using static Nest.Static; + /** this can be even shortened even further using a https://msdn.microsoft.com/en-us/library/sf0df423.aspx#Anchor_0[static import in C# 6] i.e. 
+ `using static Nest.Infer;` */ fieldExpression = Field(p => p.Name); - /** Now this is much much terser then our first example using the constructor! */ + /** Now that is much terser then our first example using the constructor! */ Expect("name") .WhenSerializing(fieldString) @@ -86,49 +90,59 @@ public void UsingStaticPropertyField() .WhenSerializing(fieldExpression); } - /** By default NEST will camelCase all the field names to be more javascripty */ + /**=== Field name casing + * By default, NEST will camel-case **all** field names to better align with typical + * javascript/json conventions + */ [U] public void DefaultFieldNameInferrer() { - /** using DefaultFieldNameInferrer() on ConnectionSettings you can change this behavior */ + /** using `DefaultFieldNameInferrer()` on ConnectionSettings you can change this behavior */ var setup = WithConnectionSettings(s => s.DefaultFieldNameInferrer(p => p.ToUpper())); setup.Expect("NAME").WhenSerializing(Field(p => p.Name)); - /** However string are *always* passed along verbatim */ + /** However ``string``s are *always* passed along verbatim */ setup.Expect("NaMe").WhenSerializing("NaMe"); - /** if you want the same behavior for expressions simply do nothing in the default inferrer */ + /** if you want the same behavior for expressions, simply pass a Func to `DefaultFieldNameInferrer` + * to make no changes to the name + */ setup = WithConnectionSettings(s => s.DefaultFieldNameInferrer(p => p)); setup.Expect("Name").WhenSerializing(Field(p => p.Name)); } - /** Complex field name expressions */ - + /**=== Complex field name expressions */ [U] public void ComplexFieldNameExpressions() { - /** You can follow your property expression to any depth, here we are traversing to the LeadDeveloper's (Person) FirstName */ + /** You can follow your property expression to any depth. 
Here we are traversing to the ``LeadDeveloper``'s `FirstName` */ Expect("leadDeveloper.firstName").WhenSerializing(Field(p => p.LeadDeveloper.FirstName)); - /** When dealing with collection index access is ingnored allowing you to traverse into properties of collections */ + + /** When dealing with collection indexers, the indexer access is ignored allowing you to traverse into properties of collections */ Expect("curatedTags").WhenSerializing(Field(p => p.CuratedTags[0])); - /** Similarly .First() also works, remember these are expressions and not actual code that will be executed */ + + /** Similarly, LINQ's `.First()` method also works */ Expect("curatedTags").WhenSerializing(Field(p => p.CuratedTags.First())); Expect("curatedTags.added").WhenSerializing(Field(p => p.CuratedTags[0].Added)); Expect("curatedTags.name").WhenSerializing(Field(p => p.CuratedTags.First().Name)); - /** When we see an indexer on a dictionary we assume they describe property names */ + /** NOTE: Remember, these are _expressions_ and not actual code that will be executed + * + * An indexer on a dictionary is assumed to describe a property name */ Expect("metadata.hardcoded").WhenSerializing(Field(p => p.Metadata["hardcoded"])); Expect("metadata.hardcoded.created").WhenSerializing(Field(p => p.Metadata["hardcoded"].Created)); - /** A cool feature here is that we'll evaluate variables passed to these indexers */ + /** A cool feature here is that we'll evaluate variables passed to an indexer */ var variable = "var"; Expect("metadata.var").WhenSerializing(Field(p => p.Metadata[variable])); Expect("metadata.var.created").WhenSerializing(Field(p => p.Metadata[variable].Created)); - - /** If you are using elasticearch's multifield mapping (you really should!) 
these "virtual" sub fields - * do not always map back on to your POCO, by calling .Suffix() you describe the sub fields that do not live in your c# objects + /** + * If you are using Elasticearch's {ref_current}/_multi_fields.html[multi_fields], which you really should as they allow + * you to analyze a string in a number of different ways, these __"virtual"__ sub fields + * do not always map back on to your POCO. By calling `.Suffix()` on expressions, you describe the sub fields that + * should be mapped and <> */ Expect("leadDeveloper.firstName.raw").WhenSerializing(Field(p => p.LeadDeveloper.FirstName.Suffix("raw"))); Expect("curatedTags.raw").WhenSerializing(Field(p => p.CuratedTags[0].Suffix("raw"))); @@ -138,25 +152,24 @@ public void ComplexFieldNameExpressions() Expect("metadata.hardcoded.created.raw").WhenSerializing(Field(p => p.Metadata["hardcoded"].Created.Suffix("raw"))); /** - * You can even chain them to any depth! + * You can even chain `.Suffix()` calls to any depth! */ Expect("curatedTags.name.raw.evendeeper").WhenSerializing(Field(p => p.CuratedTags.First().Name.Suffix("raw").Suffix("evendeeper"))); - /** Variables passed to suffix will be evaluated as well */ var suffix = "unanalyzed"; Expect("metadata.var.unanalyzed").WhenSerializing(Field(p => p.Metadata[variable].Suffix(suffix))); Expect("metadata.var.created.unanalyzed").WhenSerializing(Field(p => p.Metadata[variable].Created.Suffix(suffix))); } - /** - * Suffixes can be appended to expressions. This is useful in cases where you want to apply the same suffix - * to a list of fields + /** + * Suffixes can also be appended to expressions using `.ApplySuffix()`. This is useful in cases where you want to apply the same suffix + * to a list of fields. 
*/ [U] public void AppendingSuffixToExpressions() { - /** */ + /** Here we have a list of expressions */ var expressions = new List>> { p => p.Name, @@ -165,8 +178,8 @@ public void AppendingSuffixToExpressions() p => p.LeadDeveloper.FirstName }; - /** append the suffix "raw" to each expression */ - var fieldExpressions = + /** and we want to append the suffix "raw" to each */ + var fieldExpressions = expressions.Select>, Field>(e => e.AppendSuffix("raw")).ToList(); Expect("name.raw").WhenSerializing(fieldExpressions[0]); @@ -175,8 +188,8 @@ public void AppendingSuffixToExpressions() Expect("leadDeveloper.firstName.raw").WhenSerializing(fieldExpressions[3]); } - /** Annotations - * + /**=== Annotations + * * When using NEST's property attributes you can specify a new name for the properties */ public class BuiltIn @@ -190,23 +203,26 @@ public void BuiltInAnnotiatons() Expect("naam").WhenSerializing(Field(p => p.Name)); } - /** + /** * Starting with NEST 2.x we also ask the serializer if it can resolve the property to a name. 
- * Here we ask the default JsonNetSerializer and it takes JsonProperty into account + * Here we ask the default `JsonNetSerializer` to resolve a property name and it takes + * the `JsonPropertyAttribute` into account */ public class SerializerSpecific { [JsonProperty("nameInJson")] public string Name { get; set; } } + [U] public void SerializerSpecificAnnotations() { Expect("nameInJson").WhenSerializing(Field(p => p.Name)); } - /** - * If both are specified NEST takes precedence though + /** + * If both a NEST property attribute and a serializer specific attribute are present on a property, + * NEST takes precedence */ public class Both { @@ -224,16 +240,14 @@ public void NestAttributeTakesPrecedence() }).WhenSerializing(new Both { Name = "Martijn Laarman" }); } - class A { public C C { get; set; } } - class B { public C C { get; set; } } - class C - { - public string Name { get; set; } - } /** - * Resolving field names is cached but this is per connection settings + * Resolution of field names is cached per connection settings instance. 
To demonstrate, + * take the following simple POCOs */ + class A { public C C { get; set; } } + class B { public C C { get; set; } } + class C { public string Name { get; set; } } [U] public void ExpressionsAreCachedButSeeDifferentTypes() @@ -253,8 +267,9 @@ public void ExpressionsAreCachedButSeeDifferentTypes() fieldNameOnB.Should().Be("c.name"); /** - * now we create a new connectionsettings with a remap for C on class A to `d` - * now when we resolve the field path for A will be different + * now we create a new connectionsettings with a remap for `C` on class `A` to `"d"` + * now when we resolve the field path for property `C` on `A`, it will be different than + * for property `C` on `B` */ var newConnectionSettings = TestClient.CreateSettings(forceInMemory: true, modifySettings: s => s .InferMappingFor(m => m @@ -278,49 +293,43 @@ public void ExpressionsAreCachedButSeeDifferentTypes() } /** - * To wrap up lets showcase the precedence that field names are inferred - * 1. A hard rename of the property on connection settings using Rename() - * 2. A NEST property mapping - * 3. Ask the serializer if the property has a verbatim value e.g it has an explicit JsonPropery attribute. - * 4. Pass the MemberInfo's Name to the DefaultFieldNameInferrer which by default camelCases + * To wrap up, the precedence in which field names are inferred is: * - * In the following example we have a class where each case wins + * . A hard rename of the property on connection settings using `.Rename()` + * . A NEST property mapping + * . Ask the serializer if the property has a verbatim value e.g it has an explicit JsonPropery attribute. + * . 
Pass the MemberInfo's Name to the DefaultFieldNameInferrer which by default camelCases + * + * The following example class will demonstrate this precedence */ - class Precedence { - /** - * Eventhough this property has a NEST property mapping and a JsonProperty attribute - * We are going to provide a hard rename for it on ConnectionSettings later that should win. - */ + // Even though this property has a NEST property mapping and a JsonProperty attribute, + // We are going to provide a hard rename for it on ConnectionSettings later that should win. [String(Name = "renamedIgnoresNest")] [JsonProperty("renamedIgnoresJsonProperty")] public string RenamedOnConnectionSettings { get; set; } - /** - * This property has both a NEST attribute and a JsonProperty, NEST should win. - */ + // This property has both a NEST attribute and a JsonProperty, NEST should win. [String(Name = "nestAtt")] [JsonProperty("jsonProp")] public string NestAttribute { get; set; } - /** We should take the json property into account by itself */ + // We should take the json property into account by itself [JsonProperty("jsonProp")] public string JsonProperty { get; set; } - /** This property we are going to special case in our custom serializer to resolve to `ask` */ + // This property we are going to special case in our custom serializer to resolve to ask [JsonProperty("dontaskme")] public string AskSerializer { get; set; } - /** We are going to register a DefaultFieldNameInferrer on ConnectionSettings - * that will uppercase all properties. - */ + // We are going to register a DefaultFieldNameInferrer on ConnectionSettings + // that will uppercase all properties. 
public string DefaultFieldNameInferrer { get; set; } - } - /** - * Here we create a custom converter that renames any property named `AskSerializer` to `ask` + /** + * Here we create a custom serializer that renames any property named `AskSerializer` to `ask` */ class CustomSerializer : JsonNetSerializer { @@ -328,20 +337,23 @@ public CustomSerializer(IConnectionSettingsValues settings) : base(settings) { } public override IPropertyMapping CreatePropertyMapping(MemberInfo memberInfo) { - if (memberInfo.Name == "AskSerializer") return new PropertyMapping { Name = "ask" }; - return base.CreatePropertyMapping(memberInfo); + return memberInfo.Name == nameof(Precedence.AskSerializer) + ? new PropertyMapping { Name = "ask" } + : base.CreatePropertyMapping(memberInfo); } } [U] public void PrecedenceIsAsExpected() { + /** here we provide an explicit rename of a property on `ConnectionSettings` using `.Rename()` + * and all properties that are not mapped verbatim should be uppercased + */ var usingSettings = WithConnectionSettings(s => s - /** here we provide an explicit rename of a property on connectionsettings */ + .InferMappingFor(m => m .Rename(p => p.RenamedOnConnectionSettings, "renamed") ) - /** All properties that are not mapped verbatim should be uppercased*/ .DefaultFieldNameInferrer(p => p.ToUpperInvariant()) ).WithSerializer(s => new CustomSerializer(s)); @@ -350,9 +362,9 @@ public void PrecedenceIsAsExpected() usingSettings.Expect("jsonProp").ForField(Field(p => p.JsonProperty)); usingSettings.Expect("ask").ForField(Field(p => p.AskSerializer)); usingSettings.Expect("DEFAULTFIELDNAMEINFERRER").ForField(Field(p => p.DefaultFieldNameInferrer)); - - /** The same rules apply when indexing an object */ - usingSettings.Expect(new [] + + /** The same naming rules also apply when indexing a document */ + usingSettings.Expect(new [] { "ask", "DEFAULTFIELDNAMEINFERRER", @@ -368,6 +380,6 @@ public void PrecedenceIsAsExpected() DefaultFieldNameInferrer = "shouting much?" 
}); - } + } } } diff --git a/src/Tests/ClientConcepts/HighLevel/Inference/IdsInference.doc.cs b/src/Tests/ClientConcepts/HighLevel/Inference/IdsInference.doc.cs new file mode 100644 index 00000000000..cd0eb2b5e68 --- /dev/null +++ b/src/Tests/ClientConcepts/HighLevel/Inference/IdsInference.doc.cs @@ -0,0 +1,119 @@ +using System; +using Nest; +using Tests.Framework; +using static Tests.Framework.RoundTripper; + +namespace Tests.ClientConcepts.HighLevel.Inference +{ + public class IdsInference + { + /**[[ids-inference]] + *== Ids Inference + * + * === Implicit Conversions + * + * Several places in the Elasticsearch API expect an `Id` object to be passed. + * This is a special box type that you can implicitly convert to from the following types + * + * - `Int32` + * - `Int64` + * - `String` + * - `Guid` + * + * Methods that take an `Id` can be passed any of these types and it will be implicitly converted to an `Id` + */ + [U] public void CanImplicitlyConvertToId() + { + Id idFromInt = 1; + Id idFromLong = 2L; + Id idFromString = "hello-world"; + Id idFromGuid = new Guid("D70BD3CF-4E38-46F3-91CA-FCBEF29B148E"); + + Expect(1).WhenSerializing(idFromInt); + Expect(2).WhenSerializing(idFromLong); + Expect("hello-world").WhenSerializing(idFromString); + Expect("d70bd3cf-4e38-46f3-91ca-fcbef29b148e").WhenSerializing(idFromGuid); + } + + /** === Inferring from a Type + * + * Sometimes a method takes an object and we need an Id from that object to build up a path. + * There is no implicit conversion from any object to Id but we can call `Id.From`. 
+ * + * Imagine your codebase has the following type that we want to index into Elasticsearch + */ + class MyDTO + { + public Guid Id { get; set; } + public string Name { get; set; } + public string OtherName { get; set; } + } + + [U] public void CanGetIdFromDocument() + { + /** By default NEST will try to find a property called `Id` on the class using reflection + * and create a cached fast func delegate based on the properties getter + */ + var dto = new MyDTO + { + Id = new Guid("D70BD3CF-4E38-46F3-91CA-FCBEF29B148E"), + Name = "x", + OtherName = "y" + }; + + Expect("d70bd3cf-4e38-46f3-91ca-fcbef29b148e").WhenInferringIdOn(dto); + + /** Using the connection settings you can specify a different property that NEST should use to infer the document Id. + * Here we instruct NEST to infer the Id for `MyDTO` based on its `Name` property + */ + WithConnectionSettings(x => x + .InferMappingFor(m => m + .IdProperty(p => p.Name) + ) + ).Expect("x").WhenInferringIdOn(dto); + + /** IMPORTANT: Inference rules are cached __per__ `ConnectionSettings` instance. 
+ * + * Because the cache is per `ConnectionSettings` instance, we can create another `ConnectionSettings` instance + * with different inference rules + */ + WithConnectionSettings(x => x + .InferMappingFor(m => m + .IdProperty(p => p.OtherName) + ) + ).Expect("y").WhenInferringIdOn(dto); + } + + /** === Using the `ElasticsearchType` attribute + * + * Another way is to mark the type with an `ElasticsearchType` attribute, setting `IdProperty` + * to the name of the property that should be used for the document id + */ + [ElasticsearchType(IdProperty = nameof(Name))] + class MyOtherDTO + { + public Guid Id { get; set; } + public string Name { get; set; } + public string OtherName { get; set; } + } + + [U] public void CanGetIdFromAttribute() + { + /** Now when we infer the id we expect it to be the value of the `Name` property without doing any configuration on the `ConnectionSettings` */ + var dto = new MyOtherDTO { Id = new Guid("D70BD3CF-4E38-46F3-91CA-FCBEF29B148E"), Name = "x", OtherName = "y" }; + Expect("x").WhenInferringIdOn(dto); + + /** === Using Mapping inference on `ConnectionSettings` + * + * This attribute *is* cached statically/globally, however an inference rule on the `ConnectionSettings` for the type will + * still win over the attribute. 
Here we demonstrate this by creating a different `ConnectionSettings` instance + * that will infer the document id from the property `OtherName`: + */ + WithConnectionSettings(x => x + .InferMappingFor(m => m + .IdProperty(p => p.OtherName) + ) + ).Expect("y").WhenInferringIdOn(dto); + } + } +} diff --git a/src/Tests/ClientConcepts/HighLevel/Inference/IndexNameInference.doc.cs b/src/Tests/ClientConcepts/HighLevel/Inference/IndexNameInference.doc.cs new file mode 100644 index 00000000000..db607fd075e --- /dev/null +++ b/src/Tests/ClientConcepts/HighLevel/Inference/IndexNameInference.doc.cs @@ -0,0 +1,181 @@ +using Elasticsearch.Net; +using FluentAssertions; +using Nest; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; +using Tests.Framework; +using Tests.Framework.MockData; +using Xunit; + +namespace Tests.ClientConcepts.HighLevel.Inference +{ + /**[[index-name-inference]] + *== Index Name Inference + * + * Many endpoints within the Elasticsearch API expect to receive one or more index names + * as part of the request in order to know what index/indices a request should operate on. + * + * NEST has a number of ways in which an index name can be specified + */ + public class IndexNameInference + { + /**=== Default Index name on ConnectionSettings + * A default index name can be specified on `ConnectionSettings` usinf `.DefaultIndex()`. + * This is the default index name to use when no other index name can be resolved for a request + */ + [U] + public void DefaultIndexIsInferred() + { + var settings = new ConnectionSettings() + .DefaultIndex("defaultindex"); + var resolver = new IndexNameResolver(settings); + var index = resolver.Resolve(); + index.Should().Be("defaultindex"); + } + + /**=== Mapping an Index name for POCOs + * A index name can be mapped for CLR types using `.MapDefaultTypeIndices()` on `ConnectionSettings`. 
+ */ + [U] + public void ExplicitMappingIsInferred() + { + var settings = new ConnectionSettings() + .MapDefaultTypeIndices(m => m + .Add(typeof(Project), "projects") + ); + var resolver = new IndexNameResolver(settings); + var index = resolver.Resolve(); + index.Should().Be("projects"); + } + + /**=== Mapping an Index name for POCOs + * An index name for a POCO provided using `.MapDefaultTypeIndices()` **will take precedence** over + * the default index name + */ + [U] + public void ExplicitMappingTakesPrecedence() + { + var settings = new ConnectionSettings() + .DefaultIndex("defaultindex") + .MapDefaultTypeIndices(m => m + .Add(typeof(Project), "projects") + ); + var resolver = new IndexNameResolver(settings); + var index = resolver.Resolve(); + index.Should().Be("projects"); + } + + /**=== Explicitly specifying Index name on the request + * For API calls that expect an index name, the index name can be explicitly provided + * on the request + */ + [U] + public void ExplicitIndexOnRequest() + { + Uri requestUri = null; + var client = TestClient.GetInMemoryClient(s => s + .OnRequestCompleted(r => { requestUri = r.Uri; })); + + var response = client.Search(s => s.Index("some-other-index")); //<1> Provide the index name on the request + + requestUri.Should().NotBeNull(); + requestUri.LocalPath.Should().StartWith("/some-other-index/"); + } + + /** When an index name is provided on a request, it **will take precedence** over the default + * index name and any index name specified for the POCO type using `.MapDefaultTypeIndices()` + */ + [U] + public void ExplicitIndexOnRequestTakesPrecedence() + { + var client = TestClient.GetInMemoryClient(s => + new ConnectionSettings() + .DefaultIndex("defaultindex") + .MapDefaultTypeIndices(m => m + .Add(typeof(Project), "projects") + ) + ); + + var response = client.Search(s => s.Index("some-other-index")); //<1> Provide the index name on the request + + response.ApiCall.Uri.Should().NotBeNull(); + 
response.ApiCall.Uri.LocalPath.Should().StartWith("/some-other-index/"); + } + + /**=== Naming Conventions + * Index names within Elasticsearch cannot contain upper case letters. + * NEST will check the index name at the point at which the index + * name needs to be resolved to make a request; if the index name contains + * upper case letters, a `ResolveException` will be thrown indicating + * the problem and the index name that caused the problem. + */ + [U] + public void UppercaseCharacterThrowsResolveException() + { + /** + * In the following example, we create a connection settings withboth a default index + * name and an index name to use for the `Project` type. + */ + var settings = new ConnectionSettings() + .DefaultIndex("Default") + .MapDefaultTypeIndices(m => m + .Add(typeof(Project), "myProjects") + ); + + var resolver = new IndexNameResolver(settings); + + /** When resolving the index name for the `Project` type, a `ResolveException` + * is thrown, indicating that the index name "__myProjects__" contains upper case letters + */ + var e = Assert.Throws(() => resolver.Resolve()); + e.Message.Should().Be($"Index names cannot contain uppercase characters: myProjects."); + + /** + * Similarly, when resolving the index name for the `Tag` type, which will use the default index + * name, a `ResolveException` is thrown indicating that the default index name contains upper case + * letters + */ + e = Assert.Throws(() => resolver.Resolve()); + e.Message.Should().Be($"Index names cannot contain uppercase characters: Default."); + + /** + * Finally, when resolving an index name from a string, a `ResolveException` will be thrown + * if the string contains upper case letters + */ + e = Assert.Throws(() => resolver.Resolve("Foo")); + e.Message.Should().Be($"Index names cannot contain uppercase characters: Foo."); + } + + /** If no index name can be resolved for a request i.e. 
if + * + * - no default index name is set on connection settings + * - no index name is mapped for a POCO + * - no index name is explicitly specified on the request + * + * then a `ResolveException` will be thrown to indicate that the index name is `null` + */ + [U] + public void NoIndexThrowsResolveException() + { + var settings = new ConnectionSettings(); + var resolver = new IndexNameResolver(settings); + var e = Assert.Throws(() => resolver.Resolve()); + e.Message.Should().Contain("Index name is null"); + } + + /** + * ``ResolveException``s bubble out of the client and should be dealt with as <> + * similar to `ArgumentException`, `ArgumentOutOfRangeException` and other exceptions that _usually_ indicate + * misuse of the client API + */ + [U] + public void ResolveExceptionBubblesOut() + { + var client = TestClient.GetInMemoryClient(s => new ConnectionSettings()); + var e = Assert.Throws(() => client.Search()); + } + } +} diff --git a/src/Tests/ClientConcepts/HighLevel/Inferrence/IndicesPaths.doc.cs b/src/Tests/ClientConcepts/HighLevel/Inference/IndicesPaths.doc.cs similarity index 71% rename from src/Tests/ClientConcepts/HighLevel/Inferrence/IndicesPaths.doc.cs rename to src/Tests/ClientConcepts/HighLevel/Inference/IndicesPaths.doc.cs index 3bd8fa97bcb..9555508c296 100644 --- a/src/Tests/ClientConcepts/HighLevel/Inferrence/IndicesPaths.doc.cs +++ b/src/Tests/ClientConcepts/HighLevel/Inference/IndicesPaths.doc.cs @@ -2,17 +2,18 @@ using Tests.Framework; using Tests.Framework.MockData; -namespace Tests.ClientConcepts.HighLevel.Inferrence +namespace Tests.ClientConcepts.HighLevel.Inference { public class IndicesPaths { - /** # Indices paths - * + /**== Indices paths + * * Some API's in elasticsearch take one or many index name or a special "_all" marker to send the request to all the indices * In nest this is encoded using `Indices` */ - /** Several types implicitly convert to `Indices` */ + /**=== Implicit Conversion + * Several types implicitly convert to 
`Indices` */ [U] public void ImplicitConversionFromString() { Nest.Indices singleIndexFromString = "name"; @@ -38,16 +39,18 @@ [U] public void ImplicitConversionFromString() ); } - /** to ease creating Field's from expressions there is a static Property class you can use */ + /**=== Using `Nest.Indices` + * To ease creating ``Indice``s from expressions, there is a static `Nest.Indices` class you can use + */ [U] public void UsingStaticPropertyField() { - /** */ - var all = Nest.Indices.All; - var many = Nest.Indices.Index("name1", "name2"); - var manyTyped = Nest.Indices.Index().And(); + var all = Nest.Indices.All; //<1> Using "_all" indices + var many = Nest.Indices.Index("name1", "name2"); //<2> specifying multiple indices using strings + var manyTyped = Nest.Indices.Index().And(); //<3> specifying multiple indices using types + var singleTyped = Nest.Indices.Index(); + var singleString = Nest.Indices.Index("name1"); - var invalidSingleString = Nest.Indices.Index("name1, name2"); + + var invalidSingleString = Nest.Indices.Index("name1, name2"); //<4> **invalid** single index name } } } diff --git a/src/Tests/ClientConcepts/HighLevel/Inference/PropertyInference.doc.cs b/src/Tests/ClientConcepts/HighLevel/Inference/PropertyInference.doc.cs new file mode 100644 index 00000000000..caa0ec550b8 --- /dev/null +++ b/src/Tests/ClientConcepts/HighLevel/Inference/PropertyInference.doc.cs @@ -0,0 +1,94 @@ +using System; +using System.Linq.Expressions; +using Nest; +using Tests.Framework; +using Tests.Framework.Integration; +using Tests.Framework.MockData; +using static Tests.Framework.RoundTripper; +using Xunit; +using FluentAssertions; + +namespace Tests.ClientConcepts.HighLevel.Inference +{ + /**[[property-inference]] + * == Property Name Inference + */ + [Collection(IntegrationContext.Indexing)] + public class PropertyNames : SimpleIntegration + { + private IElasticClient _client; + + public PropertyNames(IndexingCluster cluster) : base(cluster) + { + _client = cluster.Node.Client(); 
+ } + + /**=== Appending suffixes to a Lambda expression body + * Suffixes can be appended to the body of a lambda expression, useful in cases where + * you have a POCO property mapped as a {ref_current}/_multi_fields.html[multi_field] + * and want to use strongly typed access based on the property, yet append a suffix to the + * generated field name in order to access a particular `multi_field`. + * + * The `.Suffix()` extension method can be used for this purpose and when serializing expressions suffixed + * in this way, the serialized field name resolves to the last token + */ + [U] public void PropertyNamesAreResolvedToLastToken() + { + Expression> expression = p => p.Name.Suffix("raw"); + Expect("raw").WhenSerializing(expression); + } + + /**=== Appending suffixes to a Lambda expression + * Alternatively, suffixes can be applied to a lambda expression directly using + * the `.ApplySuffix()` extension method. Again, the serialized field name + * resolves to the last token + */ + [U] + public void PropertyNamesAreResolvedToLastTokenUsingApplySuffix() + { + Expression> expression = p => p.Name; + expression = expression.AppendSuffix("raw"); + Expect("raw").WhenSerializing(expression); + } + + /**=== Naming conventions + * Currently, the name of a field cannot contain a `.` in Elasticsearch due to the potential for ambiguity with + * a field that is mapped as a {ref_current}/_multi_fields.html[multi_field]. 
+ * + * In these cases, NEST allows the call to go to Elasticsearch, deferring the naming conventions to the server side and, + * in the case of a `.` in a field name, a `400 Bad Response` is returned with a server error indicating the reason + */ + [I] public void PropertyNamesContainingDotsCausesElasticsearchServerError() + { + var createIndexResponse = _client.CreateIndex("random-" + Guid.NewGuid().ToString().ToLowerInvariant(), c => c + .Mappings(m => m + .Map("type-with-dot", mm => mm + .Properties(p => p + .String(s => s + .Name("name-with.dot") + ) + ) + ) + ) + ); + + /** The response is not valid */ + createIndexResponse.IsValid.Should().BeFalse(); + + /** `DebugInformation` provides an audit trail of information to help diagnose the issue */ + createIndexResponse.DebugInformation.Should().NotBeNullOrEmpty(); + + /** `ServerError` contains information about the response from Elasticsearch */ + createIndexResponse.ServerError.Should().NotBeNull(); + createIndexResponse.ServerError.Status.Should().Be(400); + createIndexResponse.ServerError.Error.Should().NotBeNull(); + createIndexResponse.ServerError.Error.RootCause.Should().NotBeNullOrEmpty(); + + var rootCause = createIndexResponse.ServerError.Error.RootCause[0]; + + /** We can see that the underlying reason is a `.` in the field name "name-with.dot" */ + rootCause.Reason.Should().Be("Field name [name-with.dot] cannot contain '.'"); + rootCause.Type.Should().Be("mapper_parsing_exception"); + } + } +} diff --git a/src/Tests/ClientConcepts/HighLevel/Inferrence/FeaturesInference.cs b/src/Tests/ClientConcepts/HighLevel/Inferrence/FeaturesInference.cs deleted file mode 100644 index 4dcaa3b9b20..00000000000 --- a/src/Tests/ClientConcepts/HighLevel/Inferrence/FeaturesInference.cs +++ /dev/null @@ -1,36 +0,0 @@ -using Elasticsearch.Net; -using Nest; -using Tests.Framework; -using static Nest.Indices; -using static Tests.Framework.RoundTripper; - -namespace Tests.ClientConcepts.HighLevel.Inferrence -{ - public 
class FeaturesInference - { - /** # Features - * Some urls in Elasticsearch take a {feature} enum - * RouteValues in NEST are represented as classes implementing IUrlParameter - * Since enums can not implement interfaces in C# this route param is represented using the Features class that can - * be implicitly converted to from the Feature enum - */ - - /** Using the constructor directly is possible but rather involved */ - [U] public void Serializes() - { - Features fieldString = Feature.Mappings | Feature.Aliases; - Expect("_mappings,_aliases") - .WhenSerializing(fieldString); - } - - [U] - public void ImplicitConversion() - { - /** - * Here we new an GET index elasticsearch request whichs takes Indices and Features. - * Notice how we can use the Feature enum directly. - */ - var request = new GetIndexRequest(All, Feature.Settings | Feature.Warmers); - } - } -} diff --git a/src/Tests/ClientConcepts/HighLevel/Inferrence/IdsInference.doc.cs b/src/Tests/ClientConcepts/HighLevel/Inferrence/IdsInference.doc.cs deleted file mode 100644 index 95f1a6f6b59..00000000000 --- a/src/Tests/ClientConcepts/HighLevel/Inferrence/IdsInference.doc.cs +++ /dev/null @@ -1,88 +0,0 @@ -using System; -using Nest; -using Tests.Framework; -using static Tests.Framework.RoundTripper; - -namespace Tests.ClientConcepts.HighLevel.Inferrence -{ - public class IdsInference - { - /** # Ids - * - * Several places in the elasticsearch API expect an Id object to be passed. This is a special box type that you can implicitly convert to and from many value types. 
- */ - - /** Methods that take an Id can be passed longs, ints, strings & Guids and they will implicitly converted to Ids */ - [U] public void CanImplicitlyConvertToId() - { - Id idFromInt = 1; - Id idFromLong = 2L; - Id idFromString = "hello-world"; - Id idFromGuid = new Guid("D70BD3CF-4E38-46F3-91CA-FCBEF29B148E"); - - Expect(1).WhenSerializing(idFromInt); - Expect(2).WhenSerializing(idFromLong); - Expect("hello-world").WhenSerializing(idFromString); - Expect("d70bd3cf-4e38-46f3-91ca-fcbef29b148e").WhenSerializing(idFromGuid); - } - - /** Sometimes a method takes an object and we need an Id from that object to build up a path. - * There is no implicit conversion from any object to Id but we can call Id.From. - * - * Imagine your codebase has the following type that we want to index into elasticsearch - */ - class MyDTO - { - public Guid Id { get; set; } - public string Name { get; set; } - public string OtherName { get; set; } - } - - [U] public void CanGetIdFromDocument() - { - /** By default NEST will try to find a property called `Id` on the class using reflection - * and create a cached fast func delegate based on the properties getter*/ - var dto = new MyDTO { Id =new Guid("D70BD3CF-4E38-46F3-91CA-FCBEF29B148E"), Name = "x", OtherName = "y" }; - Expect("d70bd3cf-4e38-46f3-91ca-fcbef29b148e").WhenInferringIdOn(dto); - - /** Using the connection settings you can specify a different property NEST should look for ids. 
- * Here we instruct NEST to infer the Id for MyDTO based on its Name property */ - WithConnectionSettings(x => x - .InferMappingFor(m => m - .IdProperty(p => p.Name) - ) - ).Expect("x").WhenInferringIdOn(dto); - - /** Even though we have a cache at play the cache is per connection settings, so we can create a different config */ - WithConnectionSettings(x => x - .InferMappingFor(m => m - .IdProperty(p => p.OtherName) - ) - ).Expect("y").WhenInferringIdOn(dto); - } - - /** Another way is to mark the type with an ElasticType attribute, using a string IdProperty */ - [ElasticsearchType(IdProperty = nameof(Name))] - class MyOtherDTO - { - public Guid Id { get; set; } - public string Name { get; set; } - public string OtherName { get; set; } - } - - [U] public void CanGetIdFromAttribute() - { - /** Now when we infer the id we expect it to be the Name property without doing any configuration on the ConnectionSettings */ - var dto = new MyOtherDTO { Id =new Guid("D70BD3CF-4E38-46F3-91CA-FCBEF29B148E"), Name = "x", OtherName = "y" }; - Expect("x").WhenInferringIdOn(dto); - /** This attribute IS cached statically/globally, however connectionsettings with a config for the type will - * still win over this static configuration*/ - /** Eventhough we have a cache at play the cache its per connection settings, so we can create a different config */ - WithConnectionSettings(x => x - .InferMappingFor(m => m - .IdProperty(p => p.OtherName) - ) - ).Expect("y").WhenInferringIdOn(dto); - } - } -} diff --git a/src/Tests/ClientConcepts/HighLevel/Inferrence/IndexNameInference.doc.cs b/src/Tests/ClientConcepts/HighLevel/Inferrence/IndexNameInference.doc.cs deleted file mode 100644 index 2702a5209c0..00000000000 --- a/src/Tests/ClientConcepts/HighLevel/Inferrence/IndexNameInference.doc.cs +++ /dev/null @@ -1,88 +0,0 @@ -using Elasticsearch.Net; -using FluentAssertions; -using Nest; -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using 
System.Threading.Tasks; -using Tests.Framework; -using Tests.Framework.MockData; -using Xunit; - -namespace Tests.ClientConcepts.HighLevel.Inferrence -{ - public class IndexNameInference - { - [U] - public void DefaultIndexIsInferred() - { - var settings = new ConnectionSettings() - .DefaultIndex("defaultindex"); - var resolver = new IndexNameResolver(settings); - var index = resolver.Resolve(); - index.Should().Be("defaultindex"); - } - - [U] - public void ExplicitMappingIsInferred() - { - var settings = new ConnectionSettings() - .MapDefaultTypeIndices(m => m - .Add(typeof(Project), "projects") - ); - var resolver = new IndexNameResolver(settings); - var index = resolver.Resolve(); - index.Should().Be("projects"); - } - - [U] - public void ExplicitMappingTakesPrecedence() - { - var settings = new ConnectionSettings() - .DefaultIndex("defaultindex") - .MapDefaultTypeIndices(m => m - .Add(typeof(Project), "projects") - ); - var resolver = new IndexNameResolver(settings); - var index = resolver.Resolve(); - index.Should().Be("projects"); - } - - [U] - public void UppercaseCharacterThrowsResolveException() - { - var settings = new ConnectionSettings() - .DefaultIndex("Default") - .MapDefaultTypeIndices(m => m - .Add(typeof(Project), "myProjects") - ); - - var resolver = new IndexNameResolver(settings); - - var e = Assert.Throws(() => resolver.Resolve()); - e.Message.Should().Be($"Index names cannot contain uppercase characters: myProjects."); - e = Assert.Throws(() => resolver.Resolve()); - e.Message.Should().Be($"Index names cannot contain uppercase characters: Default."); - e = Assert.Throws(() => resolver.Resolve("Foo")); - e.Message.Should().Be($"Index names cannot contain uppercase characters: Foo."); - } - - [U] - public void NoIndexThrowsResolveException() - { - var settings = new ConnectionSettings(); - var resolver = new IndexNameResolver(settings); - var e = Assert.Throws(() => resolver.Resolve()); - e.Message.Should().Contain("Index name is null"); - } - - 
[U] - public void ResolveExceptionBubblesOut() - { - var client = TestClient.GetInMemoryClient(s => new ConnectionSettings()); - var e = Assert.Throws(() => client.Search()); - - } - } -} diff --git a/src/Tests/ClientConcepts/HighLevel/Inferrence/PropertyInference.doc.cs b/src/Tests/ClientConcepts/HighLevel/Inferrence/PropertyInference.doc.cs deleted file mode 100644 index bc9671c0ddf..00000000000 --- a/src/Tests/ClientConcepts/HighLevel/Inferrence/PropertyInference.doc.cs +++ /dev/null @@ -1,70 +0,0 @@ -using System; -using System.Linq.Expressions; -using Nest; -using Tests.Framework; -using Tests.Framework.Integration; -using Tests.Framework.MockData; -using static Tests.Framework.RoundTripper; -using Xunit; -using FluentAssertions; - -namespace Tests.ClientConcepts.HighLevel.Inferrence.PropertyNames -{ - /** == Property Names */ - [Collection(IntegrationContext.Indexing)] - public class PropertyNames : SimpleIntegration - { - private IElasticClient _client; - - public PropertyNames(IndexingCluster cluster) : base(cluster) - { - _client = cluster.Node.Client(); - } - - /** Property names resolve to the last token */ - [U] public void PropertyNamesAreResolvedToLastToken() - { - Expression> expression = p => p.Name.Suffix("raw"); - Expect("raw").WhenSerializing(expression); - } - - /** :multi-field: {ref_current}/_multi_fields.html - *Property names cannot contain a `.` (dot), because of the potential for ambiguity with - *a field that is mapped as a {multi-field}[`multi_field`]. - * - *NEST allows the call to go to Elasticsearch, deferring the naming conventions to the server side and, - * in the case of dots in field names, returns a `400 Bad Response` with a server error indicating the reason. 
- */ - [I] public void PropertyNamesContainingDotsCausesElasticsearchServerError() - { - var createIndexResponse = _client.CreateIndex("random-" + Guid.NewGuid().ToString().ToLowerInvariant(), c => c - .Mappings(m => m - .Map("type-with-dot", mm => mm - .Properties(p => p - .String(s => s - .Name("name-with.dot") - ) - ) - ) - ) - ); - - /** The response is not valid */ - createIndexResponse.IsValid.Should().BeFalse(); - - /** `DebugInformation` provides an audit trail of information to help diagnose the issue */ - createIndexResponse.DebugInformation.Should().NotBeNullOrEmpty(); - - /** `ServerError` contains information from the response from Elasticsearch */ - createIndexResponse.ServerError.Should().NotBeNull(); - createIndexResponse.ServerError.Status.Should().Be(400); - createIndexResponse.ServerError.Error.Should().NotBeNull(); - createIndexResponse.ServerError.Error.RootCause.Should().NotBeNullOrEmpty(); - - var rootCause = createIndexResponse.ServerError.Error.RootCause[0]; - - rootCause.Reason.Should().Be("Field name [name-with.dot] cannot contain '.'"); - rootCause.Type.Should().Be("mapper_parsing_exception"); - } - } -} diff --git a/src/Tests/ClientConcepts/HighLevel/Mapping/AutoMap.doc.cs b/src/Tests/ClientConcepts/HighLevel/Mapping/AutoMap.doc.cs index df88d9e7431..a427822df3f 100644 --- a/src/Tests/ClientConcepts/HighLevel/Mapping/AutoMap.doc.cs +++ b/src/Tests/ClientConcepts/HighLevel/Mapping/AutoMap.doc.cs @@ -1,1009 +1,1023 @@ -using System; -using System.Collections.Generic; -using System.Reflection; -using Nest; -using Newtonsoft.Json; -using Tests.Framework; -using static Tests.Framework.RoundTripper; - -namespace Tests.ClientConcepts.HighLevel.Mapping -{ - /** # Auto mapping properties - * - * When creating a mapping (either when creating an index or via the put mapping API), - * NEST offers a feature called AutoMap(), which will automagically infer the correct - * Elasticsearch datatypes of the POCO properties you are mapping. 
Alternatively, if - * you're using attributes to map your properties, then calling AutoMap() is required - * in order for your attributes to be applied. We'll look at examples of both. - * - **/ - public class AutoMap - { - /** - * For these examples, we'll define two POCOS. A Company, which has a name - * and a collection of Employees. And Employee, which has various properties of - * different types, and itself has a collection of Employees. - */ - public class Company - { - public string Name { get; set; } - public List Employees { get; set; } - } - - public class Employee - { - public string FirstName { get; set; } - public string LastName { get; set; } - public int Salary { get; set; } - public DateTime Birthday { get; set; } - public bool IsManager { get; set; } - public List Employees { get; set; } - public TimeSpan Hours { get; set; } - } - - [U] - public void MappingManually() - { - /** ## Manual mapping - * To create a mapping for our Company type, we can use the fluent API - * and map each property explicitly - */ - var descriptor = new CreateIndexDescriptor("myindex") - .Mappings(ms => ms - .Map(m => m - .Properties(ps => ps - .String(s => s - .Name(c => c.Name) - ) - .Object(o => o - .Name(c => c.Employees) - .Properties(eps => eps - .String(s => s - .Name(e => e.FirstName) - ) - .String(s => s - .Name(e => e.LastName) - ) - .Number(n => n - .Name(e => e.Salary) - .Type(NumberType.Integer) - ) - ) - ) - ) - ) - ); - - /** - * Which is all fine and dandy, and useful for some use cases. However in most cases - * this is becomes too cumbersome of an approach, and you simply just want to map *all* - * the properties of your POCO in a single go. 
- */ - var expected = new - { - mappings = new - { - company = new - { - properties = new - { - name = new - { - type = "string" - }, - employees = new - { - type = "object", - properties = new - { - firstName = new - { - type = "string" - }, - lastName = new - { - type = "string" - }, - salary = new - { - type = "integer" - } - } - } - } - } - } - }; - - Expect(expected).WhenSerializing((ICreateIndexRequest)descriptor); - } - - [U] - public void UsingAutoMap() - { - /** ## Simple Automapping - * This is exactly where `AutoMap()` becomes useful. Instead of manually mapping each property, - * explicitly, we can instead call `.AutoMap()` for each of our mappings and let NEST do all the work - */ - var descriptor = new CreateIndexDescriptor("myindex") - .Mappings(ms => ms - .Map(m => m.AutoMap()) - .Map(m => m.AutoMap()) - ); - - /** - * Observe that NEST has inferred the Elasticsearch types based on the CLR type of our POCO properties. - * In this example, - * - Birthday was mapped as a date, - * - Hours was mapped as a long (ticks) - * - IsManager was mapped as a boolean, - * - Salary as an integer - * - Employees as an object - * and the remaining string properties as strings. 
- */ - var expected = new - { - mappings = new - { - company = new - { - properties = new - { - employees = new - { - properties = new - { - birthday = new - { - type = "date" - }, - employees = new - { - properties = new { }, - type = "object" - }, - firstName = new - { - type = "string" - }, - hours = new - { - type = "long" - }, - isManager = new - { - type = "boolean" - }, - lastName = new - { - type = "string" - }, - salary = new - { - type = "integer" - } - }, - type = "object" - }, - name = new - { - type = "string" - } - } - }, - employee = new - { - properties = new - { - birthday = new - { - type = "date" - }, - employees = new - { - properties = new { }, - type = "object" - }, - firstName = new - { - type = "string" - }, - hours = new - { - type = "long" - }, - isManager = new - { - type = "boolean" - }, - lastName = new - { - type = "string" - }, - salary = new - { - type = "integer" - } - } - } - } - }; - - Expect(expected).WhenSerializing((ICreateIndexRequest)descriptor); - } - - /** ## Automapping with overrides - * In most cases, you'll want to map more than just the vanilla datatypes and also provide - * various options on your properties (analyzer, doc_values, etc...). In that case, it's - * possible to use AutoMap() in conjuction with explicitly mapped properties. - */ - [U] - public void OverridingAutoMappedProperties() - { - /** - * Here we are using AutoMap() to automatically map our company type, but then we're - * overriding our employee property and making it a `nested` type, since by default, - * AutoMap() will infer objects as `object`. 
- */ - var descriptor = new CreateIndexDescriptor("myindex") - .Mappings(ms => ms - .Map(m => m - .AutoMap() - .Properties(ps => ps - .Nested(n => n - .Name(c => c.Employees) - ) - ) - ) - ); - - var expected = new - { - mappings = new - { - company = new - { - properties = new - { - name = new - { - type = "string" - }, - employees = new - { - type = "nested", - } - } - } - } - }; - - Expect(expected).WhenSerializing((ICreateIndexRequest)descriptor); - - /** - * AutoMap is idempotent. Calling it before or after manually - * mapped properties should still yield the same results. - */ - descriptor = new CreateIndexDescriptor("myindex") - .Mappings(ms => ms - .Map(m => m - .Properties(ps => ps - .Nested(n => n - .Name(c => c.Employees) - ) - ) - .AutoMap() - ) - ); - - Expect(expected).WhenSerializing((ICreateIndexRequest)descriptor); - } - - /** ## Automap with attributes - * It is also possible to define your mappings using attributes on your POCOS. When you - * use attributes, you MUST use AutoMap() in order for the attributes to be applied. - * Here we define the same two types but this time using attributes. - */ - [ElasticsearchType(Name = "company")] - public class CompanyWithAttributes - { - [String(Analyzer = "keyword", NullValue = "null", Similarity = SimilarityOption.BM25)] - public string Name { get; set; } - - [String(Name = "office_hours")] - public TimeSpan? 
HeadOfficeHours { get; set; } - - [Object(Path = "employees", Store = false)] - public List Employees { get; set; } - } - - [ElasticsearchType(Name = "employee")] - public class EmployeeWithAttributes - { - [String(Name = "first_name")] - public string FirstName { get; set; } - - [String(Name = "last_name")] - public string LastName { get; set; } - - [Number(DocValues = false, IgnoreMalformed = true, Coerce = true)] - public int Salary { get; set; } - - [Date(Format = "MMddyyyy", NumericResolution = NumericResolutionUnit.Seconds)] - public DateTime Birthday { get; set; } - - [Boolean(NullValue = false, Store = true)] - public bool IsManager { get; set; } - - [Nested(Path = "employees")] - [JsonProperty("empl")] - public List Employees { get; set; } - } - - [U] - public void UsingAutoMapWithAttributes() - { - var descriptor = new CreateIndexDescriptor("myindex") - .Mappings(ms => ms - .Map(m => m.AutoMap()) - .Map(m => m.AutoMap()) - ); - - var expected = new - { - mappings = new - { - company = new - { - properties = new - { - employees = new - { - path = "employees", - properties = new - { - birthday = new - { - type = "date" - }, - employees = new - { - properties = new { }, - type = "object" - }, - firstName = new - { - type = "string" - }, - hours = new - { - type = "long" - }, - isManager = new - { - type = "boolean" - }, - lastName = new - { - type = "string" - }, - salary = new - { - type = "integer" - } - }, - store = false, - type = "object" - }, - name = new - { - analyzer = "keyword", - null_value = "null", - similarity = "BM25", - type = "string" - }, - office_hours = new - { - type = "string" - } - } - }, - employee = new - { - properties = new - { - birthday = new - { - format = "MMddyyyy", - numeric_resolution = "seconds", - type = "date" - }, - empl = new - { - path = "employees", - properties = new - { - birthday = new - { - type = "date" - }, - employees = new - { - properties = new { }, - type = "object" - }, - firstName = new - { - type = 
"string" - }, - hours = new - { - type = "long" - }, - isManager = new - { - type = "boolean" - }, - lastName = new - { - type = "string" - }, - salary = new - { - type = "integer" - } - }, - type = "nested" - }, - first_name = new - { - type = "string" - }, - isManager = new - { - null_value = false, - store = true, - type = "boolean" - }, - last_name = new - { - type = "string" - }, - salary = new - { - coerce = true, - doc_values = false, - ignore_malformed = true, - type = "double" - } - } - } - } - }; - - Expect(expected).WhenSerializing(descriptor as ICreateIndexRequest); - } - - /** - * Just as we were able to override the inferred properties in our earlier example, explicit (manual) - * mappings also take precedence over attributes. Therefore we can also override any mappings applied - * via any attributes defined on the POCO - */ - [U] - public void OverridingAutoMappedAttributes() - { - var descriptor = new CreateIndexDescriptor("myindex") - .Mappings(ms => ms - .Map(m => m - .AutoMap() - .Properties(ps => ps - .Nested(n => n - .Name(c => c.Employees) - ) - ) - ) - .Map(m => m - .AutoMap() - .TtlField(ttl => ttl - .Enable() - .Default("10m") - ) - .Properties(ps => ps - .String(s => s - .Name(e => e.FirstName) - .Fields(fs => fs - .String(ss => ss - .Name("firstNameRaw") - .Index(FieldIndexOption.NotAnalyzed) - ) - .TokenCount(t => t - .Name("length") - .Analyzer("standard") - ) - ) - ) - .Number(n => n - .Name(e => e.Salary) - .Type(NumberType.Double) - .IgnoreMalformed(false) - ) - .Date(d => d - .Name(e => e.Birthday) - .Format("MM-dd-yy") - ) - ) - ) - ); - - var expected = new - { - mappings = new - { - company = new - { - properties = new - { - employees = new - { - type = "nested" - }, - name = new - { - analyzer = "keyword", - null_value = "null", - similarity = "BM25", - type = "string" - }, - office_hours = new - { - type = "string" - } - } - }, - employee = new - { - _ttl = new - { - enabled = true, - @default = "10m" - }, - properties = new - 
{ - birthday = new - { - format = "MM-dd-yy", - type = "date" - }, - empl = new - { - path = "employees", - properties = new - { - birthday = new - { - type = "date" - }, - employees = new - { - properties = new { }, - type = "object" - }, - firstName = new - { - type = "string" - }, - hours = new - { - type = "long" - }, - isManager = new - { - type = "boolean" - }, - lastName = new - { - type = "string" - }, - salary = new - { - type = "integer" - } - }, - type = "nested" - }, - first_name = new - { - fields = new - { - firstNameRaw = new - { - index = "not_analyzed", - type = "string" - }, - length = new - { - type = "token_count", - analyzer = "standard" - } - }, - type = "string" - }, - isManager = new - { - null_value = false, - store = true, - type = "boolean" - }, - last_name = new - { - type = "string" - }, - salary = new - { - ignore_malformed = false, - type = "double" - } - } - } - } - }; - - Expect(expected).WhenSerializing((ICreateIndexRequest)descriptor); - } - - [ElasticsearchType(Name = "company")] - public class CompanyWithAttributesAndPropertiesToIgnore - { - public string Name { get; set; } - - [String(Ignore = true)] - public string PropertyToIgnore { get; set; } - - public string AnotherPropertyToIgnore { get; set; } - - [JsonIgnore] - public string JsonIgnoredProperty { get; set; } - } - - /** == Ignoring Properties - * Properties on a POCO can be ignored in a few ways: - */ - /** - * - Using the `Ignore` property on a derived `ElasticsearchPropertyAttribute` type applied to the property that should be ignored on the POCO - */ - /** - * - Using the `.InferMappingFor(Func, IClrTypeMapping> selector)` on the connection settings - */ - /** - * - Using an ignore attribute applied to the POCO property that is understood by the `IElasticsearchSerializer` used and inspected inside of `CreatePropertyMapping()` on the serializer. 
In the case of the default `JsonNetSerializer`, this is the Json.NET `JsonIgnoreAttribute` - */ - /** - * This example demonstrates all ways, using the attribute way to ignore the property `PropertyToIgnore`, the infer mapping way to ignore the - * property `AnotherPropertyToIgnore` and the json serializer specific attribute way to ignore the property `JsonIgnoredProperty` - */ - [U] - public void IgnoringProperties() - { - var descriptor = new CreateIndexDescriptor("myindex") - .Mappings(ms => ms - .Map(m => m - .AutoMap() - ) - ); - - var expected = new - { - mappings = new - { - company = new - { - properties = new - { - name = new - { - type = "string" - } - } - } - } - }; - - var settings = WithConnectionSettings(s => s - .InferMappingFor(i => i - .Ignore(p => p.AnotherPropertyToIgnore) - ) - ); - - settings.Expect(expected).WhenSerializing((ICreateIndexRequest)descriptor); - } - - /** - * If you notice in our previous Company/Employee examples, the Employee type is recursive - * in that itself contains a collection of type `Employee`. By default, `.AutoMap()` will only - * traverse a single depth when it encounters recursive instances like this. Hence, in the - * previous examples, the second level of Employee did not get any of its properties mapped. - * This is done as a safe-guard to prevent stack overflows and all the fun that comes with - * infinite recursion. Additionally, in most cases, when it comes to Elasticsearch mappings, it is - * often an edge case to have deeply nested mappings like this. However, you may still have - * the need to do this, so you can control the recursion depth of AutoMap(). - * - * Let's introduce a very simple class A, to reduce the noise, which itself has a property - * Child of type A. 
- */ - public class A - { - public A Child { get; set; } - } - - [U] - public void ControllingRecursionDepth() - { - /** By default, AutoMap() only goes as far as depth 1 */ - var descriptor = new CreateIndexDescriptor("myindex") - .Mappings(ms => ms - .Map(m => m.AutoMap()) - ); - - /** Thus we do not map properties on the second occurrence of our Child property */ - var expected = new - { - mappings = new - { - a = new - { - properties = new - { - child = new - { - properties = new { }, - type = "object" - } - } - } - } - }; - - Expect(expected).WhenSerializing((ICreateIndexRequest)descriptor); - - /** Now lets specify a maxRecursion of 3 */ - var withMaxRecursionDescriptor = new CreateIndexDescriptor("myindex") - .Mappings(ms => ms - .Map(m => m.AutoMap(3)) - ); - - /** AutoMap() has now mapped three levels of our Child property */ - var expectedWithMaxRecursion = new - { - mappings = new - { - a = new - { - properties = new - { - child = new - { - type = "object", - properties = new - { - child = new - { - type = "object", - properties = new - { - child = new - { - type = "object", - properties = new - { - child = new - { - type = "object", - properties = new { } - } - } - } - } - } - } - } - } - } - } - }; - - Expect(expectedWithMaxRecursion).WhenSerializing((ICreateIndexRequest)withMaxRecursionDescriptor); - } - - [U] - //hide - public void PutMappingAlsoAdheresToMaxRecursion() - { - var descriptor = new PutMappingDescriptor().AutoMap(); - - var expected = new - { - properties = new - { - child = new - { - properties = new { }, - type = "object" - } - } - }; - - Expect(expected).WhenSerializing((IPutMappingRequest)descriptor); - - var withMaxRecursionDescriptor = new PutMappingDescriptor().AutoMap(3); - - var expectedWithMaxRecursion = new - { - properties = new - { - child = new - { - type = "object", - properties = new - { - child = new - { - type = "object", - properties = new - { - child = new - { - type = "object", - properties = new - { - child = new - 
{ - type = "object", - properties = new { } - } - } - } - } - } - } - } - } - }; - - Expect(expectedWithMaxRecursion).WhenSerializing((IPutMappingRequest)withMaxRecursionDescriptor); - } - //endhide - - /** # Applying conventions through the Visitor pattern - * It is also possible to apply a transformation on all or specific properties. - * - * AutoMap internally implements the visitor pattern. The default visitor `NoopPropertyVisitor` does - * nothing, and acts as a blank canvas for you to implement your own visiting methods. - * - * For instance, lets create a custom visitor that disables doc values for numeric and boolean types. - * (Not really a good idea in practice, but let's do it anyway for the sake of a clear example.) - */ - public class DisableDocValuesPropertyVisitor : NoopPropertyVisitor - { - /** Override the Visit method on INumberProperty and set DocValues = false */ - public override void Visit(INumberProperty type, PropertyInfo propertyInfo, ElasticsearchPropertyAttributeBase attribute) - { - type.DocValues = false; - } - - /** Similarily, override the Visit method on IBooleanProperty and set DocValues = false */ - public override void Visit(IBooleanProperty type, PropertyInfo propertyInfo, ElasticsearchPropertyAttributeBase attribute) - { - type.DocValues = false; - } - } - - [U] - public void UsingACustomPropertyVisitor() - { - /** Now we can pass an instance of our custom visitor to AutoMap() */ - var descriptor = new CreateIndexDescriptor("myindex") - .Mappings(ms => ms - .Map(m => m.AutoMap(new DisableDocValuesPropertyVisitor())) - ); - - /** and anytime it maps a property as a number (INumberProperty) or boolean (IBooleanProperty) - * it will apply the transformation defined in each Visit() respectively, which in this example - * disables doc values. 
- */ - var expected = new - { - mappings = new - { - employee = new - { - properties = new - { - birthday = new - { - type = "date" - }, - employees = new - { - properties = new { }, - type = "object" - }, - firstName = new - { - type = "string" - }, - isManager = new - { - doc_values = false, - type = "boolean" - }, - lastName = new - { - type = "string" - }, - salary = new - { - doc_values = false, - type = "integer" - } - } - } - } - }; - } - - /** You can even take the visitor approach a step further, and instead of visiting on IProperty types, visit - * directly on your POCO properties (PropertyInfo). For example, lets create a visitor that maps all CLR types - * to an Elasticsearch string (IStringProperty). - */ - public class EverythingIsAStringPropertyVisitor : NoopPropertyVisitor - { - public override IProperty Visit(PropertyInfo propertyInfo, ElasticsearchPropertyAttributeBase attribute) => new StringProperty(); - } - - [U] - public void UsingACustomPropertyVisitorOnPropertyInfo() - { - var descriptor = new CreateIndexDescriptor("myindex") - .Mappings(ms => ms - .Map(m => m.AutoMap(new EverythingIsAStringPropertyVisitor())) - ); - - var expected = new - { - mappings = new - { - employee = new - { - properties = new - { - birthday = new - { - type = "string" - }, - employees = new - { - type = "string" - }, - firstName = new - { - type = "string" - }, - isManager = new - { - type = "string" - }, - lastName = new - { - type = "string" - }, - salary = new - { - type = "string" - } - } - } - } - }; - } - } -} +using System; +using System.Collections.Generic; +using System.Reflection; +using Nest; +using Newtonsoft.Json; +using Tests.Framework; +using static Tests.Framework.RoundTripper; + +namespace Tests.ClientConcepts.HighLevel.Mapping +{ + /** + * [[auto-map]] + * == Auto mapping properties + * + * When creating a mapping (either when creating an index or via the put mapping API), + * NEST offers a feature called AutoMap(), which will automagically infer 
the correct + * Elasticsearch datatypes of the POCO properties you are mapping. Alternatively, if + * you're using attributes to map your properties, then calling AutoMap() is required + * in order for your attributes to be applied. We'll look at examples of both. + * + **/ + public class AutoMap + { + /** + * For these examples, we'll define two POCOs, `Company`, which has a name + * and a collection of Employees, and `Employee` which has various properties of + * different types, and itself has a collection of `Employee` types. + */ + public class Company + { + public string Name { get; set; } + public List Employees { get; set; } + } + + public class Employee + { + public string FirstName { get; set; } + public string LastName { get; set; } + public int Salary { get; set; } + public DateTime Birthday { get; set; } + public bool IsManager { get; set; } + public List Employees { get; set; } + public TimeSpan Hours { get; set;} + } + + [U] + public void MappingManually() + { + /** === Manual mapping + * To create a mapping for our Company type, we can use the fluent API + * and map each property explicitly + */ + var descriptor = new CreateIndexDescriptor("myindex") + .Mappings(ms => ms + .Map(m => m + .Properties(ps => ps + .String(s => s + .Name(c => c.Name) + ) + .Object(o => o + .Name(c => c.Employees) + .Properties(eps => eps + .String(s => s + .Name(e => e.FirstName) + ) + .String(s => s + .Name(e => e.LastName) + ) + .Number(n => n + .Name(e => e.Salary) + .Type(NumberType.Integer) + ) + ) + ) + ) + ) + ); + + /** + * This is all fine and dandy and useful for some use cases however in most cases + * this can become verbose and wieldy. The majority of the time you simply just want to map *all* + * the properties of a POCO in a single go. 
+ */ + var expected = new + { + mappings = new + { + company = new + { + properties = new + { + name = new + { + type = "string" + }, + employees = new + { + type = "object", + properties = new + { + firstName = new + { + type = "string" + }, + lastName = new + { + type = "string" + }, + salary = new + { + type = "integer" + } + } + } + } + } + } + }; + + Expect(expected).WhenSerializing((ICreateIndexRequest)descriptor); + } + + [U] + public void UsingAutoMap() + { + /** === Simple Automapping + * This is exactly where `AutoMap()` becomes useful. Instead of manually mapping each property, + * explicitly, we can instead call `.AutoMap()` for each of our mappings and let NEST do all the work + */ + var descriptor = new CreateIndexDescriptor("myindex") + .Mappings(ms => ms + .Map(m => m.AutoMap()) + .Map(m => m.AutoMap()) + ); + + /** + * Observe that NEST has inferred the Elasticsearch types based on the CLR type of our POCO properties. + * In this example, + * - Birthday was mapped as a date, + * - Hours was mapped as a long (ticks) + * - IsManager was mapped as a boolean, + * - Salary as an integer + * - Employees as an object + * and the remaining string properties as strings. 
+ */ + var expected = new + { + mappings = new + { + company = new + { + properties = new + { + employees = new + { + properties = new + { + birthday = new + { + type = "date" + }, + employees = new + { + properties = new { }, + type = "object" + }, + firstName = new + { + type = "string" + }, + hours = new + { + type = "long" + }, + isManager = new + { + type = "boolean" + }, + lastName = new + { + type = "string" + }, + salary = new + { + type = "integer" + } + }, + type = "object" + }, + name = new + { + type = "string" + } + } + }, + employee = new + { + properties = new + { + birthday = new + { + type = "date" + }, + employees = new + { + properties = new { }, + type = "object" + }, + firstName = new + { + type = "string" + }, + hours = new + { + type = "long" + }, + isManager = new + { + type = "boolean" + }, + lastName = new + { + type = "string" + }, + salary = new + { + type = "integer" + } + } + } + } + }; + + Expect(expected).WhenSerializing((ICreateIndexRequest)descriptor); + } + + /**[float] + * == Auto mapping with overrides + * In most cases, you'll want to map more than just the vanilla datatypes and also provide + * various options for your properties (analyzer to use, whether to enable doc_values, etc...). + * In that case, it's possible to use `.AutoMap()` in conjuction with explicitly mapped properties. + */ + [U] + public void OverridingAutoMappedProperties() + { + /** + * Here we are using `.AutoMap()` to automatically map our company type, but then we're + * overriding our employee property and making it a `nested` type, since by default, + * `.AutoMap()` will infer objects as `object`. 
+ */ + var descriptor = new CreateIndexDescriptor("myindex") + .Mappings(ms => ms + .Map(m => m + .AutoMap() + .Properties(ps => ps + .Nested(n => n + .Name(c => c.Employees) + ) + ) + ) + ); + + var expected = new + { + mappings = new + { + company = new + { + properties = new + { + name = new + { + type = "string" + }, + employees = new + { + type = "nested" + } + } + } + } + }; + + Expect(expected).WhenSerializing((ICreateIndexRequest)descriptor); + + /** + * `.AutoMap()` is __idempotent__; calling it before or after manually + * mapped properties should still yield the same results. + */ + descriptor = new CreateIndexDescriptor("myindex") + .Mappings(ms => ms + .Map(m => m + .Properties(ps => ps + .Nested(n => n + .Name(c => c.Employees) + ) + ) + .AutoMap() + ) + ); + + Expect(expected).WhenSerializing((ICreateIndexRequest)descriptor); + } + + /**[[attribute-mapping]] + * [float] + * == Attribute mapping + * It is also possible to define your mappings using attributes on your POCOs. When you + * use attributes, you *must* use `.AutoMap()` in order for the attributes to be applied. + * Here we define the same two types as before, but this time using attributes to define the mappings. + */ + [ElasticsearchType(Name = "company")] + public class CompanyWithAttributes + { + [String(Analyzer = "keyword", NullValue = "null", Similarity = SimilarityOption.BM25)] + public string Name { get; set; } + + [String(Name = "office_hours")] + public TimeSpan? 
HeadOfficeHours { get; set; } + + [Object(Path = "employees", Store = false)] + public List Employees { get; set; } + } + + [ElasticsearchType(Name = "employee")] + public class EmployeeWithAttributes + { + [String(Name = "first_name")] + public string FirstName { get; set; } + + [String(Name = "last_name")] + public string LastName { get; set; } + + [Number(DocValues = false, IgnoreMalformed = true, Coerce = true)] + public int Salary { get; set; } + + [Date(Format = "MMddyyyy", NumericResolution = NumericResolutionUnit.Seconds)] + public DateTime Birthday { get; set; } + + [Boolean(NullValue = false, Store = true)] + public bool IsManager { get; set; } + + [Nested(Path = "employees")] + [JsonProperty("empl")] + public List Employees { get; set; } + } + + /**Then we map the types by calling `.AutoMap()` */ + [U] + public void UsingAutoMapWithAttributes() + { + var descriptor = new CreateIndexDescriptor("myindex") + .Mappings(ms => ms + .Map(m => m.AutoMap()) + .Map(m => m.AutoMap()) + ); + + var expected = new + { + mappings = new + { + company = new + { + properties = new + { + employees = new + { + path = "employees", + properties = new + { + birthday = new + { + type = "date" + }, + employees = new + { + properties = new { }, + type = "object" + }, + firstName = new + { + type = "string" + }, + hours = new + { + type = "long" + }, + isManager = new + { + type = "boolean" + }, + lastName = new + { + type = "string" + }, + salary = new + { + type = "integer" + } + }, + store = false, + type = "object" + }, + name = new + { + analyzer = "keyword", + null_value = "null", + similarity = "BM25", + type = "string" + }, + office_hours = new + { + type = "string" + } + } + }, + employee = new + { + properties = new + { + birthday = new + { + format = "MMddyyyy", + numeric_resolution = "seconds", + type = "date" + }, + empl = new + { + path = "employees", + properties = new + { + birthday = new + { + type = "date" + }, + employees = new + { + properties = new { }, + type 
= "object" + }, + firstName = new + { + type = "string" + }, + hours = new + { + type = "long" + }, + isManager = new + { + type = "boolean" + }, + lastName = new + { + type = "string" + }, + salary = new + { + type = "integer" + } + }, + type = "nested" + }, + first_name = new + { + type = "string" + }, + isManager = new + { + null_value = false, + store = true, + type = "boolean" + }, + last_name = new + { + type = "string" + }, + salary = new + { + coerce = true, + doc_values = false, + ignore_malformed = true, + type = "double" + } + } + } + } + }; + + Expect(expected).WhenSerializing((ICreateIndexRequest) descriptor); + } + + /** + * Just as we were able to override the inferred properties in our earlier example, explicit (manual) + * mappings also take precedence over attributes. Therefore we can also override any mappings applied + * via any attributes defined on the POCO + */ + [U] + public void OverridingAutoMappedAttributes() + { + var descriptor = new CreateIndexDescriptor("myindex") + .Mappings(ms => ms + .Map(m => m + .AutoMap() + .Properties(ps => ps + .Nested(n => n + .Name(c => c.Employees) + ) + ) + ) + .Map(m => m + .AutoMap() + .TtlField(ttl => ttl + .Enable() + .Default("10m") + ) + .Properties(ps => ps + .String(s => s + .Name(e => e.FirstName) + .Fields(fs => fs + .String(ss => ss + .Name("firstNameRaw") + .Index(FieldIndexOption.NotAnalyzed) + ) + .TokenCount(t => t + .Name("length") + .Analyzer("standard") + ) + ) + ) + .Number(n => n + .Name(e => e.Salary) + .Type(NumberType.Double) + .IgnoreMalformed(false) + ) + .Date(d => d + .Name(e => e.Birthday) + .Format("MM-dd-yy") + ) + ) + ) + ); + + var expected = new + { + mappings = new + { + company = new + { + properties = new + { + employees = new + { + type = "nested" + }, + name = new + { + analyzer = "keyword", + null_value = "null", + similarity = "BM25", + type = "string" + }, + office_hours = new + { + type = "string" + } + } + }, + employee = new + { + _ttl = new + { + enabled = true, 
+ @default = "10m" + }, + properties = new + { + birthday = new + { + format = "MM-dd-yy", + type = "date" + }, + empl = new + { + path = "employees", + properties = new + { + birthday = new + { + type = "date" + }, + employees = new + { + properties = new { }, + type = "object" + }, + firstName = new + { + type = "string" + }, + hours = new + { + type = "long" + }, + isManager = new + { + type = "boolean" + }, + lastName = new + { + type = "string" + }, + salary = new + { + type = "integer" + } + }, + type = "nested" + }, + first_name = new + { + fields = new + { + firstNameRaw = new + { + index = "not_analyzed", + type = "string" + }, + length = new + { + type = "token_count", + analyzer = "standard" + } + }, + type = "string" + }, + isManager = new + { + null_value = false, + store = true, + type = "boolean" + }, + last_name = new + { + type = "string" + }, + salary = new + { + ignore_malformed = false, + type = "double" + } + } + } + } + }; + + Expect(expected).WhenSerializing((ICreateIndexRequest)descriptor); + } + + /**[float] + * == Ignoring Properties + * Properties on a POCO can be ignored in a few ways: + * + * - Using the `Ignore` property on a derived `ElasticsearchPropertyAttribute` type applied to the property that should be ignored on the POCO + * + * - Using the `.InferMappingFor(Func, IClrTypeMapping> selector)` on the connection settings + * + * - Using an ignore attribute applied to the POCO property that is understood by the `IElasticsearchSerializer` used, and inspected inside of the `CreatePropertyMapping()` on the serializer. 
In the case of the default `JsonNetSerializer`, this is the Json.NET `JsonIgnoreAttribute` + * + * This example demonstrates all ways, using the `Ignore` property on the attribute to ignore the property `PropertyToIgnore`, the infer mapping to ignore the + * property `AnotherPropertyToIgnore` and the json serializer specific attribute to ignore the property `JsonIgnoredProperty` + */ + + [ElasticsearchType(Name = "company")] + public class CompanyWithAttributesAndPropertiesToIgnore + { + public string Name { get; set; } + + [String(Ignore = true)] + public string PropertyToIgnore { get; set; } + + public string AnotherPropertyToIgnore { get; set; } + + [JsonIgnore] + public string JsonIgnoredProperty { get; set; } + } + + [U] + public void IgnoringProperties() + { + /** All of the properties except `Name` have been ignored in the mapping */ + var descriptor = new CreateIndexDescriptor("myindex") + .Mappings(ms => ms + .Map(m => m + .AutoMap() + ) + ); + + var expected = new + { + mappings = new + { + company = new + { + properties = new + { + name = new + { + type = "string" + } + } + } + } + }; + + var settings = WithConnectionSettings(s => s + .InferMappingFor(i => i + .Ignore(p => p.AnotherPropertyToIgnore) + ) + ); + + settings.Expect(expected).WhenSerializing((ICreateIndexRequest)descriptor); + } + + /**[float] + * == Mapping Recursion + * If you notice in our previous `Company` and `Employee` examples, the `Employee` type is recursive + * in that the `Employee` class itself contains a collection of type `Employee`. By default, `.AutoMap()` will only + * traverse a single depth when it encounters recursive instances like this. Hence, in the + * previous examples, the collection of type `Employee` on the `Employee` class did not get any of its properties mapped. + * This is done as a safe-guard to prevent stack overflows and all the fun that comes with + * infinite recursion. 
Additionally, in most cases, when it comes to Elasticsearch mappings, it is + * often an edge case to have deeply nested mappings like this. However, you may still have + * the need to do this, so you can control the recursion depth of `.AutoMap()`. + * + * Let's introduce a very simple class, `A`, which itself has a property + * Child of type `A`. + */ + public class A + { + public A Child { get; set; } + } + + [U] + public void ControllingRecursionDepth() + { + /** By default, `.AutoMap()` only goes as far as depth 1 */ + var descriptor = new CreateIndexDescriptor("myindex") + .Mappings(ms => ms + .Map(m => m.AutoMap()) + ); + + /** Thus we do not map properties on the second occurrence of our Child property */ + var expected = new + { + mappings = new + { + a = new + { + properties = new + { + child = new + { + properties = new { }, + type = "object" + } + } + } + } + }; + + Expect(expected).WhenSerializing((ICreateIndexRequest) descriptor); + + /** Now lets specify a maxRecursion of 3 */ + var withMaxRecursionDescriptor = new CreateIndexDescriptor("myindex") + .Mappings(ms => ms + .Map(m => m.AutoMap(3)) + ); + + /** `.AutoMap()` has now mapped three levels of our Child property */ + var expectedWithMaxRecursion = new + { + mappings = new + { + a = new + { + properties = new + { + child = new + { + type = "object", + properties = new + { + child = new + { + type = "object", + properties = new + { + child = new + { + type = "object", + properties = new + { + child = new + { + type = "object", + properties = new { } + } + } + } + } + } + } + } + } + } + } + }; + + Expect(expectedWithMaxRecursion).WhenSerializing((ICreateIndexRequest) withMaxRecursionDescriptor); + } + + [U] + //hide + public void PutMappingAlsoAdheresToMaxRecursion() + { + var descriptor = new PutMappingDescriptor().AutoMap(); + + var expected = new + { + properties = new + { + child = new + { + properties = new { }, + type = "object" + } + } + }; + + 
Expect(expected).WhenSerializing((IPutMappingRequest)descriptor); + + var withMaxRecursionDescriptor = new PutMappingDescriptor().AutoMap(3); + + var expectedWithMaxRecursion = new + { + properties = new + { + child = new + { + type = "object", + properties = new + { + child = new + { + type = "object", + properties = new + { + child = new + { + type = "object", + properties = new + { + child = new + { + type = "object", + properties = new { } + } + } + } + } + } + } + } + } + }; + + Expect(expectedWithMaxRecursion).WhenSerializing((IPutMappingRequest)withMaxRecursionDescriptor); + } + //endhide + + /**[float] + * == Applying conventions through the Visitor pattern + * It is also possible to apply a transformation on all or specific properties. + * + * AutoMap internally implements the https://en.wikipedia.org/wiki/Visitor_pattern[visitor pattern]. The default visitor, `NoopPropertyVisitor`, + * does nothing and acts as a blank canvas for you to implement your own visiting methods. + * + * For instance, lets create a custom visitor that disables doc values for numeric and boolean types + * (Not really a good idea in practice, but let's do it anyway for the sake of a clear example.) 
+ */ + public class DisableDocValuesPropertyVisitor : NoopPropertyVisitor + { + public override void Visit( + INumberProperty type, + PropertyInfo propertyInfo, + ElasticsearchPropertyAttributeBase attribute) //<1> Override the `Visit` method on `INumberProperty` and set `DocValues = false` + { + type.DocValues = false; + } + + public override void Visit( + IBooleanProperty type, + PropertyInfo propertyInfo, + ElasticsearchPropertyAttributeBase attribute) //<2> Similarily, override the `Visit` method on `IBooleanProperty` and set `DocValues = false` + { + type.DocValues = false; + } + } + + [U] + public void UsingACustomPropertyVisitor() + { + /** Now we can pass an instance of our custom visitor to `.AutoMap()` */ + var descriptor = new CreateIndexDescriptor("myindex") + .Mappings(ms => ms + .Map(m => m.AutoMap(new DisableDocValuesPropertyVisitor())) + ); + + /** and anytime the client maps a property of the POCO (``Employee`` in this example) as a number (``INumberProperty``) or boolean (``IBooleanProperty``), + * it will apply the transformation defined in each `Visit()` call respectively, which in this example + * disables {ref_current}/doc-values.html[doc_values]. + */ + var expected = new + { + mappings = new + { + employee = new + { + properties = new + { + birthday = new + { + type = "date" + }, + employees = new + { + properties = new { }, + type = "object" + }, + firstName = new + { + type = "string" + }, + isManager = new + { + doc_values = false, + type = "boolean" + }, + lastName = new + { + type = "string" + }, + salary = new + { + doc_values = false, + type = "integer" + } + } + } + } + }; + } + + /**=== Visiting on ``PropertyInfo`` + * You can even take the visitor approach a step further, and instead of visiting on `IProperty` types, visit + * directly on your POCO properties (``PropertyInfo``). As an example, let's create a visitor that maps all CLR types + * to an Elasticsearch string (``IStringProperty``). 
+ */ + public class EverythingIsAStringPropertyVisitor : NoopPropertyVisitor + { + public override IProperty Visit( + PropertyInfo propertyInfo, + ElasticsearchPropertyAttributeBase attribute) => new StringProperty(); + } + + [U] + public void UsingACustomPropertyVisitorOnPropertyInfo() + { + var descriptor = new CreateIndexDescriptor("myindex") + .Mappings(ms => ms + .Map(m => m.AutoMap(new EverythingIsAStringPropertyVisitor())) + ); + + var expected = new + { + mappings = new + { + employee = new + { + properties = new + { + birthday = new + { + type = "string" + }, + employees = new + { + type = "string" + }, + firstName = new + { + type = "string" + }, + isManager = new + { + type = "string" + }, + lastName = new + { + type = "string" + }, + salary = new + { + type = "string" + } + } + } + } + }; + } + } +} diff --git a/src/Tests/ClientConcepts/LowLevel/Connecting.doc.cs b/src/Tests/ClientConcepts/LowLevel/Connecting.doc.cs index 1739501e455..eb77c3e82bf 100644 --- a/src/Tests/ClientConcepts/LowLevel/Connecting.doc.cs +++ b/src/Tests/ClientConcepts/LowLevel/Connecting.doc.cs @@ -16,21 +16,19 @@ namespace Tests.ClientConcepts.LowLevel { public class Connecting { - /** # Connecting - * Connecting to *Elasticsearch* with `Elasticsearch.Net` is quite easy but has a few toggles and options worth knowing. - * - * # Choosing the right connection strategy + /**== Connecting + * Connecting to Elasticsearch with `Elasticsearch.Net` is quite easy and there a few options to suit a number of different use cases. 
+ * + * [[connection-strategies]] + * === Choosing the right Connection Strategy * If you simply new an `ElasticLowLevelClient`, it will be a non-failover connection to `http://localhost:9200` */ - public void InstantiateUsingAllDefaults() { var client = new ElasticLowLevelClient(); - var tokenizers = new TokenizersDescriptor(); - } /** - * If your Elasticsearch node does not live at `http://localhost:9200` but i.e `http://mynode.example.com:8082/apiKey`, then + * If your Elasticsearch node does not live at `http://localhost:9200` but instead lives somewhere else, for example, `http://mynode.example.com:8082/apiKey`, then * you will need to pass in some instance of `IConnectionConfigurationValues`. * * The easiest way to do this is: @@ -43,12 +41,11 @@ public void InstantiatingASingleNodeClient() var client = new ElasticLowLevelClient(config); } - /** - * This however is still a non-failover connection. Meaning if that `node` goes down the operation will not be retried on any other nodes in the cluster. - * - * To get a failover connection we have to pass an `IConnectionPool` instance instead of a `Uri`. + /** + * This will still be a non-failover connection, meaning if that `node` goes down the operation will not be retried on any other nodes in the cluster. + * + * To get a failover connection we have to pass an <> instance instead of a `Uri`. */ - public void InstantiatingAConnectionPoolClient() { var node = new Uri("http://mynode.example.com:8082/apiKey"); @@ -57,27 +54,25 @@ public void InstantiatingAConnectionPoolClient() var client = new ElasticLowLevelClient(config); } - /** - * Here instead of directly passing `node`, we pass a `SniffingConnectionPool` which will use our `node` to find out the rest of the available cluster nodes. 
- * Be sure to read more about [Connection Pooling and Cluster Failover here](/elasticsearch-net/cluster-failover.html) - * - * ## Options - * - * Besides either passing a `Uri` or `IConnectionPool` to `ConnectionConfiguration`, you can also fluently control many more options. For instance: + /** + * Here instead of directly passing `node`, we pass a <> + * which will use our `node` to find out the rest of the available cluster nodes. + * Be sure to read more about <>. + * + * === Configuration Options + * + *Besides either passing a `Uri` or `IConnectionPool` to `ConnectionConfiguration`, you can also fluently control many more options. For instance: */ public void SpecifyingClientOptions() { - //hide var node = new Uri("http://mynode.example.com:8082/apiKey"); var connectionPool = new SniffingConnectionPool(new[] { node }); - //endhide var config = new ConnectionConfiguration(connectionPool) - .DisableDirectStreaming() - .BasicAuthentication("user", "pass") - .RequestTimeout(TimeSpan.FromSeconds(5)); - + .DisableDirectStreaming() //<1> Additional options + .BasicAuthentication("user", "pass") //<1> + .RequestTimeout(TimeSpan.FromSeconds(5)); //<1> } /** * The following is a list of available connection configuration options: @@ -85,112 +80,83 @@ public void SpecifyingClientOptions() public void AvailableOptions() { - //hide - var client = new ElasticLowLevelClient(); - //endhide - var config = new ConnectionConfiguration() + .DisableAutomaticProxyDetection() // <1> Disable automatic proxy detection. Defaults to `true`. + .EnableHttpCompression() // <2> Enable compressed request and reesponses from Elasticsearch (Note that nodes need to be configured to allow this. See the {ref_current}/modules-http.html[http module settings] for more info). + .DisableDirectStreaming(); // <3> By default responses are deserialized directly from the response stream to the object you tell it to. 
For debugging purposes, it can be very useful to keep a copy of the raw response on the result object, which is what calling this method will do. - .DisableAutomaticProxyDetection() - /** Disable automatic proxy detection. Defaults to true. */ - - .EnableHttpCompression() - /** - * Enable compressed request and reesponses from Elasticsearch (Note that nodes need to be configured - * to allow this. See the [http module settings](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/modules-http.html) for more info). - */ - - .DisableDirectStreaming() - /** - * By default responses are deserialized off stream to the object you tell it to. - * For debugging purposes it can be very useful to keep a copy of the raw response on the result object. - */; - + var client = new ElasticLowLevelClient(config); var result = client.Search>(new { size = 12 }); + + /** This will only have a value if the client configuration has `DisableDirectStreaming` set */ var raw = result.ResponseBodyInBytes; - /** This will only have a value if the client configuration has ExposeRawResponse set */ - /** - * Please note that this only make sense if you need a mapped response and the raw response at the same time. - * If you need a `string` or `byte[]` response simply call: + /** + * Please note that using `.DisableDirectStreaming` only makes sense if you need the mapped response **and** the raw response __at the same time__. + * If you need a only `string` or `byte[]` response simply call */ var stringResult = client.Search(new { }); - //hide + /** other configuration options */ config = config - //endhide - .GlobalQueryStringParameters(new NameValueCollection()) - /** - * Allows you to set querystring parameters that have to be added to every request. For instance, if you use a hosted elasticserch provider, and you need need to pass an `apiKey` parameter onto every request. 
- */ - - .Proxy(new Uri("http://myproxy"), "username", "pass") - /** Sets proxy information on the connection. */ - - .RequestTimeout(TimeSpan.FromSeconds(4)) - /** - * Sets the global maximum time a connection may take. - * Please note that this is the request timeout, the builtin .NET `WebRequest` has no way to set connection timeouts - * (see http://msdn.microsoft.com/en-us/library/system.net.httpwebrequest.timeout(v=vs.110).aspx). - */ - - .ThrowExceptions() - /** - * As an alternative to the C/go like error checking on `response.IsValid`, you can instead tell the client to throw - * exceptions. - * - * There are three category of exceptions thay may be thrown: - * - * 1) ElasticsearchClientException: These are known exceptions, either an exception that occurred in the request pipeline - * (such as max retries or timeout reached, bad authentication, etc...) or Elasticsearch itself returned an error (could - * not parse the request, bad query, missing field, etc...). If it is an Elasticsearch error, the `ServerError` property - * on the response will contain the the actual error that was returned. The inner exception will always contain the - * root causing exception. - * - * 2) UnexpectedElasticsearchClientException: These are unknown exceptions, for instance a response from Elasticsearch not - * properly deserialized. These are usually bugs and should be reported. This excpetion also inherits from ElasticsearchClientException - * so an additional catch block isn't necessary, but can be helpful in distinguishing between the two. - * - * 3) Development time exceptions: These are CLR exceptions like ArgumentException, NullArgumentException etc... that are thrown - * when an API in the client is misused. These should not be handled as you want to know about them during development. 
- * - */ - - .PrettyJson() - /** - * Forces all serialization to be indented and appends `pretty=true` to all the requests so that the responses are indented as well - */ - - .BasicAuthentication("username", "password") - /** Sets the HTTP basic authentication credentials to specify with all requests. */; - + .GlobalQueryStringParameters(new NameValueCollection()) // <1> Allows you to set querystring parameters that have to be added to every request. For instance, if you use a hosted elasticserch provider, and you need need to pass an `apiKey` parameter onto every request. + .Proxy(new Uri("http://myproxy"), "username", "pass") // <2> Sets proxy information on the connection. + .RequestTimeout(TimeSpan.FromSeconds(4)) // <3> Sets the global maximum time a connection may take. Please note that this is the request timeout, the builtin .NET `WebRequest` has no way to set connection timeouts (see http://msdn.microsoft.com/en-us/library/system.net.httpwebrequest.timeout(v=vs.110).aspx[the MSDN documentation on `HttpWebRequest.Timeout` Property]). + .ThrowExceptions() // <4> As an alternative to the C/go like error checking on `response.IsValid`, you can instead tell the client to <>. + .PrettyJson() // <5> forces all serialization to be indented and appends `pretty=true` to all the requests so that the responses are indented as well + .BasicAuthentication("username", "password"); // <6> sets the HTTP basic authentication credentials to specify with all requests. + /** - * **Note:** This can alternatively be specified on the node URI directly: - */ - + * NOTE: Basic authentication credentials can alternatively be specified on the node URI directly: + */ var uri = new Uri("http://username:password@localhost:9200"); var settings = new ConnectionConfiguration(uri); /** - * ...but may become tedious when using connection pooling with multiple nodes. + *...but this may become tedious when using connection pooling with multiple nodes. 
+ * + * [[thrown-exceptions]] + * === Exceptions + * There are three categories of exceptions that may be thrown: + * + * `ElasticsearchClientException`:: These are known exceptions, either an exception that occurred in the request pipeline + * (such as max retries or timeout reached, bad authentication, etc...) or Elasticsearch itself returned an error (could + * not parse the request, bad query, missing field, etc...). If it is an Elasticsearch error, the `ServerError` property + * on the response will contain the actual error that was returned. The inner exception will always contain the + * root causing exception. + * + * `UnexpectedElasticsearchClientException`:: These are unknown exceptions, for instance a response from Elasticsearch not + * properly deserialized. These are usually bugs and {github}/issues[should be reported]. This exception also inherits from `ElasticsearchClientException` + * so an additional catch block isn't necessary, but can be helpful in distinguishing between the two. + * + * Development time exceptions:: These are CLR exceptions like `ArgumentException`, `ArgumentOutOfRangeException`, etc. and other exceptions like + * `ResolveException` that are thrown when an API in the client is misused. + * These should not be handled as you want to know about them during development. + * */ } - /** - * You can pass a callback of type `Action<IApiCallDetails>` that can eaves drop every time a response (good or bad) is created. + /** === OnRequestCompleted + * You can pass a callback of type `Action` that can eavesdrop every time a response (good or bad) is created. * If you have complex logging needs this is a good place to add that in. 
*/ [U] public void OnRequestCompletedIsCalled() { var counter = 0; - var client = TestClient.GetInMemoryClient(s => s.OnRequestCompleted(r => counter++)); + var connectionPool = new SingleNodeConnectionPool(new Uri("http://localhost:9200")); + var settings = new ConnectionSettings(connectionPool, new InMemoryConnection()) + .OnRequestCompleted(r => counter++); + var client = new ElasticClient(settings); client.RootNodeInfo(); counter.Should().Be(1); client.RootNodeInfoAsync(); counter.Should().Be(2); } + /** + *`OnRequestCompleted` is called even when an exception is thrown + */ [U] public void OnRequestCompletedIsCalledWhenExceptionIsThrown() { @@ -205,21 +171,22 @@ public void OnRequestCompletedIsCalledWhenExceptionIsThrown() counter.Should().Be(2); } - /** - * An example of using `OnRequestCompleted()` for complex logging. Remember, if you would also like - * to capture the request and/or response bytes, you also need to set `.DisableDirectStreaming()` - * to `true` + /** [[complex-logging]] + * === Complex logging with OnRequestCompleted + * Here's an example of using `OnRequestCompleted()` for complex logging. Remember, if you would also like + * to capture the request and/or response bytes, you also need to set `.DisableDirectStreaming()` to `true` */ [U]public async Task UsingOnRequestCompletedForLogging() { var list = new List(); var connectionPool = new SingleNodeConnectionPool(new Uri("http://localhost:9200")); - var settings = new ConnectionSettings(connectionPool, new InMemoryConnection()) + + var settings = new ConnectionSettings(connectionPool, new InMemoryConnection()) // <1> Here we use `InMemoryConnection`; in reality you would use another type of `IConnection` that actually makes a request. 
.DefaultIndex("default-index") .DisableDirectStreaming() .OnRequestCompleted(response => { - // log out the request + // log out the request and the request body, if available if (response.RequestBodyInBytes != null) { list.Add( @@ -231,7 +198,7 @@ [U]public async Task UsingOnRequestCompletedForLogging() list.Add($"{response.HttpMethod} {response.Uri}"); } - // log out the response + // log out the response and the response body, if available if (response.ResponseBodyInBytes != null) { list.Add($"Status: {response.HttpStatusCode}\n" + @@ -280,37 +247,36 @@ [U]public async Task UsingOnRequestCompletedForLogging() public void ConfiguringSSL() { /** - * ## Configuring SSL - * SSL must be configured outside of the client using .NET's - * [ServicePointManager](http://msdn.microsoft.com/en-us/library/system.net.servicepointmanager%28v=vs.110%29.aspx) - * class and setting the [ServerCertificateValidationCallback](http://msdn.microsoft.com/en-us/library/system.net.servicepointmanager.servercertificatevalidationcallback.aspx) + * [[configuring-ssl]] + * === Configuring SSL + * SSL must be configured outside of the client using .NET's + * http://msdn.microsoft.com/en-us/library/system.net.servicepointmanager%28v=vs.110%29.aspx[ServicePointManager] + * class and setting the http://msdn.microsoft.com/en-us/library/system.net.servicepointmanager.servercertificatevalidationcallback.aspx[ServerCertificateValidationCallback] * property. 
- * + * * The bare minimum to make .NET accept self-signed SSL certs that are not in the Window's CA store would be to have the callback simply return `true`: */ #if !DOTNETCORE - ServicePointManager.ServerCertificateValidationCallback += (sender, cert, chain, errors) => true; + ServicePointManager.ServerCertificateValidationCallback += (sender, cert, chain, errors) => true; #endif /** - * However, this will accept all requests from the AppDomain to untrusted SSL sites, - * therefore we recommend doing some minimal introspection on the passed in certificate. + * However, this will accept **all** requests from the AppDomain to untrusted SSL sites, + * therefore **we recommend doing some minimal introspection on the passed in certificate.** + * + * IMPORTANT: Using `ServicePointManager` does not work on **Core CLR** as the request does not go through `ServicePointManager`; please file an {github}/issues[issue] if you need support for certificate validation on Core CLR. */ } - /** - * ## Overriding default Json.NET behavior - * - * Please be advised that this is an expert behavior but if you need to get to the nitty gritty this can be really useful + /**=== Overriding default Json.NET behavior * - * Create a subclass of the `JsonNetSerializer` - + * Overriding the default Json.NET behaviour in NEST is an expert behavior but if you need to get to the nitty gritty, this can be really useful. 
+ * First, create a subclass of the `JsonNetSerializer` */ public class MyJsonNetSerializer : JsonNetSerializer { public MyJsonNetSerializer(IConnectionSettingsValues settings) : base(settings) { } - /** * Override ModifyJsonSerializerSettings if you need access to `JsonSerializerSettings` */ @@ -319,22 +285,22 @@ public MyJsonNetSerializer(IConnectionSettingsValues settings) : base(settings) /** * You can inject contract resolved converters by implementing the ContractConverters property - * This can be much faster then registering them on JsonSerializerSettings.Converters + * This can be much faster then registering them on `JsonSerializerSettings.Converters` */ public int CallToContractConverter { get; set; } = 0; - protected override IList> ContractConverters => new List>() + protected override IList> ContractConverters => new List> { - { t => { + t => { CallToContractConverter++; return null; - } } + } }; } /** - * You can then register a factory on ConnectionSettings to create an instance of your subclass instead. - * This is called once per instance of ConnectionSettings. + * You can then register a factory on `ConnectionSettings` to create an instance of your subclass instead. + * This is **_called once per instance_** of ConnectionSettings. */ [U] public void ModifyJsonSerializerSettingsIsCalled() diff --git a/src/Tests/ClientConcepts/LowLevel/Lifetimes.doc.cs b/src/Tests/ClientConcepts/LowLevel/Lifetimes.doc.cs index f6fcadfc92e..58852072c81 100644 --- a/src/Tests/ClientConcepts/LowLevel/Lifetimes.doc.cs +++ b/src/Tests/ClientConcepts/LowLevel/Lifetimes.doc.cs @@ -13,64 +13,43 @@ namespace Tests.ClientConcepts.LowLevel { public class Lifetimes { - /** - * ## Lifetimes + /**== Lifetimes * * If you are using an IOC container its always useful to know the best practices around the lifetime of your objects - - * In general we advise folks to register their ElasticClient instances as singleton. 
The client is thread safe - * so sharing this instance over threads is ok. - - * Zooming in however the actual moving part that benefits the most of being static for most of the duration of your - * application is ConnectionSettings. Caches are per ConnectionSettings. - - * In some applications it could make perfect sense to have multiple singleton IElasticClient's registered with different - * connectionsettings. e.g if you have 2 functionally isolated Elasticsearch clusters. - - */ - - - [U] public void InitialDisposeState() - { - var connection = new AConnection(); - var connectionPool = new AConnectionPool(new Uri("http://localhost:9200")); - var settings = new AConnectionSettings(connectionPool, connection); - settings.IsDisposed.Should().BeFalse(); - connectionPool.IsDisposed.Should().BeFalse(); - connection.IsDisposed.Should().BeFalse(); - } - - /** - * Disposing the ConnectionSettings will dispose the IConnectionPool and IConnection it has a hold of + * + * In general we advise folks to register their ElasticClient instances as singletons. The client is thread safe + * so sharing an instance between threads is fine. + * + * Zooming in however the actual moving part that benefits the most from being static for most of the duration of your + * application is `ConnectionSettings`; caches are __per__ `ConnectionSettings`. + * + * In some applications it could make perfect sense to have multiple singleton `ElasticClient`'s registered with different + * connection settings. e.g if you have 2 functionally isolated Elasticsearch clusters. + * + * NOTE: Due to the semantic versioning of Elasticsearch.Net and NEST and their alignment to versions of Elasticsearch, all instances of `ElasticClient` and + * Elasticsearch clusters that are connected to must be on the **same major version** i.e. 
it is not possible to have both an `ElasticClient` to connect to + * Elasticsearch 1.x _and_ 2.x in the same application as the former would require NEST 1.x and the latter, NEST 2.x. + * + * Let's demonstrate which components are disposed by creating our own derived `ConnectionSettings`, `IConnectionPool` and `IConnection` types */ - [U] public void DisposingSettingsDisposesMovingParts() - { - var connection = new AConnection(); - var connectionPool = new AConnectionPool(new Uri("http://localhost:9200")); - var settings = new AConnectionSettings(connectionPool, connection); - using (settings) { } - settings.IsDisposed.Should().BeTrue(); - connectionPool.IsDisposed.Should().BeTrue(); - connection.IsDisposed.Should().BeTrue(); - } - - class AConnectionPool : SingleNodeConnectionPool + class AConnectionSettings : ConnectionSettings { - public AConnectionPool(Uri uri, IDateTimeProvider dateTimeProvider = null) : base(uri, dateTimeProvider) { } - + public AConnectionSettings(IConnectionPool pool, IConnection connection) + : base(pool, connection) + { } public bool IsDisposed { get; private set; } - protected override void DisposeManagedResources() + protected override void DisposeManagedResources() { this.IsDisposed = true; base.DisposeManagedResources(); } } - class AConnectionSettings : ConnectionSettings + class AConnectionPool : SingleNodeConnectionPool { - public AConnectionSettings(IConnectionPool pool, IConnection connection) - : base(pool, connection) { } + public AConnectionPool(Uri uri, IDateTimeProvider dateTimeProvider = null) : base(uri, dateTimeProvider) { } + public bool IsDisposed { get; private set; } protected override void DisposeManagedResources() { @@ -89,5 +68,31 @@ protected override void DisposeManagedResources() } } + /** + * `ConnectionSettings`, `IConnectionPool` and `IConnection` all explicitly implement `IDisposable` + */ + [U] public void InitialDisposeState() + { + var connection = new AConnection(); + var connectionPool = new 
AConnectionPool(new Uri("http://localhost:9200")); + var settings = new AConnectionSettings(connectionPool, connection); + settings.IsDisposed.Should().BeFalse(); + connectionPool.IsDisposed.Should().BeFalse(); + connection.IsDisposed.Should().BeFalse(); + } + + /** + * Disposing `ConnectionSettings` will dispose the `IConnectionPool` and `IConnection` it has a hold of + */ + [U] public void DisposingSettingsDisposesMovingParts() + { + var connection = new AConnection(); + var connectionPool = new AConnectionPool(new Uri("http://localhost:9200")); + var settings = new AConnectionSettings(connectionPool, connection); + using (settings) { } + settings.IsDisposed.Should().BeTrue(); + connectionPool.IsDisposed.Should().BeTrue(); + connection.IsDisposed.Should().BeTrue(); + } } } diff --git a/src/Tests/ClientConcepts/LowLevel/PostData.doc.cs b/src/Tests/ClientConcepts/LowLevel/PostData.doc.cs index 92fd97d6eff..a81285e9b66 100644 --- a/src/Tests/ClientConcepts/LowLevel/PostData.doc.cs +++ b/src/Tests/ClientConcepts/LowLevel/PostData.doc.cs @@ -13,9 +13,10 @@ namespace Tests.ClientConcepts.LowLevel { public class PostingData { - /** ## Post data - * The low level allows you to post a string, byte[] array directly. On top of this if you pass a list of strings or objects - * they will be serialized in Elasticsearch's special bulk/multi format. + /**== Post data + * The low level client allows you to post a `string` or `byte[]` array directly. On top of this, + * if you pass a collection of ``string``s or ``object``s they will be serialized + * using Elasticsearch's special bulk/multi format. */ private readonly string @string = "fromString"; private readonly byte[] bytes = Utf8Bytes("fromByteArray"); @@ -39,9 +40,15 @@ public PostingData() [U] public void ImplicitConversions() { - /** Even though the argument for postData on the low level client takes a PostData - * You can rely on C# implicit conversion to abstract the notion of PostData completely. 
- *You can implicitly convert from the following types.*/ + /** Even though the argument for PostData on the low level client takes a `PostData`, + * You can rely on implicit conversion to abstract the notion of PostData completely. + * You can implicitly convert from the following types + * - `string` + * - `byte[]` + * - collection of `string` + * - collection of `object` + * - `object` + */ var fromString = ImplicitlyConvertsFrom(@string); var fromByteArray = ImplicitlyConvertsFrom(bytes); @@ -49,7 +56,7 @@ [U] public void ImplicitConversions() var fromListOfObject = ImplicitlyConvertsFrom(listOfObjects); var fromObject = ImplicitlyConvertsFrom(@object); - /** postData Bytes will always be set if it originated from a byte*/ + /** PostData bytes will always be set if it originated from `byte[]` */ fromByteArray.WrittenBytes.Should().BeSameAs(bytes); fromString.Type.Should().Be(PostType.LiteralString); @@ -58,8 +65,7 @@ [U] public void ImplicitConversions() fromListOfObject.Type.Should().Be(PostType.EnumerableOfObject); fromObject.Type.Should().Be(PostType.Serializable); - //passing a PostData object to a method taking PostData should not wrap - + /** and passing a `PostData` object to a method taking `PostData` should not wrap */ fromString = ImplicitlyConvertsFrom(fromString); fromByteArray = ImplicitlyConvertsFrom(fromByteArray); fromListOfString = ImplicitlyConvertsFrom(fromListOfString); @@ -71,8 +77,6 @@ [U] public void ImplicitConversions() fromListOfString.Type.Should().Be(PostType.EnumerableOfString); fromListOfObject.Type.Should().Be(PostType.EnumerableOfObject); fromObject.Type.Should().Be(PostType.Serializable); - - } [U] public async Task WritesCorrectlyUsingBothLowAndHighLevelSettings() @@ -83,34 +87,33 @@ [U] public async Task WritesCorrectlyUsingBothLowAndHighLevelSettings() private async Task AssertOn(IConnectionConfigurationValues settings) { - /** Although each implicitly types behaves slightly differently */ - await Post(()=>@string, writes: 
Utf8Bytes(@string), storesBytes: true, settings: settings); + await Post(() => @string, writes: Utf8Bytes(@string), storesBytes: true, settings: settings); - await Post(()=>bytes, writes: bytes, storesBytes: true, settings: settings); + await Post(() => bytes, writes: bytes, storesBytes: true, settings: settings); /** When passing a list of strings we assume its a list of valid serialized json that we * join with newlinefeeds making sure there is a trailing linefeed */ - await Post(()=>listOfStrings, writes: multiStringJson, storesBytes: true, settings: settings); + await Post(() => listOfStrings, writes: multiStringJson, storesBytes: true, settings: settings); /** * When passing a list of object we assume its a list of objects we need to serialize * individually to json and join with newlinefeeds aking sure there is a trailing linefeed */ - await Post(()=>listOfObjects, writes: multiObjectJson, storesBytes: false, settings: settings); + await Post(() => listOfObjects, writes: multiObjectJson, storesBytes: false, settings: settings); /** In all other cases postdata is serialized as is. 
*/ - await Post(()=>@object, writes: objectJson, storesBytes: false, settings: settings); + await Post(() => @object, writes: objectJson, storesBytes: false, settings: settings); - /** If you want to maintain a copy of the request that went out use the following settings */ + /** If you want to maintain a copy of the request that went out, use `DisableDirectStreaming` */ settings = new ConnectionSettings().DisableDirectStreaming(); - /** by forcing `DisableDirectStreaming` serializing happens first in a private MemoryStream - * so we can get a hold of the serialized bytes */ - await Post(()=>listOfObjects, writes: multiObjectJson, storesBytes: true, settings: settings); + /** by forcing `DisableDirectStreaming` on connection settings, serialization happens first in a private `MemoryStream` + * so we can get hold of the serialized bytes */ + await Post(() => listOfObjects, writes: multiObjectJson, storesBytes: true, settings: settings); /** this behavior can also be observed when serializing a simple object using `DisableDirectStreaming` */ - await Post(()=>@object, writes: objectJson, storesBytes: true, settings: settings); + await Post(() => @object, writes: objectJson, storesBytes: true, settings: settings); } private static async Task Post(Func> postData, byte[] writes, bool storesBytes, IConnectionConfigurationValues settings) diff --git a/src/Tests/CodeStandards/NamingConventions.doc.cs b/src/Tests/CodeStandards/NamingConventions.doc.cs index 88df362d339..e9f06626bde 100644 --- a/src/Tests/CodeStandards/NamingConventions.doc.cs +++ b/src/Tests/CodeStandards/NamingConventions.doc.cs @@ -7,13 +7,13 @@ namespace Tests.CodeStandards { - /** # Naming Conventions - * + /** == Naming Conventions + * * NEST uses the following naming conventions (with _some_ exceptions). 
*/ public class NamingConventions { - /** ## Class Names + /** === Class Names * * Abstract class names should end with a `Base` suffix */ @@ -50,7 +50,7 @@ [U] public void ClassNameContainsBaseShouldBeAbstract() baseClassesNotAbstract.Should().BeEmpty(); } - /** ## Requests and Responses + /** === Requests and Responses * * Request class names should end with `Request` */ @@ -88,13 +88,12 @@ public void ResponseClassNamesEndWithResponse() * Request and Response class names should be one to one in *most* cases. * e.g. `ValidateRequest` => `ValidateResponse`, and not `ValidateQueryRequest` => `ValidateResponse` * There are a few exceptions to this rule, most notably the `Cat` prefixed requests and - * `Exists` requests. + * the `Exists` requests. */ [U] public void ParityBetweenRequestsAndResponses() { - // Add any exceptions to the rule here. - var exceptions = new[] + var exceptions = new[] // <1> _Exceptions to the rule_ { typeof(DocumentExistsRequest), typeof(DocumentExistsRequest<>), diff --git a/src/Tests/CodeStandards/Serialization/Properties.doc.cs b/src/Tests/CodeStandards/Serialization/Properties.doc.cs index 5e27213f989..23049da6abd 100644 --- a/src/Tests/CodeStandards/Serialization/Properties.doc.cs +++ b/src/Tests/CodeStandards/Serialization/Properties.doc.cs @@ -8,7 +8,6 @@ namespace Tests.CodeStandards.Serialization { public class JsonProperties { - /** * Our Json.NET contract resolver picks up attributes set on the interface */ @@ -27,8 +26,6 @@ public void SeesInterfaceProperties() serialized = c.Serializer.SerializeToString(new AnalysisDescriptor().CharFilters(cf=>cf)); serialized.Should().NotContain("char_filters").And.NotContain("charFilters"); serialized.Should().Contain("char_filter"); - - } } } diff --git a/src/Tests/CommonOptions/DateMath/DateMathExpressions.doc.cs b/src/Tests/CommonOptions/DateMath/DateMathExpressions.doc.cs index de3be865d88..82fb1acfbf4 100644 --- a/src/Tests/CommonOptions/DateMath/DateMathExpressions.doc.cs +++ 
b/src/Tests/CommonOptions/DateMath/DateMathExpressions.doc.cs @@ -8,30 +8,40 @@ namespace Tests.CommonOptions.DateMath { public class DateMathEpressions { - /** # Date Expressions - * The date type supports using date math expression when using it in a query/filter - *Whenever durations need to be specified, eg for a timeout parameter, the duration can be specified + /** == Date Math Expressions + * The date type supports using date math expression when using it in a query/filter + * Whenever durations need to be specified, eg for a timeout parameter, the duration can be specified * - * The expression starts with an "anchor" date, which can be either now or a date string (in the applicable format) ending with ||. - * It can then follow by a math expression, supporting +, - and / (rounding). - * The units supported are y (year), M (month), w (week), d (day), h (hour), m (minute), and s (second). - * as a whole number representing time in milliseconds, or as a time value like `2d` for 2 days. + * The expression starts with an "anchor" date, which can be either now or a date string (in the applicable format) ending with `||`. + * It can then be followed by a math expression, supporting `+`, `-` and `/` (rounding). + * The units supported are + * + * - `y` (year) + * - `M` (month) + * - `w` (week) + * - `d` (day) + * - `h` (hour) + * - `m` (minute) + * - `s` (second) * - * Be sure to read the elasticsearch documentation {ref}/mapping-date-format.html#date-math[on this subject here] - + * as a whole number representing time in milliseconds, or as a time value like `2d` for 2 days. 
+ * :datemath: {ref_current}/common-options.html#date-math + * Be sure to read the elasticsearch documentation {datemath}[on this subject here] */ - [U] public void SimpleExpressions() { - /** You can create simple expressions using any of the static methods on `DateMath` */ + /** === Simple Expressions + * You can create simple expressions using any of the static methods on `DateMath` + */ Expect("now").WhenSerializing(Nest.DateMath.Now); Expect("2015-05-05T00:00:00").WhenSerializing(Nest.DateMath.Anchored(new DateTime(2015,05, 05))); - /** strings implicitly convert to date maths */ + /** strings implicitly convert to `DateMath` */ Expect("now").WhenSerializing("now"); /** but are lenient to bad math expressions */ var nonsense = "now||*asdaqwe"; + Expect(nonsense).WhenSerializing(nonsense) /** the resulting date math will assume the whole string is the anchor */ .Result(dateMath => ((IDateMath)dateMath) @@ -41,10 +51,10 @@ [U] public void SimpleExpressions() ) ); - /** date's also implicitly convert to simple date math expressions */ + /** `DateTime` also implicitly convert to simple date math expressions */ var date = new DateTime(2015, 05, 05); Expect("2015-05-05T00:00:00").WhenSerializing(date) - /** the anchor will be an actual DateTime, even after a serialization - deserialization round trip */ + /** the anchor will be an actual `DateTime`, even after a serialization/deserialization round trip */ .Result(dateMath => ((IDateMath)dateMath) . Anchor.Match( d => d.Should().Be(date), @@ -55,19 +65,21 @@ . 
Anchor.Match( [U] public void ComplexExpressions() { - /** Ranges can be chained on to simple expressions */ + /** === Complex Expressions + * Ranges can be chained on to simple expressions + */ Expect("now+1d").WhenSerializing(Nest.DateMath.Now.Add("1d")); - /** plural means that you can chain multiple */ + /** Including multiple operations */ Expect("now+1d-1m").WhenSerializing(Nest.DateMath.Now.Add("1d").Subtract(TimeSpan.FromMinutes(1))); - /** a rounding value can also be chained at the end afterwhich no more ranges can be appended */ + /** A rounding value can be chained to the end of the expression, after which no more ranges can be appended */ Expect("now+1d-1m/d").WhenSerializing(Nest.DateMath.Now.Add("1d").Subtract(TimeSpan.FromMinutes(1)).RoundTo(Nest.TimeUnit.Day)); - /** When anchoring date's we need to append `||` as clear separator between anchor and ranges */ - /** plural means that you can chain multiple */ + /** When anchoring dates, a `||` needs to be appended as clear separator between the anchor and ranges. + * Again, multiple ranges can be chained + */ Expect("2015-05-05T00:00:00||+1d-1m").WhenSerializing(Nest.DateMath.Anchored(new DateTime(2015,05,05)).Add("1d").Subtract(TimeSpan.FromMinutes(1))); - } } diff --git a/src/Tests/CommonOptions/DistanceUnit/DistanceUnits.doc.cs b/src/Tests/CommonOptions/DistanceUnit/DistanceUnits.doc.cs new file mode 100644 index 00000000000..1e021fc6a4c --- /dev/null +++ b/src/Tests/CommonOptions/DistanceUnit/DistanceUnits.doc.cs @@ -0,0 +1,104 @@ +using Nest; +using Tests.Framework; +using static Tests.Framework.RoundTripper; + +namespace Tests.CommonOptions.DistanceUnit +{ + public class DistanceUnits + { + /**== Distance Units + * Whenever distances need to be specified, e.g. 
for a {ref_current}/query-dsl-geo-distance-query.html[geo distance query], + * the distance unit can be specified as a double number representing distance in meters, as a new instance of + * a `Distance`, or as a string of the form number and distance unit e.g. "`2.72km`" + * + * === Using Distance units in NEST + * NEST uses `Distance` to strongly type distance units and there are several ways to construct one. + * + * ==== Constructor + * The most straight forward way to construct a `Distance` is through its constructor + */ + [U] + public void Constructor() + { + var unitComposed = new Distance(25); + var unitComposedWithUnits = new Distance(25, Nest.DistanceUnit.Meters); + + /** + * `Distance` serializes to a string composed of a factor and distance unit. + * The factor is a double so always has at least one decimal place when serialized + */ + Expect("25.0m") + .WhenSerializing(unitComposed) + .WhenSerializing(unitComposedWithUnits); + } + + /** + * ==== Implicit conversion + * Alternatively a distance unit `string` can be assigned to a `Distance`, resulting in an implicit conversion to a new `Distance` instance. 
+ * If no `DistanceUnit` is specified, the default distance unit is meters + */ + [U] + public void ImplicitConversion() + { + Distance distanceString = "25"; + Distance distanceStringWithUnits = "25m"; + + Expect(new Distance(25)) + .WhenSerializing(distanceString) + .WhenSerializing(distanceStringWithUnits); + } + + /** + * ==== Supported units + * A number of distance units are supported, from millimeters to nautical miles + */ + [U] + public void UsingDifferentUnits() + { + /** ===== Metric + *`mm` (Millimeters) + */ + Expect("2.0mm").WhenSerializing(new Distance(2, Nest.DistanceUnit.Millimeters)); + + /** + *`cm` (Centimeters) + */ + Expect("123.456cm").WhenSerializing(new Distance(123.456, Nest.DistanceUnit.Centimeters)); + + /** + *`m` (Meters) + */ + Expect("400.0m").WhenSerializing(new Distance(400, Nest.DistanceUnit.Meters)); + + /** + *`km` (Kilometers) + */ + Expect("0.1km").WhenSerializing(new Distance(0.1, Nest.DistanceUnit.Kilometers)); + + /** ===== Imperial + *`in` (Inches) + */ + Expect("43.23in").WhenSerializing(new Distance(43.23, Nest.DistanceUnit.Inch)); + + /** + *`ft` (Feet) + */ + Expect("3.33ft").WhenSerializing(new Distance(3.33, Nest.DistanceUnit.Feet)); + + /** + *`yd` (Yards) + */ + Expect("9.0yd").WhenSerializing(new Distance(9, Nest.DistanceUnit.Yards)); + + /** + *`mi` (Miles) + */ + Expect("0.62mi").WhenSerializing(new Distance(0.62, Nest.DistanceUnit.Miles)); + + /** + *`nmi` or `NM` (Nautical Miles) + */ + Expect("45.5nmi").WhenSerializing(new Distance(45.5, Nest.DistanceUnit.NauticalMiles)); + } + } +} diff --git a/src/Tests/CommonOptions/TimeUnit/TimeUnits.doc.cs b/src/Tests/CommonOptions/TimeUnit/TimeUnits.doc.cs index d4c8de75b03..8e6d1cec232 100644 --- a/src/Tests/CommonOptions/TimeUnit/TimeUnits.doc.cs +++ b/src/Tests/CommonOptions/TimeUnit/TimeUnits.doc.cs @@ -7,19 +7,18 @@ namespace Tests.CommonOptions.TimeUnit { - public class TimeUnits + public class TimeUnits { - /** # Time units - * Whenever durations need to be 
specified, eg for a timeout parameter, the duration can be specified - * as a whole number representing time in milliseconds, or as a time value like `2d` for 2 days. - * - * ## Using Time units in NEST + /** == Time units + * Whenever durations need to be specified, eg for a timeout parameter, the duration can be specified + * as a whole number representing time in milliseconds, or as a time value like `2d` for 2 days. + * + * === Using Time units in NEST * NEST uses `Time` to strongly type this and there are several ways to construct one. * - * ### Constructor + * ==== Constructor * The most straight forward way to construct a `Time` is through its constructor */ - [U] public void Constructor() { var unitString = new Time("2d"); @@ -28,8 +27,13 @@ [U] public void Constructor() var unitMilliseconds = new Time(1000 * 60 * 60 * 24 * 2); /** - * When serializing Time constructed from a string, milliseconds, composition of factor and - * interval, or a `TimeSpan` the expression will be serialized as time unit string + * When serializing Time constructed from + * - a string + * - milliseconds (as a double) + * - composition of factor and interval + * - a `TimeSpan` + * + * the expression will be serialized to a time unit string composed of the factor and interval e.g. 
`2d` */ Expect("2d") .WhenSerializing(unitString) @@ -38,7 +42,7 @@ [U] public void Constructor() .WhenSerializing(unitMilliseconds); /** - * Milliseconds are always calculated even when not using the constructor that takes a long + * The `Milliseconds` property on `Time` is calculated even when not using the constructor that takes a double */ unitMilliseconds.Milliseconds.Should().Be(1000*60*60*24*2); unitComposed.Milliseconds.Should().Be(1000*60*60*24*2); @@ -46,10 +50,9 @@ [U] public void Constructor() unitString.Milliseconds.Should().Be(1000*60*60*24*2); } /** - * ### Implicit conversion - * Alternatively `string`, `TimeSpan` and `double` can be implicitly assigned to `Time` properties and variables + * ==== Implicit conversion + * Alternatively to using the constructor, `string`, `TimeSpan` and `double` can be implicitly converted to `Time` */ - [U] [SuppressMessage("ReSharper", "SuggestVarOrType_SimpleTypes")] public void ImplicitConversion() { @@ -62,14 +65,13 @@ public void ImplicitConversion() Expect("2d").WhenSerializing(twoDays); } - [U] [SuppressMessage("ReSharper", "SuggestVarOrType_SimpleTypes")] public void EqualityAndComparable() { Time oneAndHalfYear = "1.5y"; Time twoWeeks = TimeSpan.FromDays(14); Time twoDays = 1000*60*60*24*2; - + /** * Milliseconds are calculated even when values are not passed as long */ @@ -90,12 +92,12 @@ public void EqualityAndComparable() (oneAndHalfYear > twoWeeks).Should().BeTrue(); (oneAndHalfYear >= twoWeeks).Should().BeTrue(); (twoDays >= new Time("2d")).Should().BeTrue(); - + twoDays.Should().BeLessThan(twoWeeks); (twoDays < twoWeeks).Should().BeTrue(); (twoDays <= twoWeeks).Should().BeTrue(); (twoDays <= new Time("2d")).Should().BeTrue(); - + /** * And assert equality */ @@ -110,8 +112,8 @@ public void EqualityAndComparable() [U] public void UsingInterval() { - /** - * Time units are specified as a union of either a `DateInterval` or `Time` + /** === Time units + * Time units are specified as a union of either a 
`DateInterval` or `Time`, * both of which implicitly convert to the `Union` of these two. */ Expect("month").WhenSerializing>(DateInterval.Month); diff --git a/src/Tests/Document/Multiple/Bulk/BulkApiTests.cs b/src/Tests/Document/Multiple/Bulk/BulkApiTests.cs index fba1b200e34..8c3ec8e7ecb 100644 --- a/src/Tests/Document/Multiple/Bulk/BulkApiTests.cs +++ b/src/Tests/Document/Multiple/Bulk/BulkApiTests.cs @@ -28,7 +28,7 @@ protected override LazyResponses ClientUsage() => Calls( protected override bool SupportsDeserialization => false; - protected override object ExpectJson { get; } = new object[] + protected override object ExpectJson => new object[] { new Dictionary{ { "index", new { _type = "project", _id = Project.Instance.Name } } }, Project.InstanceAnonymous, @@ -39,6 +39,32 @@ protected override LazyResponses ClientUsage() => Calls( new Dictionary{ { "delete", new { _type="project", _id = Project.Instance.Name + "1" } } }, }; + protected override Func Fluent => d => d + .Index(CallIsolatedValue) + .Index(b => b.Document(Project.Instance)) + .Update(b => b.Doc(new { leadDeveloper = new { firstName = "martijn" } }).Id(Project.Instance.Name)) + .Create(b => b.Document(Project.Instance).Id(Project.Instance.Name + "1")) + .Delete(b=>b.Id(Project.Instance.Name + "1")); + + + protected override BulkRequest Initializer => + new BulkRequest(CallIsolatedValue) + { + Operations = new List + { + new BulkIndexOperation(Project.Instance), + new BulkUpdateOperation(Project.Instance) + { + Doc = new { leadDeveloper = new { firstName = "martijn" } } + }, + new BulkCreateOperation(Project.Instance) + { + Id = Project.Instance.Name + "1" + }, + new BulkDeleteOperation(Project.Instance.Name + "1"), + } + }; + protected override void ExpectResponse(IBulkResponse response) { response.Took.Should().BeGreaterThan(0); @@ -58,33 +84,9 @@ protected override void ExpectResponse(IBulkResponse response) item.Shards.Successful.Should().BeGreaterThan(0); } - var p1 = 
this.Client.Source(Project.Instance.Name, p=>p.Index(CallIsolatedValue)); + var p1 = this.Client.Source(Project.Instance.Name, p => p.Index(CallIsolatedValue)); p1.LeadDeveloper.FirstName.Should().Be("martijn"); } - protected override Func Fluent => d => d - .Index(CallIsolatedValue) - .Index(b => b.Document(Project.Instance)) - .Update(b => b.Doc(new { leadDeveloper = new { firstName = "martijn" } }).Id(Project.Instance.Name)) - .Create(b => b.Document(Project.Instance).Id(Project.Instance.Name + "1")) - .Delete(b=>b.Id(Project.Instance.Name + "1")); - - - protected override BulkRequest Initializer => new BulkRequest(CallIsolatedValue) - { - Operations = new List - { - new BulkIndexOperation(Project.Instance), - new BulkUpdateOperation(Project.Instance) - { - Doc = new { leadDeveloper = new { firstName = "martijn" } } - }, - new BulkCreateOperation(Project.Instance) - { - Id = Project.Instance.Name + "1" - }, - new BulkDeleteOperation(Project.Instance.Name + "1"), - } - }; } } diff --git a/src/Tests/Document/Multiple/DeleteByQuery/DeleteByQueryApiTests.cs b/src/Tests/Document/Multiple/DeleteByQuery/DeleteByQueryApiTests.cs index 441c7a60872..3e3785561ae 100644 --- a/src/Tests/Document/Multiple/DeleteByQuery/DeleteByQueryApiTests.cs +++ b/src/Tests/Document/Multiple/DeleteByQuery/DeleteByQueryApiTests.cs @@ -39,6 +39,7 @@ protected override LazyResponses ClientUsage() => Calls( protected override bool ExpectIsValid => true; protected override int ExpectStatusCode => 200; protected override HttpMethod HttpMethod => HttpMethod.DELETE; + protected override string UrlPath => $"/{CallIsolatedValue}%2C{SecondIndex}/_query?ignore_unavailable=true"; protected override bool SupportsDeserialization => false; @@ -55,13 +56,6 @@ protected override LazyResponses ClientUsage() => Calls( } }; - protected override void ExpectResponse(IDeleteByQueryResponse response) - { - response.Indices.Should().NotBeEmpty().And.HaveCount(2).And.ContainKey(CallIsolatedValue); - 
response.Indices[CallIsolatedValue].Deleted.Should().Be(1); - response.Indices[CallIsolatedValue].Found.Should().Be(1); - } - protected override DeleteByQueryDescriptor NewDescriptor() => new DeleteByQueryDescriptor(this.Indices); protected override Func, IDeleteByQueryRequest> Fluent => d => d @@ -82,5 +76,12 @@ protected override void ExpectResponse(IDeleteByQueryResponse response) Values = new Id[] { Project.Projects.First().Name, "x" } } }; + + protected override void ExpectResponse(IDeleteByQueryResponse response) + { + response.Indices.Should().NotBeEmpty().And.HaveCount(2).And.ContainKey(CallIsolatedValue); + response.Indices[CallIsolatedValue].Deleted.Should().Be(1); + response.Indices[CallIsolatedValue].Found.Should().Be(1); + } } } diff --git a/src/Tests/Document/Single/Update/UpdateApiTests.cs b/src/Tests/Document/Single/Update/UpdateApiTests.cs index cbcf8909490..fc71b21b602 100644 --- a/src/Tests/Document/Single/Update/UpdateApiTests.cs +++ b/src/Tests/Document/Single/Update/UpdateApiTests.cs @@ -43,7 +43,7 @@ protected override LazyResponses ClientUsage() => Calls( protected override UpdateDescriptor NewDescriptor() => new UpdateDescriptor(DocumentPath.Id(CallIsolatedValue)); - protected override Func, IUpdateRequest> Fluent => d => d + protected override Func, IUpdateRequest> Fluent => u => u .Doc(Project.Instance) .DocAsUpsert() .DetectNoop(); diff --git a/src/Tests/QueryDsl/BoolDsl/BoolDsl.doc.cs b/src/Tests/QueryDsl/BoolDsl/BoolDsl.doc.cs index 7082f9fe20c..a3515202520 100644 --- a/src/Tests/QueryDsl/BoolDsl/BoolDsl.doc.cs +++ b/src/Tests/QueryDsl/BoolDsl/BoolDsl.doc.cs @@ -9,11 +9,15 @@ namespace Tests.QueryDsl.BoolDsl { + /**== Bool Queries + */ public class BoolDslTests : OperatorUsageBase { protected readonly IElasticClient Client = TestClient.GetFixedReturnClient(new { }); - - /** Writing boolean queries can grow rather verbose rather quickly using the query DSL e.g */ + + /** Writing boolean queries can grow verbose rather quickly when using 
the query DSL. For example, + * take a single {ref_current}/query-dsl-bool-query.html[``bool`` query] with only two clauses + */ public void VerboseWay() { var searchResults = this.Client.Search(s => s @@ -27,22 +31,23 @@ public void VerboseWay() ) ); } - /** now this is just a single bool with only two clauses, imagine multiple nested bools this quickly becomes an exercise in - hadouken indenting + /**Now, imagine multiple nested bools; you'll realise that this quickly becomes an exercise in _hadouken indenting_ * *[[indent]] - *.hadouken indenting example - *image::http://i.imgur.com/BtjZedW.jpg[dead indent] + *.hadouken indenting + *image::hadouken-indentation.jpg[hadouken indenting] * - - * For this reason, NEST introduces operator overloading so complex bool queries become easier to write, the previous example will become. */ - + *=== Operator Overloading + * + *For this reason, NEST introduces **operator overloading** so complex bool queries become easier to write. + *The previous example now becomes the following with the fluent API + */ public void UsingOperator() { var searchResults = this.Client.Search(s => s .Query(q => q.Term(p => p.Name, "x") || q.Term(p => p.Name, "y")) ); - /** Or using the object initializer syntax */ + /** or, using the object initializer syntax */ searchResults = this.Client.Search(new SearchRequest { Query = new TermQuery { Field = "name", Value= "x" } @@ -51,58 +56,78 @@ public void UsingOperator() } /** A naive implementation of operator overloading would rewrite - + * * `term && term && term` to - - *> bool - *> |___must - *> |___term - *> |___bool - *> |___must - *> |___term - *> |___term - + * + *.... + *bool + *|___must + * |___term + * |___bool + * |___must + * |___term + * |___term + *.... 
+ * * As you can image this becomes unwieldy quite fast the more complex a query becomes NEST can spot these and * join them together to become a single bool query - - *> bool - *> |___must - *> |___term - *> |___term - *> |___term - + * + *.... + *bool + *|___must + * |___term + * |___term + * |___term + *.... */ - [U] public void JoinsMustQueries() => + [U] public void JoinsMustQueries() + { Assert( q => q.Query() && q.Query() && q.Query(), Query && Query && Query, c => c.Bool.Must.Should().HaveCount(3) - ); - - /** The bool DSL offers also a short hand notation to mark a query as a must_not using ! */ - - [U] public void MustNotOperator() => Assert(q => !q.Query(), !Query, c => c.Bool.MustNot.Should().HaveCount(1)); + ); + } - /** And to mark a query as a filter using + */ + /** The bool DSL offers also a short hand notation to mark a query as a `must_not` using the `!` operator */ + [U] public void MustNotOperator() + { + Assert(q => !q.Query(), !Query, c => c.Bool.MustNot.Should().HaveCount(1)); + } - [U] public void UnaryAddOperator() => Assert(q => +q.Query(), +Query, c => c.Bool.Filter.Should().HaveCount(1)); + /** And to mark a query as a `filter` using the `+` operator*/ + [U] public void UnaryAddOperator() + { + Assert(q => +q.Query(), +Query, c => c.Bool.Filter.Should().HaveCount(1)); + } - /** Both of these can be combined with ands to a single bool query */ + /** Both of these can be combined with `&&` to form a single bool query */ - [U] public void MustNotOperatorAnd() => Assert(q => !q.Query() && !q.Query(), !Query && !Query, c => c.Bool.MustNot.Should().HaveCount(2)); - [U] public void UnaryAddOperatorAnd() => Assert(q => +q.Query() && +q.Query(), +Query && +Query, c => c.Bool.Filter.Should().HaveCount(2)); + [U] public void MustNotOperatorAnd() + { + Assert(q => !q.Query() && !q.Query(), !Query && !Query, c => c.Bool.MustNot.Should().HaveCount(2)); + } - /** When combining multiple queries some or all possibly marked as must_not or filter NEST 
still combines to a single bool query + [U] public void UnaryAddOperatorAnd() + { + Assert(q => +q.Query() && +q.Query(), +Query && +Query, c => c.Bool.Filter.Should().HaveCount(2)); + } - *> bool - *> |___must - *> | |___term - *> | |___term - *> | |___term - > | - *> |___must_not - *> |___term + /** === Combining/Merging bool queries + * + * When combining multiple queries some or all possibly marked as `must_not` or `filter`, NEST still combines to a single bool query + * + *.... + *bool + *|___must + *| |___term + *| |___term + *| |___term + *| + *|___must_not + * |___term + *.... */ [U] public void JoinsMustWithMustNot() @@ -118,100 +143,101 @@ [U] public void JoinsMustWithMustNot() } - /** Even more involved `term && term && term && !term && +term && +term` still only results in a single bool query: - - *> bool - *> |___must - *> | |___term - *> | |___term - *> | |___term - *> | - *> |___must_not - *> | |___term - *> | - *> |___filter - *> |___term - *> |___term + /** Even more involved `term && term && term && !term && +term && +term` still only results in a single `bool` query: + *.... + *bool + *|___must + *| |___term + *| |___term + *| |___term + *| + *|___must_not + *| |___term + *| + *|___filter + * |___term + * |___term + *.... */ - - [U] public void JoinsMustWithMustNotAndFilter() => + [U] public void JoinsMustWithMustNotAndFilter() + { Assert( q => q.Query() && q.Query() && q.Query() && !q.Query() && +q.Query() && +q.Query(), Query && Query && Query && !Query && +Query && +Query, - c=> + c => { c.Bool.Must.Should().HaveCount(3); c.Bool.MustNot.Should().HaveCount(1); c.Bool.Filter.Should().HaveCount(2); }); + } - /** You can still mix and match actual bool queries with the bool dsl e.g - - * `bool(must=term, term, term) && !term` - - * it would still merge into a single bool query. 
*/ - - [U] public void MixAndMatch() => + /** You can still mix and match actual bool queries with the bool DSL e.g + * `bool(must=term, term, term) && !term` would still merge into a single `bool` query. + */ + [U] public void MixAndMatch() + { Assert( - q => q.Bool(b=>b.Must(mq=>mq.Query(),mq=>mq.Query(), mq=>mq.Query())) && !q.Query(), + q => q.Bool(b => b.Must(mq => mq.Query(), mq => mq.Query(), mq => mq.Query())) && !q.Query(), new BoolQuery { Must = new QueryContainer[] { Query, Query, Query } } && !Query, - c=> + c => { c.Bool.Must.Should().HaveCount(3); c.Bool.MustNot.Should().HaveCount(1); }); + } - /* NEST will also do the same with `should`'s or OR's when it sees that the boolean queries in play **ONLY** consist of `should clauses`. - * This is because the boolquery does not quite follow the same boolean logic you expect from a programming language. - - * To summarize the latter: - + /* NEST will also do the same with `should`s or `||` when it sees that the boolean queries in play **ONLY** consist of `should` clauses. + * This is because the `bool` query does not quite follow the same boolean logic you expect from a programming language. + * + * To summarize, the latter: + * * `term || term || term` - + * * becomes - - *> bool - *> |___should - *> |___term - *> |___term - *> |___term - - * but - - * `term1 && (term2 || term3 || term4)` will NOT become - - *> bool - *> |___must - *> | |___term1 - *> | - *> |___should - *> |___term2 - *> |___term3 - *> |___term4 - - * This is because when a bool query has **only** should clauses atleast 1 of them has to match. When that bool query also has a must clause the should clauses start acting as a boost factor + *.... + *bool + *|___should + * |___term + * |___term + * |___term + *.... + * but `term1 && (term2 || term3 || term4)` does **NOT** become + *.... + *bool + *|___must + *| |___term1 + *| + *|___should + * |___term2 + * |___term3 + * |___term4 + *.... 
+ * + * This is because when a `bool` query has **only** `should` clauses, at least one of them must match. + * When that `bool` query also has a `must` clause then the `should` clauses start acting as a _boost_ factor * and none of them have to match, drastically altering its meaning. - - * So in the previous you could get back results that ONLY contain `term1` this is clearly not what you want in the strict boolean sense of the input. - - * NEST therefor rewrites this query to - - *> bool - *> |___must - *> |___term1 - *> |___bool - *> |___should - *> |___term2 - *> |___term3 - *> |___term4 - + * + * So in the previous you could get back results that **ONLY** contain `term1`. This is clearly not what you want in the strict boolean sense of the input. + * + * To aid with this, NEST rewrites the previous query to + *.... + *bool + *|___must + * |___term1 + * |___bool + * |___should + * |___term2 + * |___term3 + * |___term4 + *.... */ - - [U] public void JoinsWithShouldClauses() => + [U] public void JoinsWithShouldClauses() + { Assert( q => q.Query() && (q.Query() || q.Query() || q.Query()), Query && (Query || Query || Query), - c=> + c => { c.Bool.Must.Should().HaveCount(2); var lastClause = c.Bool.Must.Last() as IQueryContainer; @@ -219,63 +245,81 @@ [U] public void JoinsWithShouldClauses() => lastClause.Bool.Should().NotBeNull(); lastClause.Bool.Should.Should().HaveCount(3); }); + } - /* Note also that you can parenthesis to force evaluation order */ - - /* Also note that using shoulds as boosting factors can be really powerful so if you need this always remember that you can mix and match an actual bool query with the bool dsl */ - - /* There is another subtle situation where NEST will not blindly merge 2 bool queries with only should clauses. 
Image the following: - + /** TIP: You can add parentheses to force evaluation order + * + * Also note that using shoulds as boosting factors can be really powerful so if you need this + *always remember that you can mix and match an actual bool query with the bool dsl. + * + * There is another subtle situation where NEST will not blindly merge 2 bool queries with only should clauses. Imagine the following: + * * `bool(should=term1, term2, term3, term4, minimum_should_match=2) || term5 || term6` - - * if NEST identified both sides of the OR operation as only containing should clauses and it would join them together it would give a different meaning to the `minimum_should_match` parameter of the first boolean query. - * Rewriting this to a single bool with 5 should clauses would break because only matching on term5 or term6 should still be a hit. - */ - - [U] public void MixAndMatchMinimumShouldMatch() => + * + * if NEST identified both sides of the OR operation as only containing `should` clauses and it would + * join them together it would give a different meaning to the `minimum_should_match` parameter of the first boolean query. + * Rewriting this to a single bool with 5 `should` clauses would break because only matching on `term5` or `term6` should still be a hit. 
+ **/ + [U] + public void MixAndMatchMinimumShouldMatch() + { Assert( - q => q.Bool(b=>b - .Should(mq=>mq.Query(),mq=>mq.Query(), mq=>mq.Query(), mq=>mq.Query()) - .MinimumShouldMatch(2) - ) - || !q.Query() || q.Query(), + q => q.Bool(b => b + .Should(mq => mq.Query(), mq => mq.Query(), mq => mq.Query(), mq => mq.Query()) + .MinimumShouldMatch(2) + ) + || !q.Query() || q.Query(), new BoolQuery { Should = new QueryContainer[] { Query, Query, Query, Query }, MinimumShouldMatch = 2 } || !Query || Query, - c=> + c => { c.Bool.Should.Should().HaveCount(3); var nestedBool = c.Bool.Should.First() as IQueryContainer; nestedBool.Bool.Should.Should().HaveCount(4); }); + } - /* Nest will also not combine if any metadata is set on the bool e.g boost/name nest will treat these as locked */ - - [U] public void DoNotCombineLockedBools() => + /** === Locked bool queries + * + * NEST will not combine `bool` queries if any of the query metadata is set e.g if metadata such as `boost` or `name` are set, + * NEST will treat these as locked + * + * Here we demonstrate that two locked `bool` queries are not combined + */ + [U] public void DoNotCombineLockedBools() + { Assert( - q => q.Bool(b=>b.Name("leftBool").Should(mq=>mq.Query())) - || q.Bool(b=>b.Name("rightBool").Should(mq=>mq.Query())), + q => q.Bool(b => b.Name("leftBool").Should(mq => mq.Query())) + || q.Bool(b => b.Name("rightBool").Should(mq => mq.Query())), new BoolQuery { Name = "leftBool", Should = new QueryContainer[] { Query } } - || new BoolQuery { Name = "rightBool", Should = new QueryContainer[] { Query } }, - c=>AssertDoesNotJoinOntoLockedBool(c, "leftBool")); + || new BoolQuery { Name = "rightBool", Should = new QueryContainer[] { Query } }, + c => AssertDoesNotJoinOntoLockedBool(c, "leftBool")); + } - [U] public void DoNotCombineRightLockedBool() => + /** neither are two `bool` queries where either right query is locked */ + [U] public void DoNotCombineRightLockedBool() + { Assert( - q => 
q.Bool(b=>b.Should(mq=>mq.Query())) - || q.Bool(b=>b.Name("rightBool").Should(mq=>mq.Query())), + q => q.Bool(b => b.Should(mq => mq.Query())) + || q.Bool(b => b.Name("rightBool").Should(mq => mq.Query())), new BoolQuery { Should = new QueryContainer[] { Query } } - || new BoolQuery { Name = "rightBool", Should = new QueryContainer[] { Query } }, - c=>AssertDoesNotJoinOntoLockedBool(c, "rightBool")); + || new BoolQuery { Name = "rightBool", Should = new QueryContainer[] { Query } }, + c => AssertDoesNotJoinOntoLockedBool(c, "rightBool")); + } - [U] public void DoNotCombineLeftLockedBool() => + /** or the left query is locked */ + [U] public void DoNotCombineLeftLockedBool() + { Assert( - q => q.Bool(b=>b.Name("leftBool").Should(mq=>mq.Query())) - || q.Bool(b=>b.Should(mq=>mq.Query())), + q => q.Bool(b => b.Name("leftBool").Should(mq => mq.Query())) + || q.Bool(b => b.Should(mq => mq.Query())), new BoolQuery { Name = "leftBool", Should = new QueryContainer[] { Query } } - || new BoolQuery { Should = new QueryContainer[] { Query } }, - c=>AssertDoesNotJoinOntoLockedBool(c, "leftBool")); + || new BoolQuery { Should = new QueryContainer[] { Query } }, + c => AssertDoesNotJoinOntoLockedBool(c, "leftBool")); + } private static void AssertDoesNotJoinOntoLockedBool(IQueryContainer c, string firstName) { diff --git a/src/Tests/QueryDsl/BoolDsl/hadouken-indentation.jpg b/src/Tests/QueryDsl/BoolDsl/hadouken-indentation.jpg new file mode 100644 index 00000000000..afe03b960d1 Binary files /dev/null and b/src/Tests/QueryDsl/BoolDsl/hadouken-indentation.jpg differ diff --git a/src/Tests/QueryDsl/Geo/Distance/DistanceUnits.doc.cs b/src/Tests/QueryDsl/Geo/Distance/DistanceUnits.doc.cs deleted file mode 100644 index a4546a9c3a7..00000000000 --- a/src/Tests/QueryDsl/Geo/Distance/DistanceUnits.doc.cs +++ /dev/null @@ -1,103 +0,0 @@ -using Nest; -using Tests.Framework; -using static Tests.Framework.RoundTripper; - -namespace Tests.QueryDsl.Geo.Distance -{ - public class DistanceUnits - 
{ - /** # Distance Units - * Whenever distances need to be specified, e.g. for a geo distance query, the distance unit can be specified - * as a double number representing distance in meters, as a new instance of a `Distance`, or as a string - * of the form number and distance unit e.g. `"2.72km"` - * - * ## Using Distance units in NEST - * NEST uses `Distance` to strongly type distance units and there are several ways to construct one. - * - * ### Constructor - * The most straight forward way to construct a `Distance` is through its constructor - */ - [U] - public void Constructor() - { - var unitComposed = new Nest.Distance(25); - var unitComposedWithUnits = new Nest.Distance(25, DistanceUnit.Meters); - - /** - * When serializing Distance constructed from a string, composition of distance value and unit - */ - Expect("25.0m") - .WhenSerializing(unitComposed) - .WhenSerializing(unitComposedWithUnits); - } - - /** - * ### Implicit conversion - * Alternatively a distance unit `string` can be assigned to a `Distance`, resulting in an implicit conversion to a new `Distance` instance. 
- * If no `DistanceUnit` is specified, the default distance unit is meters - */ - [U] - public void ImplicitConversion() - { - Nest.Distance distanceString = "25"; - Nest.Distance distanceStringWithUnits = "25m"; - - Expect(new Nest.Distance(25)) - .WhenSerializing(distanceString) - .WhenSerializing(distanceStringWithUnits); - } - - /** - * ### Supported units - * A number of distance units are supported, from millimeters to nautical miles - */ - [U] - public void UsingDifferentUnits() - { - /** - * Miles - */ - Expect("0.62mi").WhenSerializing(new Nest.Distance(0.62, DistanceUnit.Miles)); - - /** - * Yards - */ - Expect("9.0yd").WhenSerializing(new Nest.Distance(9, DistanceUnit.Yards)); - - /** - * Feet - */ - Expect("3.33ft").WhenSerializing(new Nest.Distance(3.33, DistanceUnit.Feet)); - - /** - * Inches - */ - Expect("43.23in").WhenSerializing(new Nest.Distance(43.23, DistanceUnit.Inch)); - - /** - * Kilometers - */ - Expect("0.1km").WhenSerializing(new Nest.Distance(0.1, DistanceUnit.Kilometers)); - - /** - * Meters - */ - Expect("400.0m").WhenSerializing(new Nest.Distance(400, DistanceUnit.Meters)); - - /** - * Centimeters - */ - Expect("123.456cm").WhenSerializing(new Nest.Distance(123.456, DistanceUnit.Centimeters)); - - /** - * Millimeters - */ - Expect("2.0mm").WhenSerializing(new Nest.Distance(2, DistanceUnit.Millimeters)); - - /** - * Nautical Miles - */ - Expect("45.5nmi").WhenSerializing(new Nest.Distance(45.5, DistanceUnit.NauticalMiles)); - } - } -} \ No newline at end of file diff --git a/src/Tests/QueryDsl/Span/Not/SpanNotQueryUsageTests.cs b/src/Tests/QueryDsl/Span/Not/SpanNotQueryUsageTests.cs index 17b41a1fc2e..ddb45def3df 100644 --- a/src/Tests/QueryDsl/Span/Not/SpanNotQueryUsageTests.cs +++ b/src/Tests/QueryDsl/Span/Not/SpanNotQueryUsageTests.cs @@ -36,8 +36,20 @@ public SpanNotUsageTests(ReadOnlyCluster i, EndpointUsage usage) : base(i, usage Dist = 12, Post = 13, Pre = 14, - Include = new SpanQuery { SpanTerm = new SpanTermQuery { Field = 
"field1", Value = "hoya" } }, - Exclude = new SpanQuery { SpanTerm = new SpanTermQuery { Field = "field1", Value = "hoya2" } }, + Include = new SpanQuery + { + SpanTerm = new SpanTermQuery + { + Field = "field1", Value = "hoya" + } + }, + Exclude = new SpanQuery + { + SpanTerm = new SpanTermQuery + { + Field = "field1", Value = "hoya2" + } + }, }; protected override QueryContainer QueryFluent(QueryContainerDescriptor q) => q diff --git a/src/Tests/QueryDsl/Specialized/Template/TemplateQueryUsageTests.cs b/src/Tests/QueryDsl/Specialized/Template/TemplateQueryUsageTests.cs index 014dc1a0ced..c6288903a2c 100644 --- a/src/Tests/QueryDsl/Specialized/Template/TemplateQueryUsageTests.cs +++ b/src/Tests/QueryDsl/Specialized/Template/TemplateQueryUsageTests.cs @@ -23,8 +23,6 @@ public TemplateUsageTests(ReadOnlyCluster i, EndpointUsage usage) : base(i, usag query_string = "all about search" } } - - }; protected override QueryContainer QueryInitializer => new TemplateQuery diff --git a/src/Tests/QueryDsl/TermLevel/Terms/TermsQueryUsageTests.cs b/src/Tests/QueryDsl/TermLevel/Terms/TermsQueryUsageTests.cs index 1984fba674b..32ed0aad48f 100644 --- a/src/Tests/QueryDsl/TermLevel/Terms/TermsQueryUsageTests.cs +++ b/src/Tests/QueryDsl/TermLevel/Terms/TermsQueryUsageTests.cs @@ -6,6 +6,11 @@ namespace Tests.QueryDsl.TermLevel.Terms { + /** + * Filters documents that have fields that match any of the provided terms (not analyzed). + * + * Be sure to read the Elasticsearch documentation on {ref_current}/query-dsl-terms-query.html[Terms query] for more information. 
+ */ public class TermsQueryUsageTests : QueryDslUsageTestsBase { protected virtual string[] ExpectedTerms => new [] { "term1", "term2" }; @@ -53,6 +58,9 @@ protected override QueryContainer QueryFluent(QueryContainerDescriptor }; } + /**[float] + *== Single term Terms Query + */ public class SingleTermTermsQueryUsageTests : TermsQueryUsageTests { protected override string[] ExpectedTerms => new [] { "term1" }; diff --git a/src/Tests/Search/Request/InnerHitsUsageTests.cs b/src/Tests/Search/Request/InnerHitsUsageTests.cs index b160ee41022..6f65982ad4a 100644 --- a/src/Tests/Search/Request/InnerHitsUsageTests.cs +++ b/src/Tests/Search/Request/InnerHitsUsageTests.cs @@ -146,7 +146,7 @@ public GlobalInnerHitsApiTests(OwnIndexCluster cluster, EndpointUsage usage) : b private static IndexName IndexName { get; } = RandomString(); protected override IndexName Index => GlobalInnerHitsApiTests.IndexName; - protected override object ExpectJson { get; } = new + protected override object ExpectJson => new { inner_hits = new { @@ -238,7 +238,7 @@ public QueryInnerHitsApiTests(OwnIndexCluster cluster, EndpointUsage usage) : ba private static IndexName IndexName { get; } = RandomString(); protected override IndexName Index => QueryInnerHitsApiTests.IndexName; - protected override object ExpectJson { get; } = new + protected override object ExpectJson => new { query = new { diff --git a/src/Tests/Search/Suggesters/SuggestApiTest.cs b/src/Tests/Search/Suggesters/SuggestApiTests.doc.cs similarity index 94% rename from src/Tests/Search/Suggesters/SuggestApiTest.cs rename to src/Tests/Search/Suggesters/SuggestApiTests.doc.cs index ddf6724fbf9..2f04dd38193 100644 --- a/src/Tests/Search/Suggesters/SuggestApiTest.cs +++ b/src/Tests/Search/Suggesters/SuggestApiTests.doc.cs @@ -12,11 +12,14 @@ namespace Tests.Search.Suggesters { + /** == Suggest API + + */ [Collection(IntegrationContext.ReadOnly)] - public class SuggestApiTest + public class SuggestApiTests : ApiIntegrationTestBase, 
SuggestRequest> { - public SuggestApiTest(ReadOnlyCluster cluster, EndpointUsage usage) : base(cluster, usage) { } + public SuggestApiTests(ReadOnlyCluster cluster, EndpointUsage usage) : base(cluster, usage) { } protected override LazyResponses ClientUsage() => Calls( fluent: (c, f) => c.Suggest(f), @@ -31,22 +34,6 @@ protected override LazyResponses ClientUsage() => Calls( protected override string UrlPath => "/_suggest"; protected override bool SupportsDeserialization => false; - protected override void ExpectResponse(ISuggestResponse response) - { - var myCompletionSuggest = response.Suggestions["my-completion-suggest"]; - myCompletionSuggest.Should().NotBeNull(); - var suggest = myCompletionSuggest.First(); - suggest.Text.Should().Be(Project.Instance.Name); - suggest.Length.Should().BeGreaterThan(0); - var option = suggest.Options.First(); - option.Text.Should().NotBeNullOrEmpty(); - option.Score.Should().BeGreaterThan(0); - var payload = option.Payload(); - payload.Should().NotBeNull(); - payload.Name.Should().Be(Project.Instance.Name); - payload.State.Should().NotBeNull(); - } - protected override object ExpectJson => new Dictionary { { "my-completion-suggest", new { @@ -108,6 +95,7 @@ protected override void ExpectResponse(ISuggestResponse response) } }; + /** === Fluent DSL Example */ protected override Func, ISuggestRequest> Fluent => s => s .Term("my-term-suggest", t => t .MaxEdits(1) @@ -158,6 +146,7 @@ protected override void ExpectResponse(ISuggestResponse response) .RealWordErrorLikelihood(0.5) ); + /** === Object Initializer Syntax Example */ protected override SuggestRequest Initializer => new SuggestRequest { @@ -229,5 +218,28 @@ protected override void ExpectResponse(ISuggestResponse response) } }, } }; + + protected override void ExpectResponse(ISuggestResponse response) + { + /** === Handling Responses + * Get the suggestions for a suggester by indexing into + * the `.Suggestions` on the response + */ + var myCompletionSuggest = 
response.Suggestions["my-completion-suggest"]; + myCompletionSuggest.Should().NotBeNull(); + + var suggest = myCompletionSuggest.First(); + suggest.Text.Should().Be(Project.Instance.Name); + suggest.Length.Should().BeGreaterThan(0); + + var option = suggest.Options.First(); + option.Text.Should().NotBeNullOrEmpty(); + option.Score.Should().BeGreaterThan(0); + + var payload = option.Payload(); + payload.Should().NotBeNull(); + payload.Name.Should().Be(Project.Instance.Name); + payload.State.Should().NotBeNull(); + } } } diff --git a/src/Tests/aggregations-usage.asciidoc b/src/Tests/aggregations-usage.asciidoc new file mode 100644 index 00000000000..3a605010ed7 --- /dev/null +++ b/src/Tests/aggregations-usage.asciidoc @@ -0,0 +1,6 @@ +:includes-from-dirs: aggregations/bucket,aggregations/metric,aggregations/pipeline + + + + + diff --git a/src/Tests/aggregations.asciidoc b/src/Tests/aggregations.asciidoc new file mode 100644 index 00000000000..fd5ea121571 --- /dev/null +++ b/src/Tests/aggregations.asciidoc @@ -0,0 +1,14 @@ +:output-dir: aggregations + +[[aggregations]] += Aggregations + +[partintro] +-- +Aggregations are arguably one of the most powerful features of Elasticsearch and NEST +exposes all of the available Aggregation types +-- + +include::{output-dir}/writing-aggregations.asciidoc[] + +include::aggregations-usage.asciidoc[] \ No newline at end of file diff --git a/src/Tests/client-concepts.asciidoc b/src/Tests/client-concepts.asciidoc new file mode 100644 index 00000000000..1d9bbe92f75 --- /dev/null +++ b/src/Tests/client-concepts.asciidoc @@ -0,0 +1,8 @@ +include::low-level.asciidoc[] + +include::high-level.asciidoc[] + + + + + diff --git a/src/Tests/common-options.asciidoc b/src/Tests/common-options.asciidoc new file mode 100644 index 00000000000..0da6edff8b3 --- /dev/null +++ b/src/Tests/common-options.asciidoc @@ -0,0 +1,24 @@ +:output-dir: common-options + +[[common-options]] += Common Options + +[partintro] +-- +NEST has a number of types for 
working with Elasticsearch conventions for: + +- <> +- <> +- <> + +-- + +include::{output-dir}/time-unit/time-units.asciidoc[] + +include::{output-dir}/distance-unit/distance-units.asciidoc[] + +include::{output-dir}/date-math/date-math-expressions.asciidoc[] + + + + diff --git a/src/Tests/connection-pooling.asciidoc b/src/Tests/connection-pooling.asciidoc new file mode 100644 index 00000000000..eadbe1d9f85 --- /dev/null +++ b/src/Tests/connection-pooling.asciidoc @@ -0,0 +1,61 @@ +:output-dir: client-concepts/connection-pooling +:building-blocks: {output-dir}/building-blocks +:sniffing: {output-dir}/sniffing +:pinging: {output-dir}/pinging +:round-robin: {output-dir}/round-robin +:failover: {output-dir}/failover +:max-retries: {output-dir}/max-retries +:request-overrides: {output-dir}/request-overrides +:exceptions: {output-dir}/exceptions + +include::{building-blocks}/connection-pooling.asciidoc[] + +include::{building-blocks}/request-pipelines.asciidoc[] + +include::{building-blocks}/transports.asciidoc[] + +include::{building-blocks}/keeping-track-of-nodes.asciidoc[] + +include::{building-blocks}/date-time-providers.asciidoc[] + +include::{sniffing}/on-startup.asciidoc[] + +include::{sniffing}/on-connection-failure.asciidoc[] + +include::{sniffing}/on-stale-cluster-state.asciidoc[] + +include::{sniffing}/role-detection.asciidoc[] + +include::{pinging}/first-usage.asciidoc[] + +include::{pinging}/revival.asciidoc[] + +include::{round-robin}/round-robin.asciidoc[] + +include::{round-robin}/skip-dead-nodes.asciidoc[] + +include::{round-robin}/volatile-updates.asciidoc[] + +include::{failover}/falling-over.asciidoc[] + +include::{max-retries}/respects-max-retry.asciidoc[] + +include::{request-overrides}/disable-sniff-ping-per-request.asciidoc[] + +include::{request-overrides}/request-timeouts-overrides.asciidoc[] + +include::{request-overrides}/respects-max-retry-overrides.asciidoc[] + +include::{request-overrides}/respects-allowed-status-code.asciidoc[] + 
+include::{request-overrides}/respects-force-node.asciidoc[] + +include::{exceptions}/unexpected-exceptions.asciidoc[] + +include::{exceptions}/unrecoverable-exceptions.asciidoc[] + + + + + + diff --git a/src/Tests/high-level.asciidoc b/src/Tests/high-level.asciidoc new file mode 100644 index 00000000000..ce41681823f --- /dev/null +++ b/src/Tests/high-level.asciidoc @@ -0,0 +1,60 @@ +:output-dir: client-concepts/high-level + +[[nest]] += Client Concepts - NEST + +[partintro] +-- +The high level client, `ElasticClient`, provides a strongly typed query DSL that maps one-to-one with the Elasticsearch query DSL. + +It can be installed from the Package Manager Console inside Visual Studio using + +[source, shell] +---- +Install-Package NEST +---- + +Or by searching for https://www.nuget.org/packages/NEST[NEST] in the Package Manager GUI. + +NEST internally uses and still exposes the low level client, `ElasticLowLevelClient`, from <> via +the `.LowLevel` property on `ElasticClient`. + +There are a number of conventions that NEST uses for inference of + +- <> +- <> +- <> and <> +- <> +- <> +- <> + +In addition to features such as + +- <> +- <> + +-- + +include::{output-dir}/inference/index-name-inference.asciidoc[] + +include::{output-dir}/inference/indices-paths.asciidoc[] + +include::{output-dir}/inference/field-inference.asciidoc[] + +include::{output-dir}/inference/property-inference.asciidoc[] + +include::{output-dir}/inference/ids-inference.asciidoc[] + +include::{output-dir}/inference/document-paths.asciidoc[] + +include::{output-dir}/inference/features-inference.asciidoc[] + +include::{output-dir}/mapping/auto-map.asciidoc[] + +include::{output-dir}/covariant-hits/covariant-search-results.asciidoc[] + + + + + + diff --git a/src/Tests/index.asciidoc b/src/Tests/index.asciidoc index 873adf55c98..b90d7bcdd69 100644 --- a/src/Tests/index.asciidoc +++ b/src/Tests/index.asciidoc @@ -1,53 +1,14 @@ -# Introduction +[[elasticsearch-net-reference]] += Elasticsearch.Net and 
NEST: the .NET clients -You've reached the documentation page for `Elasticsearch.Net` and `NEST`. The two official .NET clients for Elasticsearch. So why two clients I hear you say? +include::intro.asciidoc[] -`Elasticsearch.Net` is a very low level, dependency free, client that has no opinions about how you build and represent your requests and responses. It has abstracted -enough so that **all** the Elasticsearch API endpoints are represented as methods but not too much to get in the way of how you want to build your json/request/response objects. It also comes with builtin, configurable/overridable, cluster failover retry mechanisms. Elasticsearch is elastic so why not your client? +include::client-concepts.asciidoc[] -`NEST` is a high level client that has the advantage of having mapped all the request and response objects, comes with a strongly typed query DSL that maps 1 to 1 with the Elasticsearch query DSL, and takes advantage of specific .NET features such as covariant results. NEST internally uses, and still exposes, the low level `Elasticsearch.Net` client. +include::common-options.asciidoc[] -Please read the getting started guide for both. +include::query-dsl.asciidoc[] - -## Who's using Nest -* [stackoverflow.com](http://www.stackoverflow.com) (and the rest of the stackexchange family). -* [7digital.com](http://www.7digital.com) (run NEST on mono). -* [rijksmuseum.nl](https://www.rijksmuseum.nl/en) (Elasticsearch is the only datastorage hit for each page). -* [Kiln](http://www.fogcreek.com/kiln/) FogCreek's version control & code review tooling. - They are so pleased with Elasticsearch that [they made a video about how pleased they are!](http://blog.fogcreek.com/kiln-powered-by-elasticsearch/) - -## Other resources - -[@joelabrahamsson](http://twitter.com/joelabrahamsson) wrote a great [intro into elasticsearch on .NET](http://joelabrahamsson.com/entry/extending-aspnet-mvc-music-store-with-elasticsearch) -using NEST. 
- -Also checkout the [searchbox.io guys](https://searchbox.io/) rocking NEST [on AppHarbor](http://blog.appharbor.com/2012/06/19/searchbox-elasticsearch-is-now-an-add-on) -with their [demo project](https://github.com/searchbox-io/.net-sample) - -## Questions, bugs, comments, requests - -All of these are more then welcome on the github issues pages! We try to to at least reply within the same day. - -We also monitor question tagged with ['nest' on stackoverflow](http://stackoverflow.com/questions/tagged/nest) or -['elasticsearch-net' on stackoverflow](http://stackoverflow.com/questions/tagged/elasticsearch-net) - -# License - -This software is licensed under the Apache 2 license, quoted below. - - Copyright (c) 2014 Elasticsearch - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. +include::aggregations.asciidoc[] diff --git a/src/Tests/intro.asciidoc b/src/Tests/intro.asciidoc new file mode 100644 index 00000000000..0b41b0b3bd4 --- /dev/null +++ b/src/Tests/intro.asciidoc @@ -0,0 +1,58 @@ +:github: https://github.com/elastic/elasticsearch-net +:stackoverflow: http://stackoverflow.com + +[[introduction]] +== Introduction + +You've reached the documentation page for `Elasticsearch.Net` and `NEST`. The two official .NET clients for Elasticsearch. So why two clients I hear you say? + +`Elasticsearch.Net` is a very low level, dependency free, client that has no opinions about how you build and represent your requests and responses. 
It has abstracted +enough so that **all** the Elasticsearch API endpoints are represented as methods but not too much to get in the way of how you want to build your json/request/response objects. It also comes with builtin, configurable/overridable, cluster failover retry mechanisms. Elasticsearch is elastic so why not your client? + +`NEST` is a high level client that has the advantage of having mapped all the request and response objects, comes with a strongly typed query DSL that maps 1 to 1 with the Elasticsearch query DSL, and takes advantage of specific .NET features such as covariant results. NEST internally uses, and still exposes, the low level `Elasticsearch.Net` client. + +Please read the getting started guide for both. + +=== Who's using Nest +- {stackoverflow}[stackoverflow.com] (and the rest of the stackexchange family). +- http://www.7digital.com[7digital.com] (run NEST on mono). +- https://www.rijksmuseum.nl/en[rijksmuseum.nl] (Elasticsearch is the only datastorage hit for each page). +- http://www.fogcreek.com/kiln/[Kiln] FogCreek's version control & code review tooling. + They are so pleased with Elasticsearch that http://blog.fogcreek.com/kiln-powered-by-elasticsearch/[they made a video about how pleased they are!] + +=== Other resources + +http://twitter.com/joelabrahamsson[@joelabrahamsson] wrote a great http://joelabrahamsson.com/entry/extending-aspnet-mvc-music-store-with-elasticsearch[intro into elasticsearch on .NET] +using NEST. + +Also check out the https://searchbox.io/[searchbox.io guys] rocking NEST http://blog.appharbor.com/2012/06/19/searchbox-elasticsearch-is-now-an-add-on[on AppHarbor] +with their https://github.com/searchbox-io/.net-sample[demo project] + +=== Questions, bugs, comments, requests + +All of these are more than welcome on the {github}/issues[github issues pages]! We try to at least reply within the same day.
+ +We also monitor questions tagged with {stackoverflow}/questions/tagged/nest['nest' on stackoverflow] or +{stackoverflow}/questions/tagged/elasticsearch-net['elasticsearch-net' on stackoverflow], as well as https://discuss.elastic.co[discussions on our discourse site] + +=== License + +.... +This software is licensed under the Apache 2 license, quoted below. + + Copyright (c) 2014 Elasticsearch + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +.... + + diff --git a/src/Tests/low-level.asciidoc b/src/Tests/low-level.asciidoc new file mode 100644 index 00000000000..e4a56b277e5 --- /dev/null +++ b/src/Tests/low-level.asciidoc @@ -0,0 +1,28 @@ +:output-dir: client-concepts/low-level + +[[elasticsearch-net]] += Client Concepts - Elasticsearch.Net + +[partintro] +-- +The low level client, `ElasticLowLevelClient`, is a low level, dependency free client that has no +opinions about how you build and represent your requests and responses. + +It can be installed from the Package Manager Console inside Visual Studio using + +[source, shell] +---- +Install-Package Elasticsearch.Net +---- + +Or by searching for https://www.nuget.org/packages/Elasticsearch.Net[Elasticsearch.Net] in the Package Manager GUI.
+-- + +include::{output-dir}/connecting.asciidoc[] + +include::{output-dir}/lifetimes.asciidoc[] + +include::{output-dir}/post-data.asciidoc[] + +include::connection-pooling.asciidoc[] + diff --git a/src/Tests/query-dsl-usage.asciidoc b/src/Tests/query-dsl-usage.asciidoc new file mode 100644 index 00000000000..ae871f36c9f --- /dev/null +++ b/src/Tests/query-dsl-usage.asciidoc @@ -0,0 +1,6 @@ +:includes-from-dirs: query-dsl/compound,query-dsl/full-text,query-dsl/geo,query-dsl/joining,query-dsl/nest-specific,query-dsl/span,query-dsl/specialized,query-dsl/term-level + + + + + diff --git a/src/Tests/query-dsl.asciidoc b/src/Tests/query-dsl.asciidoc new file mode 100644 index 00000000000..f79459d44c0 --- /dev/null +++ b/src/Tests/query-dsl.asciidoc @@ -0,0 +1,17 @@ +:output-dir: query-dsl + +[[query-dsl]] += Query DSL + +[partintro] +-- +NEST exposes all of the query DSL endpoints available in Elasticsearch +-- + +include::{output-dir}/bool-dsl/bool-dsl.asciidoc[] + +include::query-dsl-usage.asciidoc[] + + + + diff --git a/src/Tests/tests.yaml b/src/Tests/tests.yaml index 0ea63a0af78..74cf2393ae3 100644 --- a/src/Tests/tests.yaml +++ b/src/Tests/tests.yaml @@ -1,5 +1,5 @@ # mode either u (unit test), i (integration test) or m (mixed mode) -mode: u +mode: m # the elasticsearch version that should be started elasticsearch_version: 2.2.0 # whether we want to forcefully reseed on the node, if you are starting the tests with a node already running diff --git a/src/global.json b/src/global.json index 7a69a1ac22c..52a93658e3b 100644 --- a/src/global.json +++ b/src/global.json @@ -1,8 +1,11 @@ { - "projects": [ "src" ], - "sdk": { - "version": "1.0.0-rc1-update1", - "runtime": "clr", - "architecture": "x86" - } + "projects": [ + "src", + "wrap" + ], + "sdk": { + "version": "1.0.0-rc1-update1", + "runtime": "clr", + "architecture": "x86" + } } \ No newline at end of file diff --git a/src/lib/dnx451/AsciiDoc.dll b/src/lib/dnx451/AsciiDoc.dll new file mode 100644 index 
00000000000..7e19b818484 Binary files /dev/null and b/src/lib/dnx451/AsciiDoc.dll differ diff --git a/src/lib/dnx451/AsciiDoc.pdb b/src/lib/dnx451/AsciiDoc.pdb new file mode 100644 index 00000000000..075c1d0b2d7 Binary files /dev/null and b/src/lib/dnx451/AsciiDoc.pdb differ diff --git a/src/wrap/AsciiDoc/project.json b/src/wrap/AsciiDoc/project.json new file mode 100644 index 00000000000..cdfff018832 --- /dev/null +++ b/src/wrap/AsciiDoc/project.json @@ -0,0 +1,10 @@ +{ + "version": "1.0.0-*", + "frameworks": { + "dnx451": { + "bin": { + "assembly": "../../lib/dnx451/AsciiDoc.dll" + } + } + } +} \ No newline at end of file