Skip to content

Commit a66ad0e

Browse files
committed
Merge branch 'master' into feature/ingest-api
# Conflicts: # src/Nest/Nest.csproj # src/Tests/Tests.csproj
2 parents a1833b5 + c4f3b38 commit a66ad0e

File tree

888 files changed

+2245
-26413
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

888 files changed

+2245
-26413
lines changed

build.bat

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ REM - elasticsearch_versions can be multiple separated with a semi-colon ';'
1212
if errorlevel 1 (
1313
exit /b %errorlevel%
1414
)
15-
.paket\paket.exe install
15+
.paket\paket.exe restore
1616
if errorlevel 1 (
1717
exit /b %errorlevel%
1818
)
@@ -23,7 +23,7 @@ SET ESVERSIONS=
2323
SET DNXVERSION="default"
2424
SET SKIPTESTS=0
2525
SET APIKEY=
26-
26+
SET FEED="elasticsearch-net"
2727

2828
IF /I "%1"=="skiptests" (
2929
set SKIPTESTS="1"
@@ -51,8 +51,7 @@ IF /I "%1%"=="integrate" (
5151

5252
IF /I "%1%"=="canary" (
5353
IF NOT [%2]==[] (set APIKEY="%2")
54-
IF /I "%3"=="skiptests" (set SKIPTESTS=1)
55-
IF /I "%2"=="skiptests" (set SKIPTESTS=1)
54+
IF NOT [%3]==[] (set FEED="%3")
5655
)
5756

58-
"packages\build\FAKE\tools\Fake.exe" "build\\scripts\\Targets.fsx" "target=%TARGET%" "version=%VERSION%" "esversions=%ESVERSIONS%" "skiptests=%SKIPTESTS%" "apiKey=%APIKEY%"
57+
"packages\build\FAKE\tools\Fake.exe" "build\\scripts\\Targets.fsx" "target=%TARGET%" "version=%VERSION%" "esversions=%ESVERSIONS%" "skiptests=%SKIPTESTS%" "apiKey=%APIKEY%" "feed=%FEED%"

build/scripts/Releasing.fsx

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -102,10 +102,11 @@ type Release() =
102102
MoveFile Paths.NugetOutput package
103103
)
104104

105-
static member PublishCanaryBuild accessKey =
105+
static member PublishCanaryBuild accessKey feed =
106106
!! "build/output/_packages/*-ci*.nupkg"
107107
|> Seq.iter(fun f ->
108-
let success = Tooling.execProcess (Tooling.NugetFile()) ["push"; f; accessKey; "-source"; "https://www.myget.org/F/elasticsearch-net/api/v2/package"]
108+
let source = "https://www.myget.org/F/" + feed + "/api/v2/package"
109+
let success = Tooling.execProcess (Tooling.NugetFile()) ["push"; f; accessKey; "-source"; source]
109110
match success with
110111
| 0 -> traceFAKE "publish to myget succeeded" |> ignore
111112
| _ -> failwith "publish to myget failed" |> ignore

build/scripts/Targets.fsx

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -37,13 +37,13 @@ Target "Clean" <| fun _ -> CleanDir Paths.BuildOutput
3737

3838
Target "BuildApp" <| fun _ -> Build.Compile()
3939

40-
Target "Test" <| fun _ -> Tests.RunUnitTests()
40+
Target "Test" <| fun _ -> Tests.RunUnitTests()
4141

42-
Target "QuickTest" <| fun _ -> Tests.RunUnitTests()
42+
Target "QuickTest" <| fun _ -> Tests.RunUnitTests()
4343

44-
Target "Integrate" <| fun _ -> Tests.RunIntegrationTests() (getBuildParamOrDefault "esversions" "")
44+
Target "Integrate" <| fun _ -> Tests.RunIntegrationTests() (getBuildParamOrDefault "esversions" "")
4545

46-
Target "WatchTests" <| fun _ ->
46+
Target "WatchTests" <| fun _ ->
4747
traceFAKE "Starting quick test (incremental compile then test)"
4848
use watcher = (!! "src/Tests/**/*.cs").And("src/Tests/**/*.md") |> WatchChanges (fun changes ->
4949
printfn "%A" changes
@@ -59,7 +59,7 @@ Target "Profile" <| fun _ -> Profiler.Run()
5959

6060
Target "Benchmark" <| fun _ -> Benchmarker.Run()
6161

62-
Target "QuickCompile" <| fun _ -> Build.QuickCompile()
62+
Target "QuickCompile" <| fun _ -> Build.QuickCompile()
6363

6464
Target "Version" <| fun _ ->
6565
Versioning.PatchAssemblyInfos()
@@ -74,7 +74,8 @@ Target "Release" <| fun _ ->
7474
Target "Canary" <| fun _ ->
7575
trace "Running canary build"
7676
let apiKey = (getBuildParam "apikey");
77-
if (not (String.IsNullOrWhiteSpace apiKey) || apiKey = "ignore") then Release.PublishCanaryBuild apiKey
77+
let feed = (getBuildParamOrDefault "feed" "elasticsearch-net");
78+
if (not (String.IsNullOrWhiteSpace apiKey) || apiKey = "ignore") then Release.PublishCanaryBuild apiKey feed
7879

7980
BuildFailureTarget "NotifyTestFailures" <| fun _ -> Tests.Notify() |> ignore
8081

docs/asciidoc/aggregations/bucket/filter/filter-aggregation-usage.asciidoc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77
[[filter-aggregation-usage]]
88
== Filter Aggregation Usage
99

10-
Defines a single bucket of all the documents in the current document set context that match a specified filter.
10+
Defines a single bucket of all the documents in the current document set context that match a specified filter.
1111
Often this will be used to narrow down the current aggregation context to a specific set of documents.
1212

1313
Be sure to read the Elasticsearch documentation on {ref_current}/search-aggregations-bucket-filter-aggregation.html[Filter Aggregation]

docs/asciidoc/aggregations/pipeline/moving-average/moving-average-ewma-aggregation-usage.asciidoc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@ new SearchRequest<Project>()
4545
{
4646
Field = "startedOn",
4747
Interval = DateInterval.Month,
48-
Aggregations =
48+
Aggregations =
4949
new SumAggregation("commits", "numberOfCommits") &&
5050
new MovingAverageAggregation("commits_moving_avg", "commits")
5151
{

docs/asciidoc/aggregations/pipeline/moving-average/moving-average-holt-linear-aggregation-usage.asciidoc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ new SearchRequest<Project>()
4646
{
4747
Field = "startedOn",
4848
Interval = DateInterval.Month,
49-
Aggregations =
49+
Aggregations =
5050
new SumAggregation("commits", "numberOfCommits") &&
5151
new MovingAverageAggregation("commits_moving_avg", "commits")
5252
{

docs/asciidoc/aggregations/pipeline/moving-average/moving-average-holt-winters-aggregation-usage.asciidoc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ new SearchRequest<Project>()
5151
{
5252
Field = "startedOn",
5353
Interval = DateInterval.Month,
54-
Aggregations =
54+
Aggregations =
5555
new SumAggregation("commits", "numberOfCommits") &&
5656
new MovingAverageAggregation("commits_moving_avg", "commits")
5757
{

docs/asciidoc/aggregations/pipeline/moving-average/moving-average-linear-aggregation-usage.asciidoc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@ new SearchRequest<Project>()
4444
{
4545
Field = "startedOn",
4646
Interval = DateInterval.Month,
47-
Aggregations =
47+
Aggregations =
4848
new SumAggregation("commits", "numberOfCommits") &&
4949
new MovingAverageAggregation("commits_moving_avg", "commits")
5050
{

docs/asciidoc/aggregations/pipeline/moving-average/moving-average-simple-aggregation-usage.asciidoc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@ new SearchRequest<Project>()
4545
{
4646
Field = "startedOn",
4747
Interval = DateInterval.Month,
48-
Aggregations =
48+
Aggregations =
4949
new SumAggregation("commits", "numberOfCommits") &&
5050
new MovingAverageAggregation("commits_moving_avg", "commits")
5151
{

docs/asciidoc/client-concepts/connection-pooling/building-blocks/date-time-providers.asciidoc

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77
[[date-time-providers]]
88
== Date time providers
99

10-
Not typically something you'll have to pass to the client but all calls to `System.DateTime.UtcNow`
10+
Not typically something you'll have to pass to the client but all calls to `System.DateTime.UtcNow`
1111
in the client have been abstracted by `IDateTimeProvider`. This allows us to unit test timeouts and cluster failover
1212
without being bound to wall clock time as calculated by using `System.DateTime.UtcNow` directly.
1313

@@ -45,7 +45,7 @@ var maxTimeout = TimeSpan.FromMinutes(30);
4545
Plotting these defaults looks as followed:
4646

4747
[[timeout]]
48-
.Default formula, x-axis time in minutes, y-axis number of attempts to revive
48+
.Default formula, x-axis number of attempts to revive, y-axis time in minutes
4949
image::timeoutplot.png[dead timeout]
5050

5151
The goal here is that whenever a node is resurrected and is found to still be offline, we send it _back to the doghouse_ for an ever increasingly long period, until we hit a bounded maximum.

docs/asciidoc/client-concepts/connection-pooling/pinging/first-usage.asciidoc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ var audit = new Auditor(() => Framework.Cluster
6767
The first call goes to 9200 which succeeds
6868

6969
The 2nd call does a ping on 9201 because it's used for the first time.
70-
It fails and so we ping 9202 which also fails. We then ping 9203 becuase
70+
It fails and so we ping 9202 which also fails. We then ping 9203 because
7171
we haven't used it before and it succeeds
7272

7373
Finally we assert that the connectionpool has two nodes that are marked as dead

docs/asciidoc/client-concepts/connection-pooling/round-robin/round-robin.asciidoc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@ round robin over the `live` nodes to evenly distribute request load over all kno
1414

1515
`GetNext` is implemented in a lock free thread safe fashion, meaning each callee gets returned its own cursor to advance
1616
over the internal list of nodes. This to guarantee each request that needs to fall over tries all the nodes without
17-
suffering from noisy neighboors advancing a global cursor.
17+
suffering from noisy neighbours advancing a global cursor.
1818

1919
[source,csharp]
2020
----

docs/asciidoc/client-concepts/connection-pooling/round-robin/skip-dead-nodes.asciidoc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ When selecting nodes the connection pool will try and skip all the nodes that ar
1313

1414
GetNext is implemented in a lock free thread safe fashion, meaning each callee gets returned its own cursor to advance
1515
over the internal list of nodes. This to guarantee each request that needs to fall over tries all the nodes without
16-
suffering from noisy neighboors advancing a global cursor.
16+
suffering from noisy neighbours advancing a global cursor.
1717

1818
[source,csharp]
1919
----

docs/asciidoc/client-concepts/connection-pooling/sniffing/on-connection-failure.asciidoc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ Here we seed our connection with 5 known nodes 9200-9204 of which we think
1717
9202, 9203, 9204 are master eligible nodes. Our virtualized cluster will throw once when doing
1818
a search on 9201. This should cause a sniff to be kicked off.
1919

20-
When the call fails on 9201 the sniff succeeds and returns a new cluster of healty nodes
20+
When the call fails on 9201 the sniff succeeds and returns a new cluster of healthy nodes
2121
this cluster only has 3 nodes and the known masters are 9200 and 9202 but a search on 9201
2222
still fails once
2323

docs/asciidoc/client-concepts/connection-pooling/sniffing/on-stale-cluster-state.asciidoc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
== Sniffing periodically
99

1010
Connection pools that return true for `SupportsReseeding` can be configured to sniff periodically.
11-
In addition to sniffing on startup and sniffing on failures, sniffing periodically can benefit scenerio's where
11+
In addition to sniffing on startup and sniffing on failures, sniffing periodically can benefit scenarios where
1212
clusters are often scaled horizontally during peak hours. An application might have a healthy view of a subset of the nodes
1313
but without sniffing periodically it will never find the nodes that have been added to help out with load
1414

docs/asciidoc/client-concepts/connection-pooling/sniffing/role-detection.asciidoc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -103,7 +103,7 @@ await pipeline.SniffAsync();
103103
[source,csharp]
104104
----
105105
this._settings =
106-
this._cluster.Client(u => new SniffingConnectionPool(new[] {u}), c => c.PrettyJson()).ConnectionSettings;
106+
this._cluster.Node.Client(u => new SniffingConnectionPool(new[] {u}), c => c.PrettyJson()).ConnectionSettings;
107107
108108
var pipeline = new RequestPipeline(this._settings, DateTimeProvider.Default, new MemoryStreamFactory(),
109109
new SearchRequestParameters());

docs/asciidoc/client-concepts/connection-pooling/sticky/skip-dead-nodes.asciidoc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@ node = pool.CreateView().First();
3939
node.Uri.Port.Should().Be(9200);
4040
----
4141

42-
After we marke the first node alive again we expect it to be hit again
42+
After we mark the first node alive again we expect it to be hit again
4343

4444
[source,csharp]
4545
----

docs/asciidoc/client-concepts/high-level/inference/document-paths.asciidoc

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,8 +8,8 @@
88
== Document Paths
99

1010
Many API's in Elasticsearch describe a path to a document. In NEST, besides generating a constructor that takes
11-
and Index, Type and Id seperately, we also generate a constructor taking a `DocumentPath` that allows you to describe the path
12-
to your document more succintly
11+
and Index, Type and Id separately, we also generate a constructor taking a `DocumentPath` that allows you to describe the path
12+
to your document more succinctly
1313

1414
=== Creating new instances
1515

docs/asciidoc/client-concepts/high-level/inference/features-inference.asciidoc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ Expect("_mappings,_aliases")
2323
.WhenSerializing(fieldString);
2424
----
2525

26-
Here we new an GET index elasticsearch request whichs takes Indices and Features.
26+
Here we new a GET index elasticsearch request which takes Indices and Features.
2727
Notice how we can use the Feature enum directly.
2828

2929
[source,csharp]

docs/asciidoc/client-concepts/high-level/inference/field-inference.asciidoc

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
== Field Inference
99

1010
Several places in the Elasticsearch API expect the path to a field from your original source document as a string.
11-
NEST allows you to use C# expressions to strongly type these field path strings.
11+
NEST allows you to use C# expressions to strongly type these field path strings.
1212

1313
These expressions are assigned to a type called `Field` and there are several ways to create an instance of one
1414

@@ -311,7 +311,7 @@ and we want to append the suffix "raw" to each
311311

312312
[source,csharp]
313313
----
314-
var fieldExpressions =
314+
var fieldExpressions =
315315
expressions.Select<Expression<Func<Project, object>>, Field>(e => e.AppendSuffix("raw")).ToList();
316316
317317
Expect("name.raw").WhenSerializing(fieldExpressions[0]);
@@ -411,7 +411,7 @@ var fieldNameOnA = client.Infer.Field(Field<A>(p => p.C.Name));
411411
var fieldNameOnB = client.Infer.Field(Field<B>(p => p.C.Name));
412412
----
413413

414-
Here we have to similary shaped expressions on coming from A and on from B
414+
Here we have two similarly shaped expressions, one coming from A and one from B
415415
that will resolve to the same field name, as expected
416416

417417
[source,csharp]
@@ -444,7 +444,7 @@ fieldNameOnA.Should().Be("d.name");
444444
fieldNameOnB.Should().Be("c.name");
445445
----
446446

447-
however we didn't break inferrence on the first client instance using its separate connection settings
447+
however we didn't break inference on the first client instance using its separate connection settings
448448

449449
[source,csharp]
450450
----
@@ -466,7 +466,7 @@ To wrap up, the precedence in which field names are inferred is:
466466

467467
. A NEST property mapping
468468

469-
. Ask the serializer if the property has a verbatim value e.g it has an explicit JsonPropery attribute.
469+
. Ask the serializer if the property has a verbatim value e.g it has an explicit JsonProperty attribute.
470470

471471
. Pass the MemberInfo's Name to the DefaultFieldNameInferrer which by default camelCases
472472

docs/asciidoc/client-concepts/high-level/inference/index-name-inference.asciidoc

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -12,9 +12,9 @@ as part of the request in order to know what index/indices a request should oper
1212

1313
NEST has a number of ways in which an index name can be specified
1414

15-
=== Default Index name on ConnectionSettings
15+
=== Default Index name on Connection Settings
1616

17-
A default index name can be specified on `ConnectionSettings` usinf `.DefaultIndex()`.
17+
A default index name can be specified on `ConnectionSettings` using `.DefaultIndex()`.
1818
This is the default index name to use when no other index name can be resolved for a request
1919

2020
[source,csharp]

0 commit comments

Comments
 (0)