diff --git a/.ci/run-elasticsearch.ps1 b/.ci/run-elasticsearch.ps1 index 33eb774d..9f4ae675 100644 --- a/.ci/run-elasticsearch.ps1 +++ b/.ci/run-elasticsearch.ps1 @@ -217,7 +217,7 @@ $volumes = @( "--volume", "${volume_name}:/usr/share/elasticsearch/data" ) -if (-not ($version -contains "oss")) { +if (-not ($version -match "oss")) { $environment += @( "--env", "ELASTIC_PASSWORD=`"$ELASTIC_PASSWORD`"", "--env", "xpack.license.self_generated.type=trial", @@ -241,7 +241,7 @@ if (-not ($version -contains "oss")) { } $url="http://$NODE_NAME" -if (-not ($version -contains "oss")) { +if (-not ($version -match "oss")) { $url="https://elastic:$ELASTIC_PASSWORD@$NODE_NAME" } diff --git a/.gitignore b/.gitignore index 4bd629e5..07d24d90 100644 --- a/.gitignore +++ b/.gitignore @@ -5,4 +5,7 @@ Cargo.lock .idea .vscode/ *.log -yaml_test_runner/ +yaml_test_runner/yaml/ +yaml_test_runner/tests/oss +yaml_test_runner/tests/xpack +yaml_test_runner/tests/mod.rs diff --git a/Cargo.toml b/Cargo.toml index 56f48afc..1dccee9b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,6 @@ [workspace] members = [ "api_generator", - "elasticsearch" + "elasticsearch", + "yaml_test_runner" ] \ No newline at end of file diff --git a/api_generator/last_downloaded_version b/api_generator/rest_specs/last_downloaded_version similarity index 100% rename from api_generator/last_downloaded_version rename to api_generator/rest_specs/last_downloaded_version diff --git a/api_generator/src/main.rs b/api_generator/src/bin/run.rs similarity index 74% rename from api_generator/src/main.rs rename to api_generator/src/bin/run.rs index 76cf9624..deecbd89 100644 --- a/api_generator/src/main.rs +++ b/api_generator/src/bin/run.rs @@ -16,36 +16,28 @@ * specific language governing permissions and limitations * under the License. 
*/ +extern crate api_generator; extern crate dialoguer; -#[macro_use] -extern crate lazy_static; - -#[macro_use] -extern crate quote; - +use api_generator::{generator, rest_spec}; use dialoguer::Input; use std::path::PathBuf; use std::{ fs::{self, File}, io::Write, - path::Path, }; -mod api_generator; -mod error; -mod rest_spec; - -fn main() { +fn main() -> Result<(), failure::Error> { // This must be run from the src root directory, with cargo run -p api_generator - let download_dir = fs::canonicalize(PathBuf::from("./api_generator/rest_specs")).unwrap(); - let generated_dir = fs::canonicalize(PathBuf::from("./elasticsearch/src/generated")).unwrap(); - let last_downloaded_version = "./api_generator/last_downloaded_version"; + let download_dir = fs::canonicalize(PathBuf::from("./api_generator/rest_specs"))?; + let generated_dir = fs::canonicalize(PathBuf::from("./elasticsearch/src/generated"))?; + let last_downloaded_version = + PathBuf::from("./api_generator/rest_specs/last_downloaded_version"); let mut download_specs = false; let mut answer = String::new(); - let default_branch = if Path::new(last_downloaded_version).exists() { - fs::read_to_string(last_downloaded_version).expect("Could not read branch into string") + let default_branch = if last_downloaded_version.exists() { + fs::read_to_string(&last_downloaded_version)? 
} else { String::from("master") }; @@ -76,13 +68,9 @@ fn main() { .interact() .unwrap(); - fs::remove_dir_all(&download_dir).unwrap(); - rest_spec::download_specs(&branch, &download_dir); - - File::create(last_downloaded_version) - .expect("failed to create last_downloaded_version file") - .write_all(branch.as_bytes()) - .expect("unable to write branch to last_downloaded_version file"); + fs::remove_dir_all(&download_dir)?; + rest_spec::download_specs(&branch, &download_dir)?; + File::create(&last_downloaded_version)?.write_all(branch.as_bytes())?; } // only offer to generate if there are downloaded specs @@ -109,12 +97,13 @@ fn main() { if generate_code { // delete existing generated files if the exist if generated_dir.exists() { - fs::remove_dir_all(&generated_dir).unwrap(); + fs::remove_dir_all(&generated_dir)?; } - fs::create_dir_all(&generated_dir).unwrap(); - - api_generator::generate(&branch, &download_dir, &generated_dir).unwrap(); + fs::create_dir_all(&generated_dir)?; + generator::generate(&branch, &download_dir, &generated_dir)?; } } + + Ok(()) } diff --git a/api_generator/src/api_generator/code_gen/mod.rs b/api_generator/src/generator/code_gen/mod.rs similarity index 85% rename from api_generator/src/api_generator/code_gen/mod.rs rename to api_generator/src/generator/code_gen/mod.rs index 444228fa..c78f0a30 100644 --- a/api_generator/src/api_generator/code_gen/mod.rs +++ b/api_generator/src/generator/code_gen/mod.rs @@ -22,7 +22,7 @@ pub mod request; pub mod root; pub mod url; -use crate::api_generator::TypeKind; +use crate::generator::TypeKind; use inflector::Inflector; use quote::Tokens; use std::str; @@ -73,7 +73,7 @@ pub fn parse_expr(input: quote::Tokens) -> syn::Expr { } /// Ensures that the name generated is one that is valid for Rust -fn valid_name(s: &str) -> &str { +pub fn valid_name(s: &str) -> &str { match s { "type" => "ty", s => s, @@ -116,7 +116,7 @@ impl GetPath for syn::Ty { fn get_path(&self) -> &syn::Path { match *self { 
syn::Ty::Path(_, ref p) => &p, - _ => panic!("Only path types are supported."), + ref p => panic!(format!("Expected syn::Ty::Path, but found {:?}", p)), } } } @@ -138,7 +138,8 @@ impl GetIdent for T { } /// Gets the Ty syntax token for a TypeKind -fn typekind_to_ty(name: &str, kind: &TypeKind, required: bool) -> syn::Ty { +/// TODO: This function is serving too many purposes. Refactor it +fn typekind_to_ty(name: &str, kind: &TypeKind, required: bool, fn_arg: bool) -> syn::Ty { let mut v = String::new(); if !required { v.push_str("Option<"); @@ -146,13 +147,33 @@ fn typekind_to_ty(name: &str, kind: &TypeKind, required: bool) -> syn::Ty { let str_type = "&'b str"; match kind { - TypeKind::None => v.push_str(str_type), - TypeKind::List => v.push_str(format!("&'b [{}]", str_type).as_ref()), - TypeKind::Enum => v.push_str(name.to_pascal_case().as_str()), + TypeKind::Unknown(_) => v.push_str(str_type), + TypeKind::List => { + v.push_str("&'b ["); + v.push_str(str_type); + v.push_str("]"); + } + TypeKind::Enum => match name { + // opened https://github.com/elastic/elasticsearch/issues/53212 + // to discuss whether this really should be a collection + "expand_wildcards" => { + // Expand wildcards should + v.push_str("&'b ["); + v.push_str(name.to_pascal_case().as_str()); + v.push_str("]"); + } + _ => v.push_str(name.to_pascal_case().as_str()), + }, TypeKind::String => v.push_str(str_type), TypeKind::Text => v.push_str(str_type), TypeKind::Boolean => match name { - "track_total_hits" => v.push_str("TrackTotalHits"), + "track_total_hits" => { + if fn_arg { + v.push_str(format!("Into<{}>", name.to_pascal_case()).as_str()) + } else { + v.push_str(name.to_pascal_case().as_str()) + } + } _ => v.push_str("bool"), }, TypeKind::Number => v.push_str("i64"), diff --git a/api_generator/src/api_generator/code_gen/namespace_clients.rs b/api_generator/src/generator/code_gen/namespace_clients.rs similarity index 96% rename from api_generator/src/api_generator/code_gen/namespace_clients.rs 
rename to api_generator/src/generator/code_gen/namespace_clients.rs index a04f0bf4..25e66335 100644 --- a/api_generator/src/api_generator/code_gen/namespace_clients.rs +++ b/api_generator/src/generator/code_gen/namespace_clients.rs @@ -16,9 +16,9 @@ * specific language governing permissions and limitations * under the License. */ -use crate::api_generator::*; -use crate::api_generator::code_gen::request::request_builder::RequestBuilder; -use crate::api_generator::code_gen::*; +use crate::generator::code_gen::request::request_builder::RequestBuilder; +use crate::generator::code_gen::*; +use crate::generator::*; use inflector::Inflector; use quote::Tokens; use std::path::PathBuf; diff --git a/api_generator/src/api_generator/code_gen/params.rs b/api_generator/src/generator/code_gen/params.rs similarity index 98% rename from api_generator/src/api_generator/code_gen/params.rs rename to api_generator/src/generator/code_gen/params.rs index e3641d97..36b762d5 100644 --- a/api_generator/src/api_generator/code_gen/params.rs +++ b/api_generator/src/generator/code_gen/params.rs @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -use crate::api_generator::*; +use crate::generator::*; use inflector::Inflector; use quote::Tokens; use regex::Regex; diff --git a/api_generator/src/api_generator/code_gen/request/mod.rs b/api_generator/src/generator/code_gen/request/mod.rs similarity index 100% rename from api_generator/src/api_generator/code_gen/request/mod.rs rename to api_generator/src/generator/code_gen/request/mod.rs diff --git a/api_generator/src/api_generator/code_gen/request/request_builder.rs b/api_generator/src/generator/code_gen/request/request_builder.rs similarity index 93% rename from api_generator/src/api_generator/code_gen/request/request_builder.rs rename to api_generator/src/generator/code_gen/request/request_builder.rs index 673170bd..7afc1d84 100644 --- a/api_generator/src/api_generator/code_gen/request/request_builder.rs +++ b/api_generator/src/generator/code_gen/request/request_builder.rs @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -use crate::api_generator::{ +use crate::generator::{ code_gen, code_gen::url::enum_builder::EnumBuilder, code_gen::*, ApiEndpoint, HttpMethod, Type, TypeKind, }; @@ -25,7 +25,7 @@ use quote::{ToTokens, Tokens}; use reqwest::Url; use std::path::PathBuf; use std::{collections::BTreeMap, fs, str}; -use syn::{Field, FieldValue, ImplItem}; +use syn::{Field, FieldValue, ImplItem, TraitBoundModifier, TyParamBound}; /// Builder that generates the AST for a request builder struct pub struct RequestBuilder<'a> { @@ -126,7 +126,8 @@ impl<'a> RequestBuilder<'a> { let struct_fields = endpoint_params.iter().map(|(param_name, param_type)| { let field = Self::create_struct_field((param_name, param_type)); let field_rename = lit(param_name); - if param_type.ty == TypeKind::List { + // TODO: we special case expand_wildcards here to be a list, but this should be fixed upstream + if param_type.ty == TypeKind::List || param_name == "expand_wildcards" { let serialize_with = 
lit("crate::client::serialize_coll_qs"); quote! { #[serde(rename = #field_rename, serialize_with = #serialize_with)] @@ -352,10 +353,36 @@ impl<'a> RequestBuilder<'a> { /// Creates the AST for a builder fn for a builder impl fn create_impl_fn(f: (&String, &Type)) -> syn::ImplItem { let name = valid_name(&f.0).to_lowercase(); + let (ty, value_ident, fn_generics) = { + let ty = typekind_to_ty(&f.0, &f.1.ty, true, true); + match ty { + syn::Ty::Path(ref _q, ref p) => { + if p.get_ident().as_ref() == "Into" { + let ty = syn::parse_type("T").unwrap(); + let ident = code_gen::ident(format!("{}.into()", &name)); + let ty_param = syn::TyParam { + ident: code_gen::ident("T"), + default: None, + attrs: vec![], + bounds: vec![TyParamBound::Trait( + syn::PolyTraitRef { + trait_ref: p.clone(), + bound_lifetimes: vec![], + }, + TraitBoundModifier::None, + )], + }; + let generics = generics(vec![], vec![ty_param]); + (ty, ident, generics) + } else { + (ty, ident(&name), generics_none()) + } + } + _ => (ty, ident(&name), generics_none()), + } + }; let impl_ident = ident(&name); let field_ident = ident(&name); - let value_ident = ident(&name); - let ty = typekind_to_ty(&f.0, &f.1.ty, true); let doc_attr = match &f.1.description { Some(docs) => vec![doc(docs)], _ => vec![], @@ -382,7 +409,7 @@ impl<'a> RequestBuilder<'a> { output: syn::FunctionRetTy::Ty(code_gen::ty("Self")), variadic: false, }, - generics: generics_none(), + generics: fn_generics, }, // generates a fn body of the form // -------- @@ -409,13 +436,11 @@ impl<'a> RequestBuilder<'a> { enum_builder: &EnumBuilder, accepts_nd_body: bool, ) -> Tokens { - // TODO: lazy_static! for this? let mut common_fields: Vec = common_params .iter() .map(Self::create_struct_field) .collect(); - // TODO: lazy_static! for this? 
let mut common_builder_fns: Vec = common_params.iter().map(Self::create_impl_fn).collect(); @@ -667,7 +692,7 @@ impl<'a> RequestBuilder<'a> { ident: Some(ident(valid_name(&f.0).to_lowercase())), vis: syn::Visibility::Inherited, attrs: vec![], - ty: typekind_to_ty(&f.0, &f.1.ty, false), + ty: typekind_to_ty(&f.0, &f.1.ty, false, false), } } diff --git a/api_generator/src/api_generator/code_gen/root.rs b/api_generator/src/generator/code_gen/root.rs similarity index 92% rename from api_generator/src/api_generator/code_gen/root.rs rename to api_generator/src/generator/code_gen/root.rs index a1eb2bf4..bcb9d559 100644 --- a/api_generator/src/api_generator/code_gen/root.rs +++ b/api_generator/src/generator/code_gen/root.rs @@ -16,9 +16,9 @@ * specific language governing permissions and limitations * under the License. */ -use crate::api_generator::*; -use crate::api_generator::code_gen::request::request_builder::RequestBuilder; -use crate::api_generator::code_gen::*; +use crate::generator::code_gen::request::request_builder::RequestBuilder; +use crate::generator::code_gen::*; +use crate::generator::*; use inflector::Inflector; use quote::Tokens; use std::path::PathBuf; diff --git a/api_generator/src/api_generator/code_gen/url/enum_builder.rs b/api_generator/src/generator/code_gen/url/enum_builder.rs similarity index 97% rename from api_generator/src/api_generator/code_gen/url/enum_builder.rs rename to api_generator/src/generator/code_gen/url/enum_builder.rs index 6d917c51..6838c9e2 100644 --- a/api_generator/src/api_generator/code_gen/url/enum_builder.rs +++ b/api_generator/src/generator/code_gen/url/enum_builder.rs @@ -32,8 +32,8 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -use crate::api_generator::code_gen::url::url_builder::{IntoExpr, UrlBuilder}; -use crate::api_generator::{code_gen::*, ApiEndpoint, Path}; +use crate::generator::code_gen::url::url_builder::{IntoExpr, UrlBuilder}; +use crate::generator::{code_gen::*, ApiEndpoint, Path}; use inflector::Inflector; /// Builder for request url parts enum @@ -139,7 +139,7 @@ impl<'a> EnumBuilder<'a> { ident: None, vis: syn::Visibility::Inherited, attrs: vec![], - ty: typekind_to_ty(p, ty, true), + ty: typekind_to_ty(p, ty, true, false), } }) .collect(), @@ -306,15 +306,16 @@ mod tests { #![cfg_attr(rustfmt, rustfmt_skip)] use super::*; - use crate::api_generator::{Url, Path, HttpMethod, Body, Deprecated, Type, TypeKind, Documentation, ast_eq}; + use crate::generator::{Url, Path, HttpMethod, Body, Deprecated, Type, TypeKind, Documentation, ast_eq}; use std::collections::BTreeMap; - use crate::api_generator::code_gen::url::url_builder::PathString; + use crate::generator::code_gen::url::url_builder::PathString; #[test] fn generate_parts_enum_from_endpoint() { let endpoint = ( "search".to_string(), ApiEndpoint { + full_name: Some("search".to_string()), documentation: Documentation { description: None, url: None, diff --git a/api_generator/src/api_generator/code_gen/url/mod.rs b/api_generator/src/generator/code_gen/url/mod.rs similarity index 100% rename from api_generator/src/api_generator/code_gen/url/mod.rs rename to api_generator/src/generator/code_gen/url/mod.rs diff --git a/api_generator/src/api_generator/code_gen/url/url_builder.rs b/api_generator/src/generator/code_gen/url/url_builder.rs similarity index 94% rename from api_generator/src/api_generator/code_gen/url/url_builder.rs rename to api_generator/src/generator/code_gen/url/url_builder.rs index d5928793..3343e7cc 100644 --- a/api_generator/src/api_generator/code_gen/url/url_builder.rs +++ b/api_generator/src/generator/code_gen/url/url_builder.rs @@ -32,8 +32,8 @@ * See the License for the specific language governing 
permissions and * limitations under the License. */ -use crate::api_generator::code_gen::*; -use crate::api_generator::{Path, Type, TypeKind}; +use crate::generator::code_gen::*; +use crate::generator::{Path, Type, TypeKind}; use quote::ToTokens; use serde::{Deserialize, Deserializer}; use std::{collections::BTreeMap, fmt, iter::Iterator, str}; @@ -170,8 +170,6 @@ impl<'a> UrlBuilder<'a> { /// Build the AST for an allocated url from the path literals and params. fn build_owned(self) -> syn::Block { - - // collection of let {name}_str = [self.]{name}.[join(",")|to_string()]; let let_params_exprs = Self::let_parameters_exprs(&self.path, &self.parts); @@ -228,37 +226,30 @@ impl<'a> UrlBuilder<'a> { } /// Creates the AST for a let expression to percent encode path parts - fn let_encoded_exprs( - url: &[PathPart<'a>], - parts: &BTreeMap, - ) -> Vec { + fn let_encoded_exprs(url: &[PathPart<'a>], parts: &BTreeMap) -> Vec { url.iter() .filter_map(|p| match *p { PathPart::Param(p) => { let name = valid_name(p); let path_expr = match &parts[p].ty { TypeKind::String => path_none(name).into_expr(), - _ => path_none(format!("{}_str", name).as_str()).into_expr() + _ => path_none(format!("{}_str", name).as_str()).into_expr(), }; let encoded_ident = ident(format!("encoded_{}", name)); let percent_encode_call: syn::Expr = syn::ExprKind::Call( Box::new(path_none("percent_encode").into_expr()), vec![ - syn::ExprKind::MethodCall( - ident("as_bytes"), - vec![], - vec![path_expr] - ).into(), - path_none("PARTS_ENCODED").into_expr() + syn::ExprKind::MethodCall(ident("as_bytes"), vec![], vec![path_expr]) + .into(), + path_none("PARTS_ENCODED").into_expr(), ], - ).into(); + ) + .into(); - let into_call: syn::Expr = syn::ExprKind::MethodCall( - ident("into"), - vec![], - vec![percent_encode_call] - ).into(); + let into_call: syn::Expr = + syn::ExprKind::MethodCall(ident("into"), vec![], vec![percent_encode_call]) + .into(); Some(syn::Stmt::Local(Box::new(syn::Local { pat: 
Box::new(syn::Pat::Ident( @@ -346,9 +337,7 @@ impl<'a> UrlBuilder<'a> { } /// Get an expression to find the number of chars in each parameter part for the url. - fn parameter_length_exprs( - url: &[PathPart<'a>], - ) -> Vec { + fn parameter_length_exprs(url: &[PathPart<'a>]) -> Vec { url.iter() .filter_map(|p| match *p { PathPart::Param(p) => { @@ -413,10 +402,7 @@ impl<'a> UrlBuilder<'a> { } /// Get a list of statements that append each part to a `String` in order. - fn push_str_stmts( - url_ident: syn::Ident, - url: &[PathPart<'a>] - ) -> Vec { + fn push_str_stmts(url_ident: syn::Ident, url: &[PathPart<'a>]) -> Vec { url.iter() .map(|p| match *p { PathPart::Literal(p) => { diff --git a/api_generator/src/api_generator/mod.rs b/api_generator/src/generator/mod.rs similarity index 89% rename from api_generator/src/api_generator/mod.rs rename to api_generator/src/generator/mod.rs index 072ff73e..c63cf2ad 100644 --- a/api_generator/src/api_generator/mod.rs +++ b/api_generator/src/generator/mod.rs @@ -16,9 +16,12 @@ * specific language governing permissions and limitations * under the License. */ -use crate::api_generator::code_gen::url::url_builder::PathString; +use crate::generator::code_gen::url::url_builder::PathString; use rustfmt_nightly::{Config, Edition, EmitMode, Input, Session}; -use serde::{Deserialize, Deserializer}; +use serde::{ + de::{MapAccess, Visitor}, + Deserialize, Deserializer, +}; use serde_json::Value; use std::{ collections::{BTreeMap, HashSet}, @@ -26,18 +29,21 @@ use std::{ fs::{self, File, OpenOptions}, hash::{Hash, Hasher}, io::{prelude::*, Read}, + marker::PhantomData, path::PathBuf, + str::FromStr, }; #[cfg(test)] use quote::{ToTokens, Tokens}; use semver::Version; -use serde::de::{Error, MapAccess, Visitor}; -use std::marker::PhantomData; -use std::str::FromStr; use void::Void; -mod code_gen; +pub mod code_gen; + +lazy_static! 
{ + static ref VERSION: Version = semver::Version::parse(env!("CARGO_PKG_VERSION")).unwrap(); +} /// A complete API specification parsed from the REST API specs pub struct Api { @@ -52,6 +58,24 @@ pub struct Api { pub enums: Vec, } +impl Api { + /// Find the right ApiEndpoint from the REST API specs for the API call + /// defined in the YAML test. + /// + /// The REST API specs model only the stable APIs + /// currently, so no endpoint will be found for experimental or beta APIs + pub fn endpoint_for_api_call(&self, api_call: &str) -> Option<&ApiEndpoint> { + let api_call_path: Vec<&str> = api_call.split('.').collect(); + match api_call_path.len() { + 1 => self.root.get(api_call_path[0]), + _ => match self.namespaces.get(api_call_path[0]) { + Some(namespace) => namespace.get(api_call_path[1]), + None => None, + }, + } + } +} + /// A HTTP method in the REST API spec #[derive(Debug, Eq, PartialEq, Deserialize, Clone, Copy, Ord, PartialOrd)] pub enum HttpMethod { @@ -101,7 +125,7 @@ pub struct Type { /// The type of the param or part #[derive(Debug, PartialEq, Clone)] pub enum TypeKind { - None, + Unknown(String), List, Enum, String, @@ -123,20 +147,7 @@ impl<'de> Deserialize<'de> for TypeKind { D: Deserializer<'de>, { let value = String::deserialize(deserializer)?; - if value.contains('|') { - let values: Vec<&str> = value.split('|').collect(); - - if values.len() > 2 { - Err(D::Error::custom( - "TypeKind union contains more than two values", - )) - } else { - let union = Box::new((TypeKind::from(values[0]), TypeKind::from(values[1]))); - Ok(TypeKind::Union(union)) - } - } else { - Ok(TypeKind::from(value.as_str())) - } + Ok(TypeKind::from(value.as_str())) } } @@ -155,14 +166,22 @@ impl From<&str> for TypeKind { "long" => TypeKind::Long, "date" => TypeKind::Date, "time" => TypeKind::Time, - n => panic!("unknown typekind {}", n), + n => { + let values: Vec<&str> = n.split('|').collect(); + if values.len() != 2 { + TypeKind::Unknown(n.to_string()) + } else { + let 
union = Box::new((TypeKind::from(values[0]), TypeKind::from(values[1]))); + TypeKind::Union(union) + } + } } } } impl Default for TypeKind { fn default() -> Self { - TypeKind::None + TypeKind::Unknown("".to_string()) } } @@ -186,7 +205,7 @@ pub struct Path { /// The URL components of an API endpoint #[derive(Debug, PartialEq, Deserialize, Clone)] pub struct Url { - paths: Vec, + pub paths: Vec, } /// Body of an API endpoint @@ -197,11 +216,6 @@ pub struct Body { pub serialize: Option, } -lazy_static! { - static ref MAJOR_MINOR_VERSION: Version = - semver::Version::parse(env!("CARGO_PKG_VERSION")).unwrap(); -} - /// Wraps the URL string to replace master or current in URL path with the /// major.minor version of the api_generator. fn documentation_url_string<'de, D>(deserializer: D) -> Result @@ -233,11 +247,7 @@ impl DocumentationUrlString { u.path() .replace( "/master", - format!( - "/{}.{}", - MAJOR_MINOR_VERSION.major, MAJOR_MINOR_VERSION.minor - ) - .as_str(), + format!("/{}.{}", VERSION.major, VERSION.minor).as_str(), ) .as_str(), ); @@ -246,11 +256,7 @@ impl DocumentationUrlString { u.path() .replace( "/current", - format!( - "/{}.{}", - MAJOR_MINOR_VERSION.major, MAJOR_MINOR_VERSION.minor - ) - .as_str(), + format!("/{}.{}", VERSION.major, VERSION.minor).as_str(), ) .as_str(), ); @@ -332,13 +338,14 @@ where /// An API endpoint defined in the REST API specs #[derive(Debug, PartialEq, Deserialize, Clone)] pub struct ApiEndpoint { + pub full_name: Option, #[serde(deserialize_with = "string_or_struct")] documentation: Documentation, - stability: String, - url: Url, + pub stability: String, + pub url: Url, #[serde(default = "BTreeMap::new")] - params: BTreeMap, - body: Option, + pub params: BTreeMap, + pub body: Option, } impl ApiEndpoint { @@ -495,7 +502,7 @@ fn write_file(input: String, dir: &PathBuf, file: &str) -> Result<(), failure::E } /// Reads Api from a directory of REST Api specs -fn read_api(branch: &str, download_dir: &PathBuf) -> Result { +pub fn 
read_api(branch: &str, download_dir: &PathBuf) -> Result { let paths = fs::read_dir(download_dir)?; let mut namespaces = BTreeMap::new(); let mut enums: HashSet = HashSet::new(); @@ -509,7 +516,7 @@ fn read_api(branch: &str, download_dir: &PathBuf) -> Result if name .unwrap() - .map(|name| !name.starts_with('_')) + .map(|name| name.ends_with(".json") && !name.starts_with('_')) .unwrap_or(true) { let mut file = File::open(&path)?; @@ -598,6 +605,7 @@ where // get the first (and only) endpoint name and endpoint body let mut first_endpoint = endpoint.into_iter().next().unwrap(); + first_endpoint.1.full_name = Some(first_endpoint.0.clone()); // sort the HTTP methods so that we can easily pattern match on them later for path in first_endpoint.1.url.paths.iter_mut() { @@ -621,7 +629,7 @@ where /// formats tokens using rustfmt /// https://github.com/bcmyers/num-format/blob/b7a99480b8087924d291887b13d8c38b7ce43a36/num-format-dev/src/rustfmt.rs -fn rust_fmt(module: S) -> Result +pub fn rust_fmt(module: S) -> Result where S: Into, { diff --git a/api_generator/src/lib.rs b/api_generator/src/lib.rs new file mode 100644 index 00000000..f55d4e67 --- /dev/null +++ b/api_generator/src/lib.rs @@ -0,0 +1,12 @@ +// needed for quote! 
+#![recursion_limit = "256"] + +#[macro_use] +extern crate lazy_static; + +#[macro_use] +extern crate quote; + +pub mod error; +pub mod generator; +pub mod rest_spec; diff --git a/api_generator/src/rest_spec/mod.rs b/api_generator/src/rest_spec/mod.rs index defca253..7f0c9c22 100644 --- a/api_generator/src/rest_spec/mod.rs +++ b/api_generator/src/rest_spec/mod.rs @@ -55,7 +55,7 @@ struct RestApiSpec { links: Links, } -pub fn download_specs(branch: &str, download_dir: &PathBuf) { +pub fn download_specs(branch: &str, download_dir: &PathBuf) -> Result<(), failure::Error> { let spec_urls = [ ("core".to_string(), "https://api.github.com/repos/elastic/elasticsearch/contents/rest-api-spec/src/main/resources/rest-api-spec/api".to_string()), ("xpack".to_string(), "https://api.github.com/repos/elastic/elasticsearch/contents/x-pack/plugin/src/test/resources/rest-api-spec/api".to_string())]; @@ -72,13 +72,15 @@ pub fn download_specs(branch: &str, download_dir: &PathBuf) { }) .collect(); - fs::create_dir_all(download_dir).unwrap(); + fs::create_dir_all(download_dir)?; for spec in specs { - download_endpoints(&spec, &download_dir); + download_endpoints(&spec, &download_dir)?; } + + Ok(()) } -fn download_endpoints(spec: &GitHubSpec, download_dir: &PathBuf) { +fn download_endpoints(spec: &GitHubSpec, download_dir: &PathBuf) -> Result<(), failure::Error> { let client = reqwest::blocking::ClientBuilder::new() .user_agent(concat!("RustApiGenerator/", env!("CARGO_PKG_VERSION"))) .build() @@ -89,4 +91,5 @@ fn download_endpoints(spec: &GitHubSpec, download_dir: &PathBuf) { println!("Downloading {} specs from {}", spec.dir, spec.branch); download_specs_to_dir(client, rest_api_specs.as_slice(), download_dir).unwrap(); println!("Done downloading {} specs from {}", spec.dir, spec.branch); + Ok(()) } diff --git a/elasticsearch/src/client.rs b/elasticsearch/src/client.rs index 14e4f038..8c351f9e 100644 --- a/elasticsearch/src/client.rs +++ b/elasticsearch/src/client.rs @@ -23,21 +23,33 @@ use 
crate::{ use serde::{Serialize, Serializer}; -/// Serializes an `Option<&[&str]>` with +/// Serializes an `Option<&[Serialize]>` with /// `Some(value)` to a comma separated string of values. /// Used to serialize values within the query string -pub fn serialize_coll_qs( - value: &Option<&[&str]>, +pub(crate) fn serialize_coll_qs( + value: &Option<&[T]>, serializer: S, ) -> Result<::Ok, ::Error> where S: Serializer, + T: Serialize, { - let vec = value - .as_ref() - .expect("attempt to serialize Option::None value"); - let joined = vec.join(","); - serializer.serialize_str(joined.as_ref()) + let vec = value.expect("attempt to serialize Option::None value"); + + // TODO: There must be a better way of serializing a Vec to a comma-separated url encoded string... + // (mis)use serde_json to_string and trim the surrounding quotes... + let serialized = vec + .iter() + .map(|v| serde_json::to_string(v).unwrap()) + .collect::>(); + + let target = serialized + .iter() + .map(|s| s.trim_matches('"')) + .collect::>() + .join(","); + + serializer.serialize_str(&target) } /// Root client for top level APIs diff --git a/elasticsearch/src/generated/namespace_clients/async_search.rs b/elasticsearch/src/generated/namespace_clients/async_search.rs index bbecea92..b2ed83c6 100644 --- a/elasticsearch/src/generated/namespace_clients/async_search.rs +++ b/elasticsearch/src/generated/namespace_clients/async_search.rs @@ -345,7 +345,7 @@ pub struct AsyncSearchSubmit<'a, 'b, B> { df: Option<&'b str>, docvalue_fields: Option<&'b [&'b str]>, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, explain: Option, filter_path: Option<&'b [&'b str]>, from: Option, @@ -560,7 +560,7 @@ where self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> 
Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -715,8 +715,8 @@ where self } #[doc = "Indicate if the number of documents that match the query should be tracked"] - pub fn track_total_hits(mut self, track_total_hits: TrackTotalHits) -> Self { - self.track_total_hits = Some(track_total_hits); + pub fn track_total_hits>(mut self, track_total_hits: T) -> Self { + self.track_total_hits = Some(track_total_hits.into()); self } #[doc = "Specify whether aggregation and suggester names should be prefixed by their respective types in the response"] @@ -779,8 +779,11 @@ where docvalue_fields: Option<&'b [&'b str]>, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde(rename = "explain")] explain: Option, #[serde( diff --git a/elasticsearch/src/generated/namespace_clients/cat.rs b/elasticsearch/src/generated/namespace_clients/cat.rs index 5ed2669f..2018b509 100644 --- a/elasticsearch/src/generated/namespace_clients/cat.rs +++ b/elasticsearch/src/generated/namespace_clients/cat.rs @@ -68,7 +68,7 @@ pub struct CatAliases<'a, 'b> { client: &'a Elasticsearch, parts: CatAliasesParts<'b>, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, format: Option<&'b str>, h: Option<&'b [&'b str]>, @@ -111,7 +111,7 @@ impl<'a, 'b> CatAliases<'a, 'b> { self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -181,8 +181,11 @@ impl<'a, 'b> CatAliases<'a, 'b> { struct QueryParams<'b> { 
#[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -1151,7 +1154,7 @@ pub struct CatIndices<'a, 'b> { parts: CatIndicesParts<'b>, bytes: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, format: Option<&'b str>, h: Option<&'b [&'b str]>, @@ -1210,7 +1213,7 @@ impl<'a, 'b> CatIndices<'a, 'b> { self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -1307,8 +1310,11 @@ impl<'a, 'b> CatIndices<'a, 'b> { bytes: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" diff --git a/elasticsearch/src/generated/namespace_clients/cluster.rs b/elasticsearch/src/generated/namespace_clients/cluster.rs index 7336e973..303d6eb9 100644 --- a/elasticsearch/src/generated/namespace_clients/cluster.rs +++ b/elasticsearch/src/generated/namespace_clients/cluster.rs @@ -377,7 +377,7 @@ pub struct ClusterHealth<'a, 'b> { client: &'a Elasticsearch, parts: ClusterHealthParts<'b>, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, 
headers: HeaderMap, human: Option, @@ -426,7 +426,7 @@ impl<'a, 'b> ClusterHealth<'a, 'b> { self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -519,8 +519,11 @@ impl<'a, 'b> ClusterHealth<'a, 'b> { struct QueryParams<'b> { #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -1237,7 +1240,7 @@ pub struct ClusterState<'a, 'b> { parts: ClusterStateParts<'b>, allow_no_indices: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, flat_settings: Option, headers: HeaderMap, @@ -1284,7 +1287,7 @@ impl<'a, 'b> ClusterState<'a, 'b> { self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -1356,8 +1359,11 @@ impl<'a, 'b> ClusterState<'a, 'b> { allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" diff --git 
a/elasticsearch/src/generated/namespace_clients/indices.rs b/elasticsearch/src/generated/namespace_clients/indices.rs index 45d8ae85..0fb0bee7 100644 --- a/elasticsearch/src/generated/namespace_clients/indices.rs +++ b/elasticsearch/src/generated/namespace_clients/indices.rs @@ -229,7 +229,7 @@ pub struct IndicesClearCache<'a, 'b, B> { allow_no_indices: Option, body: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, fielddata: Option, fields: Option<&'b [&'b str]>, filter_path: Option<&'b [&'b str]>, @@ -305,7 +305,7 @@ where self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -377,8 +377,11 @@ where allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde(rename = "fielddata")] fielddata: Option, #[serde(rename = "fields", serialize_with = "crate::client::serialize_coll_qs")] @@ -637,7 +640,7 @@ pub struct IndicesClose<'a, 'b, B> { allow_no_indices: Option, body: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, human: Option, @@ -707,7 +710,7 @@ where self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self 
} @@ -769,8 +772,11 @@ where allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -1027,7 +1033,7 @@ pub struct IndicesDelete<'a, 'b> { parts: IndicesDeleteParts<'b>, allow_no_indices: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, human: Option, @@ -1068,7 +1074,7 @@ impl<'a, 'b> IndicesDelete<'a, 'b> { self } #[doc = "Whether wildcard expressions should get expanded to open or closed indices (default: open)"] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -1125,8 +1131,11 @@ impl<'a, 'b> IndicesDelete<'a, 'b> { allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -1478,7 +1487,7 @@ pub struct IndicesExists<'a, 'b> { parts: IndicesExistsParts<'b>, allow_no_indices: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, flat_settings: Option, headers: HeaderMap, @@ -1521,7 +1530,7 @@ impl<'a, 'b> IndicesExists<'a, 'b> { self } #[doc = "Whether wildcard expressions should get expanded to open or closed indices (default: open)"] - 
pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -1583,8 +1592,11 @@ impl<'a, 'b> IndicesExists<'a, 'b> { allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -1674,7 +1686,7 @@ pub struct IndicesExistsAlias<'a, 'b> { parts: IndicesExistsAliasParts<'b>, allow_no_indices: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, human: Option, @@ -1713,7 +1725,7 @@ impl<'a, 'b> IndicesExistsAlias<'a, 'b> { self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -1765,8 +1777,11 @@ impl<'a, 'b> IndicesExistsAlias<'a, 'b> { allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -1986,7 +2001,7 @@ pub struct IndicesExistsType<'a, 'b> { parts: IndicesExistsTypeParts<'b>, allow_no_indices: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: 
Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, human: Option, @@ -2025,7 +2040,7 @@ impl<'a, 'b> IndicesExistsType<'a, 'b> { self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -2077,8 +2092,11 @@ impl<'a, 'b> IndicesExistsType<'a, 'b> { allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -2150,7 +2168,7 @@ pub struct IndicesFlush<'a, 'b, B> { allow_no_indices: Option, body: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, force: Option, headers: HeaderMap, @@ -2217,7 +2235,7 @@ where self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -2277,8 +2295,11 @@ where allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -2353,7 
+2374,7 @@ pub struct IndicesFlushSynced<'a, 'b, B> { allow_no_indices: Option, body: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, human: Option, @@ -2414,7 +2435,7 @@ where self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -2464,8 +2485,11 @@ where allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -2534,7 +2558,7 @@ pub struct IndicesForcemerge<'a, 'b, B> { allow_no_indices: Option, body: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, flush: Option, headers: HeaderMap, @@ -2604,7 +2628,7 @@ where self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -2666,8 +2690,11 @@ where allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, 
#[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -2741,7 +2768,7 @@ pub struct IndicesFreeze<'a, 'b, B> { allow_no_indices: Option, body: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, human: Option, @@ -2811,7 +2838,7 @@ where self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -2873,8 +2900,11 @@ where allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -2947,7 +2977,7 @@ pub struct IndicesGet<'a, 'b> { parts: IndicesGetParts<'b>, allow_no_indices: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, flat_settings: Option, headers: HeaderMap, @@ -2994,7 +3024,7 @@ impl<'a, 'b> IndicesGet<'a, 'b> { self } #[doc = "Whether wildcard expressions should get expanded to open or closed indices (default: open)"] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -3066,8 +3096,11 @@ impl<'a, 'b> IndicesGet<'a, 'b> { allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: 
Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -3178,7 +3211,7 @@ pub struct IndicesGetAlias<'a, 'b> { parts: IndicesGetAliasParts<'b>, allow_no_indices: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, human: Option, @@ -3217,7 +3250,7 @@ impl<'a, 'b> IndicesGetAlias<'a, 'b> { self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -3269,8 +3302,11 @@ impl<'a, 'b> IndicesGetAlias<'a, 'b> { allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -3392,7 +3428,7 @@ pub struct IndicesGetFieldMapping<'a, 'b> { parts: IndicesGetFieldMappingParts<'b>, allow_no_indices: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, human: Option, @@ -3435,7 +3471,7 @@ impl<'a, 'b> IndicesGetFieldMapping<'a, 'b> { self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b 
[ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -3497,8 +3533,11 @@ impl<'a, 'b> IndicesGetFieldMapping<'a, 'b> { allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -3600,7 +3639,7 @@ pub struct IndicesGetMapping<'a, 'b> { parts: IndicesGetMappingParts<'b>, allow_no_indices: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, human: Option, @@ -3643,7 +3682,7 @@ impl<'a, 'b> IndicesGetMapping<'a, 'b> { self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -3705,8 +3744,11 @@ impl<'a, 'b> IndicesGetMapping<'a, 'b> { allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -3811,7 +3853,7 @@ pub struct IndicesGetSettings<'a, 'b> { parts: IndicesGetSettingsParts<'b>, allow_no_indices: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, flat_settings: Option, headers: HeaderMap, @@ -3856,7 +3898,7 @@ 
impl<'a, 'b> IndicesGetSettings<'a, 'b> { self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -3923,8 +3965,11 @@ impl<'a, 'b> IndicesGetSettings<'a, 'b> { allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -4166,7 +4211,7 @@ pub struct IndicesGetUpgrade<'a, 'b> { parts: IndicesGetUpgradeParts<'b>, allow_no_indices: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, human: Option, @@ -4203,7 +4248,7 @@ impl<'a, 'b> IndicesGetUpgrade<'a, 'b> { self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -4250,8 +4295,11 @@ impl<'a, 'b> IndicesGetUpgrade<'a, 'b> { allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -4317,7 +4365,7 @@ pub struct 
IndicesOpen<'a, 'b, B> { allow_no_indices: Option, body: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, human: Option, @@ -4387,7 +4435,7 @@ where self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -4449,8 +4497,11 @@ where allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -4716,7 +4767,7 @@ pub struct IndicesPutMapping<'a, 'b, B> { allow_no_indices: Option, body: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, human: Option, @@ -4786,7 +4837,7 @@ where self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -4848,8 +4899,11 @@ where allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = 
"filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -4927,7 +4981,7 @@ pub struct IndicesPutSettings<'a, 'b, B> { allow_no_indices: Option, body: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, flat_settings: Option, headers: HeaderMap, @@ -5000,7 +5054,7 @@ where self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -5067,8 +5121,11 @@ where allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -5475,7 +5532,7 @@ pub struct IndicesRefresh<'a, 'b, B> { allow_no_indices: Option, body: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, human: Option, @@ -5536,7 +5593,7 @@ where self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -5586,8 +5643,11 @@ where allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = 
"crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -5653,7 +5713,7 @@ pub struct IndicesReloadSearchAnalyzers<'a, 'b, B> { allow_no_indices: Option, body: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, human: Option, @@ -5714,7 +5774,7 @@ where self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -5764,8 +5824,11 @@ where allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -6044,7 +6107,7 @@ pub struct IndicesSegments<'a, 'b> { parts: IndicesSegmentsParts<'b>, allow_no_indices: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, human: Option, @@ -6083,7 +6146,7 @@ impl<'a, 'b> IndicesSegments<'a, 'b> { self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -6135,8 +6198,11 @@ impl<'a, 'b> IndicesSegments<'a, 'b> { allow_no_indices: 
Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -6207,7 +6273,7 @@ pub struct IndicesShardStores<'a, 'b> { parts: IndicesShardStoresParts<'b>, allow_no_indices: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, human: Option, @@ -6246,7 +6312,7 @@ impl<'a, 'b> IndicesShardStores<'a, 'b> { self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -6298,8 +6364,11 @@ impl<'a, 'b> IndicesShardStores<'a, 'b> { allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -6776,7 +6845,7 @@ pub struct IndicesStats<'a, 'b> { parts: IndicesStatsParts<'b>, completion_fields: Option<&'b [&'b str]>, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, fielddata_fields: Option<&'b [&'b str]>, fields: Option<&'b [&'b str]>, filter_path: Option<&'b [&'b str]>, @@ -6827,7 +6896,7 @@ impl<'a, 'b> IndicesStats<'a, 'b> { self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or 
both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -6912,8 +6981,11 @@ impl<'a, 'b> IndicesStats<'a, 'b> { completion_fields: Option<&'b [&'b str]>, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "fielddata_fields", serialize_with = "crate::client::serialize_coll_qs" @@ -7002,7 +7074,7 @@ pub struct IndicesUnfreeze<'a, 'b, B> { allow_no_indices: Option, body: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, human: Option, @@ -7072,7 +7144,7 @@ where self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -7134,8 +7206,11 @@ where allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -7368,7 +7443,7 @@ pub struct IndicesUpgrade<'a, 'b, B> { allow_no_indices: Option, body: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, 
human: Option, @@ -7435,7 +7510,7 @@ where self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -7492,8 +7567,11 @@ where allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -7589,7 +7667,7 @@ pub struct IndicesValidateQuery<'a, 'b, B> { default_operator: Option, df: Option<&'b str>, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, explain: Option, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, @@ -7697,7 +7775,7 @@ where self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -7777,8 +7855,11 @@ where df: Option<&'b str>, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde(rename = "explain")] explain: Option, #[serde( diff --git a/elasticsearch/src/generated/namespace_clients/ml.rs b/elasticsearch/src/generated/namespace_clients/ml.rs index 6938e0c2..0f8ca226 100644 --- 
a/elasticsearch/src/generated/namespace_clients/ml.rs +++ b/elasticsearch/src/generated/namespace_clients/ml.rs @@ -5191,7 +5191,7 @@ pub struct MlPutDatafeed<'a, 'b, B> { allow_no_indices: Option, body: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, human: Option, @@ -5255,7 +5255,7 @@ where self } #[doc = "Whether source index expressions should get expanded to open or closed indices (default: open)"] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -5307,8 +5307,11 @@ where allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -6317,7 +6320,7 @@ pub struct MlUpdateDatafeed<'a, 'b, B> { allow_no_indices: Option, body: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, human: Option, @@ -6381,7 +6384,7 @@ where self } #[doc = "Whether source index expressions should get expanded to open or closed indices (default: open)"] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -6433,8 +6436,11 @@ where allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = 
"expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" diff --git a/elasticsearch/src/generated/root.rs b/elasticsearch/src/generated/root.rs index 9d574648..fd3dfd64 100644 --- a/elasticsearch/src/generated/root.rs +++ b/elasticsearch/src/generated/root.rs @@ -501,7 +501,7 @@ pub struct Count<'a, 'b, B> { default_operator: Option, df: Option<&'b str>, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, human: Option, @@ -611,7 +611,7 @@ where self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -704,8 +704,11 @@ where df: Option<&'b str>, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -1281,7 +1284,7 @@ pub struct DeleteByQuery<'a, 'b, B> { default_operator: Option, df: Option<&'b str>, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, from: Option, headers: HeaderMap, @@ -1464,7 +1467,7 @@ where self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b 
[ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -1646,8 +1649,11 @@ where df: Option<&'b str>, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -2881,7 +2887,7 @@ pub struct FieldCaps<'a, 'b, B> { allow_no_indices: Option, body: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, fields: Option<&'b [&'b str]>, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, @@ -2948,7 +2954,7 @@ where self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -3008,8 +3014,11 @@ where allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde(rename = "fields", serialize_with = "crate::client::serialize_coll_qs")] fields: Option<&'b [&'b str]>, #[serde( @@ -6142,7 +6151,7 @@ pub struct Search<'a, 'b, B> { df: Option<&'b str>, docvalue_fields: Option<&'b [&'b str]>, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, explain: Option, filter_path: Option<&'b [&'b str]>, from: Option, @@ -6364,7 +6373,7 @@ where self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] 
- pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -6524,8 +6533,8 @@ where self } #[doc = "Indicate if the number of documents that match the query should be tracked"] - pub fn track_total_hits(mut self, track_total_hits: TrackTotalHits) -> Self { - self.track_total_hits = Some(track_total_hits); + pub fn track_total_hits>(mut self, track_total_hits: T) -> Self { + self.track_total_hits = Some(track_total_hits.into()); self } #[doc = "Specify whether aggregation and suggester names should be prefixed by their respective types in the response"] @@ -6588,8 +6597,11 @@ where docvalue_fields: Option<&'b [&'b str]>, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde(rename = "explain")] explain: Option, #[serde( @@ -6759,7 +6771,7 @@ pub struct SearchShards<'a, 'b, B> { allow_no_indices: Option, body: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, human: Option, @@ -6829,7 +6841,7 @@ where self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -6894,8 +6906,11 @@ where allow_no_indices: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = 
"crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" @@ -6990,7 +7005,7 @@ pub struct SearchTemplate<'a, 'b, B> { body: Option, ccs_minimize_roundtrips: Option, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, explain: Option, filter_path: Option<&'b [&'b str]>, headers: HeaderMap, @@ -7085,7 +7100,7 @@ where self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -7182,8 +7197,11 @@ where ccs_minimize_roundtrips: Option, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde(rename = "explain")] explain: Option, #[serde( @@ -7907,7 +7925,7 @@ pub struct UpdateByQuery<'a, 'b, B> { default_operator: Option, df: Option<&'b str>, error_trace: Option, - expand_wildcards: Option, + expand_wildcards: Option<&'b [ExpandWildcards]>, filter_path: Option<&'b [&'b str]>, from: Option, headers: HeaderMap, @@ -8096,7 +8114,7 @@ where self } #[doc = "Whether to expand wildcard expression to concrete indices that are open, closed or both."] - pub fn expand_wildcards(mut self, expand_wildcards: ExpandWildcards) -> Self { + pub fn expand_wildcards(mut self, expand_wildcards: &'b [ExpandWildcards]) -> Self { self.expand_wildcards = Some(expand_wildcards); self } @@ -8288,8 +8306,11 @@ where df: Option<&'b str>, #[serde(rename = "error_trace")] error_trace: Option, - #[serde(rename = "expand_wildcards")] - 
expand_wildcards: Option, + #[serde( + rename = "expand_wildcards", + serialize_with = "crate::client::serialize_coll_qs" + )] + expand_wildcards: Option<&'b [ExpandWildcards]>, #[serde( rename = "filter_path", serialize_with = "crate::client::serialize_coll_qs" diff --git a/elasticsearch/src/http/transport.rs b/elasticsearch/src/http/transport.rs index 4a8a376b..c2fb43a0 100644 --- a/elasticsearch/src/http/transport.rs +++ b/elasticsearch/src/http/transport.rs @@ -340,6 +340,30 @@ impl Transport { let reqwest_method = self.method(method); let mut request_builder = self.client.request(reqwest_method, url); + // set credentials before any headers, as credentials append to existing headers in reqwest, + // whilst setting headers() overwrites, so if an Authorization header has been specified + // on a specific request, we want it to overwrite. + if let Some(c) = &self.credentials { + request_builder = match c { + Credentials::Basic(u, p) => request_builder.basic_auth(u, Some(p)), + Credentials::Bearer(t) => request_builder.bearer_auth(t), + #[cfg(any(feature = "native-tls", feature = "rustls-tls"))] + Credentials::Certificate(_) => request_builder, + Credentials::ApiKey(i, k) => { + let mut header_value = b"ApiKey ".to_vec(); + { + let mut encoder = Base64Encoder::new(&mut header_value, base64::STANDARD); + write!(encoder, "{}:", i).unwrap(); + write!(encoder, "{}", k).unwrap(); + } + request_builder.header( + AUTHORIZATION, + HeaderValue::from_bytes(&header_value).unwrap(), + ) + } + } + } + // default headers first, overwrite with any provided let mut request_headers = HeaderMap::with_capacity(3 + headers.len()); request_headers.insert(CONTENT_TYPE, HeaderValue::from_static(DEFAULT_CONTENT_TYPE)); @@ -367,27 +391,6 @@ impl Transport { request_builder = request_builder.query(q); } - if let Some(c) = &self.credentials { - request_builder = match c { - Credentials::Basic(u, p) => request_builder.basic_auth(u, Some(p)), - Credentials::Bearer(t) => 
request_builder.bearer_auth(t), - #[cfg(any(feature = "native-tls", feature = "rustls-tls"))] - Credentials::Certificate(_) => request_builder, - Credentials::ApiKey(i, k) => { - let mut header_value = b"ApiKey ".to_vec(); - { - let mut encoder = Base64Encoder::new(&mut header_value, base64::STANDARD); - write!(encoder, "{}:", i).unwrap(); - write!(encoder, "{}", k).unwrap(); - } - request_builder.header( - AUTHORIZATION, - HeaderValue::from_bytes(&header_value).unwrap(), - ) - } - } - } - let response = request_builder.send().await; match response { Ok(r) => Ok(Response::new(r, method)), diff --git a/elasticsearch/src/params/mod.rs b/elasticsearch/src/params/mod.rs index b52293a0..9d702138 100644 --- a/elasticsearch/src/params/mod.rs +++ b/elasticsearch/src/params/mod.rs @@ -39,6 +39,18 @@ pub enum TrackTotalHits { Count(i64), } +impl From for TrackTotalHits { + fn from(b: bool) -> Self { + TrackTotalHits::Track(b) + } +} + +impl From for TrackTotalHits { + fn from(i: i64) -> Self { + TrackTotalHits::Count(i) + } +} + /// Control how the `_source` field is returned with every hit. /// /// By default operations return the contents of the `_source` field diff --git a/elasticsearch/tests/common/mod.rs b/elasticsearch/tests/common/mod.rs index ee878269..bd2e4292 100644 --- a/elasticsearch/tests/common/mod.rs +++ b/elasticsearch/tests/common/mod.rs @@ -16,10 +16,6 @@ * specific language governing permissions and limitations * under the License. 
*/ -// From reqwest crate -// Licensed under Apache License, Version 2.0 -// https://github.com/seanmonstar/reqwest/blob/master/LICENSE-APACHE - pub mod client; pub mod server; diff --git a/yaml_test_runner/Cargo.toml b/yaml_test_runner/Cargo.toml new file mode 100644 index 00000000..cccf9be1 --- /dev/null +++ b/yaml_test_runner/Cargo.toml @@ -0,0 +1,36 @@ +[package] +name = "yaml_test_runner" +version = "7.7.0-alpha.1" +edition = "2018" +authors = ["Elastic and Contributors"] +description = "Generates and runs tests from Elasticsearch's YAML test specs" +repository = "https://github.com/elastic/elasticsearch-rs" + +[dependencies] +elasticsearch = { path = "../elasticsearch" } +api_generator = { path = "../api_generator" } + +base64 = "^0.11" +clap = "~2" +failure = "0.1.6" +itertools = "0.8.2" +Inflector = "0.11.4" +lazy_static = "1.4.0" +log = "0.4.8" +once_cell = "1.4.0" +path-slash = "0.1.1" +quote = "~0.3" +regex = "1.3.1" +reqwest = "~0.9" +semver = "0.9.0" +serde = "~1" +serde_yaml = "0.8.11" +serde_json = { version = "~1", features = ["arbitrary_precision"] } +simple_logger = "1.6.0" +syn = { version = "~0.11", features = ["full"] } +sysinfo = "0.9.6" +url = "2.1.1" +yaml-rust = "0.4.3" + +[dev-dependencies] +tokio = { version = "0.2.0", default-features = false, features = ["macros", "tcp", "time"] } diff --git a/yaml_test_runner/skip.yml b/yaml_test_runner/skip.yml new file mode 100644 index 00000000..fe67b5c2 --- /dev/null +++ b/yaml_test_runner/skip.yml @@ -0,0 +1,54 @@ +# Skip file of features and tests to skip. +# This is used at compilation time, when compiling tests from the YAML tests, to not generate tests that match +# on name or features defined below that should be skipped. Accordingly, changing values in this file requires +# recompiling tests in order for the changes to take effect. 
+ +# features not yet implemented +features: + - node_selector + - stash_path_replace + - embedded_stash_key + +# tests to skip generating and compiling a test for +tests: + xpack/ssl/10_basic.yml: + # this test returns the CA cert before the cert, so always fails + - "Test get SSL certificates" + + xpack/transform/transforms_stats_continuous.yml: + # this test always returns "exponential_avg_checkpoint_duration_ms": 0.0 . seems like it might be missing data in + # the setup, fires quicker than any documents are processed, or the delay of 1m is too high? + - "Test get continuous transform stats" + + xpack/ml/jobs_get_result_overall_buckets.yml: + # this test always returns 3 buckets where 1 is expected + - "Test overall buckets given overall_score filter" + + xpack/snapshot/10_basic.yml: + # this test fails because it can't access snapshot to restore it + - "Create a source only snapshot and then restore it" + + oss/cat.aliases/10_basic.yml: + # this test fails as the regex needs a \n before the ending $ + - "Multiple alias names" + + oss/cat.indices/10_basic.yml: + # this test fails as the regex needs a \n before the ending $ + - "Test cat indices using health status" + + oss/indices.shard_stores/10_basic.yml: + # uses number as a key into object. serde_json::Value expects a string key + - "basic index test" + - "multiple indices test" + + oss/indices.flush/10_basic.yml: + # uses number as a key into object. serde_json::Value expects a string key + - "Index synced flush rest test" + + oss/indices.segments/10_basic.yml: + # uses number as a key into object. serde_json::Value expects a string key + - "basic segments test" + + oss/indices.stats/12_level.yml: + # uses number as a key into object. 
serde_json::Value expects a string key + - "Level - shards" \ No newline at end of file diff --git a/yaml_test_runner/src/generator.rs b/yaml_test_runner/src/generator.rs new file mode 100644 index 00000000..8e0a0c4d --- /dev/null +++ b/yaml_test_runner/src/generator.rs @@ -0,0 +1,605 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +use crate::step::*; +use api_generator::generator::Api; +use inflector::Inflector; +use path_slash::PathExt; +use quote::{ToTokens, Tokens}; +use regex::Regex; +use semver::Version; +use serde::Deserialize; +use std::borrow::Borrow; +use std::collections::{BTreeMap, HashSet}; +use std::fs; +use std::fs::{File, OpenOptions}; +use std::io::Write; +use std::path::{Component, Path, PathBuf}; +use yaml_rust::{Yaml, YamlLoader}; + +/// The test suite to compile +#[derive(Debug, PartialEq)] +pub enum TestSuite { + Oss, + XPack, +} + +/// The components of a test file, constructed from a yaml file +struct YamlTests<'a> { + path: String, + version: &'a Version, + skip: &'a GlobalSkip, + suite: TestSuite, + directives: HashSet, + setup: Option, + teardown: Option, + tests: Vec, +} + +impl<'a> YamlTests<'a> { + pub fn new( + path: &'a Path, + version: &'a semver::Version, + skip: &'a GlobalSkip, + suite: TestSuite, + len: usize, + ) -> Self { + let path = path.to_slash_lossy(); + Self { + path, + version, + skip, + suite, + directives: HashSet::with_capacity(len), + setup: None, + teardown: None, + tests: Vec::with_capacity(len), + } + } + + /// Collects the use directives required for all steps and tests + fn use_directives_from_steps(steps: &[Step]) -> Vec { + steps + .iter() + .filter_map(Step::r#do) + .filter_map(|d| d.namespace()) + .map(|s| s.to_string()) + .collect() + } + + /// Adds a specific setup function + pub fn add_setup(&mut self, setup: TestFn) -> &mut Self { + let directives = Self::use_directives_from_steps(&setup.steps); + for directive in directives { + self.directives.insert(directive); + } + + self.setup = Some(setup); + self + } + + /// Adds a specific teardown function + pub fn add_teardown(&mut self, teardown: TestFn) -> &mut Self { + let directives = Self::use_directives_from_steps(&teardown.steps); + for directive in directives { + self.directives.insert(directive); + } + + self.teardown = Some(teardown); + self + } + + /// Adds a test to the 
collection of tests + pub fn add_test_fn(&mut self, test_fn: TestFn) -> &mut Self { + let directives = Self::use_directives_from_steps(&test_fn.steps); + for directive in directives { + self.directives.insert(directive); + } + + self.tests.push(test_fn); + self + } + + /// Generates the AST for the Yaml test file + pub fn build(self) -> Tokens { + let (setup_fn, setup_call) = Self::generate_fixture(&self.setup); + let (teardown_fn, teardown_call) = Self::generate_fixture(&self.teardown); + let general_setup_call = match self.suite { + TestSuite::Oss => quote!(client::general_oss_setup().await?;), + TestSuite::XPack => quote!(client::general_xpack_setup().await?;), + }; + + let tests = self.fn_impls(general_setup_call, setup_call, teardown_call); + + let directives: Vec = self + .directives + .iter() + .map(|n| { + let ident = syn::Ident::from(n.as_str()); + quote!(use elasticsearch::#ident::*;) + }) + .collect(); + + quote! { + #![allow(unused_imports, unused_variables, dead_code)] + use crate::common::{client, macros, transform}; + use elasticsearch::*; + use elasticsearch::http::{ + headers::{HeaderName, HeaderValue}, + request::JsonBody, + Method, + }; + use elasticsearch::params::*; + #(#directives)* + use ::regex; + use serde_json::{json, Value}; + + #setup_fn + #teardown_fn + #(#tests)* + } + } + + /// Whether to emit code to read the last response, as text and optionally json + pub fn read_response(read_response: bool, tokens: &mut Tokens) -> bool { + if !read_response { + tokens.append(quote! 
{ + let (method, status_code, text, json) = client::read_response(response).await?; + }); + } + + true + } + + /// Whether the test should be skipped + fn skip_test(&self, name: &str) -> bool { + if self.skip.tests.contains_key(self.path.as_str()) { + let tests = self.skip.tests.get(self.path.as_str()); + + return match tests { + Some(t) => t.contains(name.to_string().borrow()), + None => true, + }; + } + + false + } + + fn fn_impls( + &self, + general_setup_call: Tokens, + setup_call: Option, + teardown_call: Option, + ) -> Vec> { + let mut seen_names = HashSet::new(); + + self.tests + .iter() + .map(|test_fn| { + let name = test_fn.name(); + let unique_name = test_fn.unique_name(&mut seen_names); + if self.skip_test(name) { + info!( + r#"skipping "{}" in {} because it's included in skip.yml"#, + name, + self.path, + ); + return None; + } + + let fn_name = syn::Ident::from(unique_name.as_str()); + let mut body = Tokens::new(); + let mut skip : Option = None; + let mut read_response = false; + + for step in &test_fn.steps { + match step { + Step::Skip(s) => { + skip = if s.skip_version(self.version) { + let m = format!( + r#"skipping "{}" in {} because version "{}" is met. 
{}"#, + name, + &self.path, + s.version(), + s.reason() + ); + Some(m) + } else if s.skip_features(&self.skip.features) { + let m = format!( + r#"skipping "{}" in {} because it needs features "{:?}" which are currently not implemented"#, + name, + &self.path, + s.features() + ); + Some(m) + } else { + None + } + } + Step::Do(d) => { + read_response = d.to_tokens(false, &mut body); + } + Step::Match(m) => { + read_response = Self::read_response(read_response,&mut body); + m.to_tokens(&mut body); + } + Step::Set(s) => { + read_response = Self::read_response(read_response, &mut body); + s.to_tokens(&mut body); + } + Step::Length(l) => { + read_response = Self::read_response(read_response,&mut body); + l.to_tokens(&mut body); + }, + Step::IsTrue(t) => { + read_response = Self::read_response(read_response,&mut body); + t.to_tokens(&mut body); + }, + Step::IsFalse(f) => { + read_response = Self::read_response(read_response, &mut body); + f.to_tokens(&mut body); + }, + Step::Comparison(c) => { + read_response = Self::read_response(read_response,&mut body); + c.to_tokens(&mut body); + }, + Step::Contains(c) => { + read_response = Self::read_response(read_response,&mut body); + c.to_tokens(&mut body); + }, + Step::TransformAndSet(t) => { + read_response = Self::read_response(read_response,&mut body); + t.to_tokens(&mut body); + } + } + } + + match skip { + Some(s) => { + info!("{}", s); + None + }, + None => Some(quote! { + #[tokio::test] + async fn #fn_name() -> Result<(), failure::Error> { + let client = client::get(); + #general_setup_call + #setup_call + #body + #teardown_call + Ok(()) + } + }), + } + }) + .collect() + } + + /// Generates the AST for the fixture fn and its invocation + fn generate_fixture(test_fn: &Option) -> (Option, Option) { + if let Some(t) = test_fn { + let ident = syn::Ident::from(t.name.as_str()); + + // TODO: collect up the do calls for now. We do also need to handle skip, etc. 
+ let tokens = t + .steps + .iter() + .filter_map(Step::r#do) + .map(|d| { + let mut tokens = Tokens::new(); + ToTokens::to_tokens(d, &mut tokens); + tokens + }) + .collect::>(); + + ( + Some(quote! { + async fn #ident(client: &Elasticsearch) -> Result<(), failure::Error> { + #(#tokens)* + Ok(()) + } + }), + Some(quote! { #ident(&client).await?; }), + ) + } else { + (None, None) + } + } +} + +/// A test function +struct TestFn { + name: String, + steps: Vec, +} + +impl TestFn { + pub fn new>(name: S, steps: Vec) -> Self { + Self { + name: name.into(), + steps, + } + } + + /// The function name as declared in yaml + pub fn name(&self) -> &str { + self.name.as_str() + } + + /// some function descriptions are the same in YAML tests, which would result in + /// duplicate generated test function names. Deduplicate by appending incrementing number + pub fn unique_name(&self, seen_names: &mut HashSet) -> String { + let mut fn_name = self.name.replace(" ", "_").to_lowercase().to_snake_case(); + while !seen_names.insert(fn_name.clone()) { + lazy_static! 
{ + static ref ENDING_DIGITS_REGEX: Regex = Regex::new(r"^(.*?)_(\d*?)$").unwrap(); + } + if let Some(c) = ENDING_DIGITS_REGEX.captures(&fn_name) { + let name = c.get(1).unwrap().as_str(); + let n = c.get(2).unwrap().as_str().parse::().unwrap(); + fn_name = format!("{}_{}", name, n + 1); + } else { + fn_name.push_str("_2"); + } + } + fn_name + } +} + +/// Items to globally skip +#[derive(Deserialize)] +struct GlobalSkip { + features: Vec, + tests: BTreeMap>, +} + +pub fn generate_tests_from_yaml( + api: &Api, + suite: &TestSuite, + version: &semver::Version, + base_download_dir: &PathBuf, + download_dir: &PathBuf, + generated_dir: &PathBuf, +) -> Result<(), failure::Error> { + let skips = serde_yaml::from_str::(include_str!("./../skip.yml"))?; + let paths = fs::read_dir(download_dir)?; + for entry in paths { + if let Ok(entry) = entry { + if let Ok(file_type) = entry.file_type() { + if file_type.is_dir() { + generate_tests_from_yaml( + api, + suite, + version, + base_download_dir, + &entry.path(), + generated_dir, + )?; + } else if file_type.is_file() { + let path = entry.path(); + // skip non-yaml files + let extension = path.extension().unwrap_or_else(|| "".as_ref()); + if extension != "yml" && extension != "yaml" { + continue; + } + + let relative_path = path.strip_prefix(&base_download_dir)?; + let test_suite = { + let components = relative_path.components(); + let mut top_dir = "".to_string(); + for c in components { + if c != Component::RootDir { + top_dir = c.as_os_str().to_string_lossy().into_owned(); + break; + } + } + + match top_dir.as_str() { + "oss" => TestSuite::Oss, + "xpack" => TestSuite::XPack, + _ => panic!("Unknown test suite"), + } + }; + + if &test_suite != suite { + info!( + "skipping {}. 
compiling tests for {:?}", + relative_path.to_slash_lossy(), + suite + ); + continue; + } + + let yaml = fs::read_to_string(&entry.path()).unwrap(); + + // a yaml test can contain multiple yaml docs, so use yaml_rust to parse + let result = YamlLoader::load_from_str(&yaml); + if result.is_err() { + error!( + "skipping {}. cannot read as Yaml struct: {}", + relative_path.to_slash_lossy(), + result.err().unwrap().to_string() + ); + continue; + } + + let docs = result.unwrap(); + let mut test = + YamlTests::new(relative_path, version, &skips, test_suite, docs.len()); + + let results : Vec> = docs + .iter() + .map(|doc| { + let hash = doc + .as_hash() + .ok_or_else(|| failure::err_msg(format!( + "expected hash but found {:?}", + &doc + )))?; + + let (key, value) = hash.iter().next().unwrap(); + match (key, value) { + (Yaml::String(name), Yaml::Array(steps)) => { + let steps = parse_steps(api, steps)?; + let test_fn = TestFn::new(name, steps); + match name.as_str() { + "setup" => test.add_setup(test_fn), + "teardown" => test.add_teardown(test_fn), + _ => test.add_test_fn(test_fn), + }; + Ok(()) + } + (k, v) => { + Err(failure::err_msg(format!( + "expected string key and array value in {:?}, but found {:?} and {:?}", + relative_path, + &k, + &v, + ))) + } + } + }) + .collect(); + + //if there has been an Err in any step of the yaml test file, don't create a test for it + match ok_or_accumulate(&results) { + Ok(_) => write_test_file(test, relative_path, generated_dir)?, + Err(e) => { + info!("skipping {} because {}", relative_path.to_slash_lossy(), e) + } + } + } + } + } + } + + write_mod_files(&generated_dir)?; + + Ok(()) +} + +/// Writes a mod.rs file in each generated directory +fn write_mod_files(generated_dir: &PathBuf) -> Result<(), failure::Error> { + if !generated_dir.exists() { + fs::create_dir(generated_dir)?; + } + + let paths = fs::read_dir(generated_dir)?; + let mut mods = vec![]; + for path in paths { + if let Ok(entry) = path { + let file_type = 
entry.file_type().unwrap(); + let path = entry.path(); + let name = path.file_stem().unwrap().to_string_lossy(); + + let is_tests_common_dir = + name.as_ref() == "common" && path.parent().unwrap().file_name().unwrap() == "tests"; + + if name.as_ref() != "mod" { + if is_tests_common_dir { + mods.push("#[macro_use]".to_string()); + } + + mods.push(format!( + "pub mod {};", + path.file_stem().unwrap().to_string_lossy() + )); + } + + if file_type.is_dir() && !is_tests_common_dir { + write_mod_files(&entry.path())?; + } + } + } + + let mut path = generated_dir.clone(); + path.push("mod.rs"); + let mut file = File::create(&path)?; + let generated_mods: String = mods.join("\n"); + file.write_all(generated_mods.as_bytes())?; + Ok(()) +} + +fn test_file_path(relative_path: &Path) -> Result { + let mut relative = relative_path.to_path_buf(); + relative.set_extension(""); + // directories and files will form the module names so ensure they're valid module names + let clean: String = relative + .to_string_lossy() + .replace(".", "_") + .replace("-", "_"); + + relative = PathBuf::from(clean); + + let file_name = relative.file_name().unwrap().to_string_lossy().into_owned(); + // modules can't start with a number so prefix with underscore + if file_name.starts_with(char::is_numeric) { + relative.set_file_name(format!("_{}", file_name)); + } + + Ok(relative) +} + +fn write_test_file( + test: YamlTests, + relative_path: &Path, + generated_dir: &PathBuf, +) -> Result<(), failure::Error> { + let mut path = test_file_path(relative_path)?; + path = generated_dir.join(path); + path.set_extension("rs"); + + fs::create_dir_all(&path.parent().unwrap())?; + let mut file = File::create(&path)?; + file.write_all( + r#"/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. 
licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +// ----------------------------------------------- +// This file is generated, please do not edit it manually. +// Run the following in the root of the repo: +// +// cargo run -p yaml_test_runner -- --branch --token --path +// ----------------------------------------------- +"# + .as_bytes(), + )?; + + let tokens = test.build(); + let generated = api_generator::generator::rust_fmt(tokens.to_string())?; + let mut file = OpenOptions::new().append(true).open(&path)?; + file.write_all(generated.as_bytes())?; + file.write_all(b"\n")?; + + Ok(()) +} diff --git a/yaml_test_runner/src/github.rs b/yaml_test_runner/src/github.rs new file mode 100644 index 00000000..aee2f6c1 --- /dev/null +++ b/yaml_test_runner/src/github.rs @@ -0,0 +1,229 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +use io::Write; +use reqwest::header::{HeaderMap, HeaderValue, AUTHORIZATION}; +use serde::Deserialize; +use std::error::Error as StdError; +use std::fmt::Formatter; +use std::fs::File; +use std::path::PathBuf; +use std::{fs, io}; + +struct YamlTestSuite { + dir: String, + branch: String, + url: String, +} + +#[derive(Deserialize, Debug)] +struct Links { + #[serde(rename = "self")] + self_link: String, + git: String, + html: String, +} + +#[derive(Deserialize, Debug)] +struct GitHubContent { + name: String, + path: String, + sha: String, + size: i32, + url: String, + html_url: String, + git_url: String, + download_url: Option, + #[serde(rename = "type")] + ty: String, + #[serde(rename = "_links")] + links: Links, +} + +/// Downloads the yaml tests if not already downloaded +pub fn download_test_suites( + token: &str, + branch: &str, + download_dir: &PathBuf, +) -> Result<(), failure::Error> { + let mut last_downloaded_version = download_dir.clone(); + last_downloaded_version.push("last_downloaded_version"); + if last_downloaded_version.exists() { + let version = fs::read_to_string(&last_downloaded_version) + .expect("Unable to read last_downloaded_version of yaml tests"); + if version == branch { + info!("yaml tests for branch {} already downloaded", branch); + return Ok(()); + } + } + + let test_suite_map = [ + ("oss".to_string(), "https://api.github.com/repos/elastic/elasticsearch/contents/rest-api-spec/src/main/resources/rest-api-spec/test".to_string()), + ("xpack".to_string(), 
"https://api.github.com/repos/elastic/elasticsearch/contents/x-pack/plugin/src/test/resources/rest-api-spec/test".to_string())]; + + let test_suites: Vec = test_suite_map + .iter() + .map(|(dir, template_url)| { + let url = format!("{}?ref={}", template_url, branch); + YamlTestSuite { + dir: dir.to_string(), + branch: branch.to_string(), + url, + } + }) + .collect(); + + let mut headers = HeaderMap::new(); + let token_value = format!("token {}", token); + headers.append(AUTHORIZATION, HeaderValue::from_str(&token_value)?); + let client = reqwest::ClientBuilder::new() + .default_headers(headers) + .build() + .unwrap(); + + // delete existing yaml tests + if download_dir.exists() { + fs::remove_dir_all(&download_dir)?; + } + + fs::create_dir_all(download_dir)?; + + for suite in test_suites { + download_tests(&client, &suite, &download_dir)?; + } + + File::create(last_downloaded_version) + .expect("failed to create last_downloaded_version file") + .write_all(branch.as_bytes()) + .expect("unable to write branch to last_downloaded_version file"); + + Ok(()) +} + +fn download_tests( + client: &reqwest::Client, + suite: &YamlTestSuite, + download_dir: &PathBuf, +) -> Result<(), DownloadError> { + let suite_dir = { + let mut d = download_dir.clone(); + d.push(&suite.dir); + d + }; + + fs::create_dir_all(&suite_dir)?; + info!("Downloading {} tests from {}", &suite.dir, &suite.branch); + download(client, &suite.url, &suite_dir)?; + info!( + "Done downloading {} tests from {}", + &suite.dir, &suite.branch + ); + + Ok(()) +} + +fn download( + client: &reqwest::Client, + url: &str, + download_dir: &PathBuf, +) -> Result<(), DownloadError> { + let mut response = client.get(url).send()?; + + let remaining_rate_limit: i32 = response + .headers() + .get("X-RateLimit-Remaining") + .unwrap() + .to_str() + .unwrap() + .parse() + .unwrap(); + + if remaining_rate_limit < 10 { + warn!("Remaining rate limit: {}", remaining_rate_limit); + } + + let contents: Vec = response.json()?; + for 
content in contents { + let content_path = { + let mut d = download_dir.clone(); + d.push(&content.name); + d + }; + + match content.ty.as_str() { + "file" => { + let mut file = File::create(content_path)?; + // no need to send the token for downloading content + let mut file_response = reqwest::get(&content.download_url.unwrap())?; + io::copy(&mut file_response, &mut file)?; + } + "dir" => { + fs::create_dir_all(&content_path)?; + download(client, &content.url, &content_path)?; + } + t => { + return Err(DownloadError::InvalidType(format!( + "Unexpected GitHub content type: {}", + t + ))) + } + } + } + + Ok(()) +} + +#[derive(Debug)] +pub enum DownloadError { + IoErr(io::Error), + HttpError(reqwest::Error), + InvalidType(String), +} + +impl std::fmt::Display for DownloadError { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> { + match self { + DownloadError::IoErr(err) => write!(f, "IoErr {}", err), + DownloadError::HttpError(err) => write!(f, "HttpError {}", err), + DownloadError::InvalidType(s) => write!(f, "InvalidType {}", s), + } + } +} + +impl StdError for DownloadError { + #[allow(warnings)] + fn description(&self) -> &str { + match self { + DownloadError::IoErr(err) => err.description(), + DownloadError::HttpError(err) => err.description(), + DownloadError::InvalidType(s) => s.as_ref(), + } + } +} + +impl From for DownloadError { + fn from(e: io::Error) -> Self { + DownloadError::IoErr(e) + } +} + +impl From for DownloadError { + fn from(e: reqwest::Error) -> Self { + DownloadError::HttpError(e) + } +} diff --git a/yaml_test_runner/src/main.rs b/yaml_test_runner/src/main.rs new file mode 100644 index 00000000..837853a1 --- /dev/null +++ b/yaml_test_runner/src/main.rs @@ -0,0 +1,162 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. 
licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +extern crate api_generator; +#[macro_use] +extern crate lazy_static; +#[macro_use] +extern crate log; +#[macro_use] +extern crate quote; + +extern crate simple_logger; + +use clap::{App, Arg}; +use log::Level; +use std::fs; +use std::path::PathBuf; +use std::process::exit; + +mod generator; +mod github; +mod regex; +mod step; + +use generator::TestSuite; + +fn main() -> Result<(), failure::Error> { + simple_logger::init_with_level(Level::Info).unwrap(); + + let matches = App::new(env!("CARGO_PKG_NAME")) + .about(env!("CARGO_PKG_DESCRIPTION")) + .arg(Arg::with_name("branch") + .short("b") + .long("branch") + .value_name("BRANCH") + .help("The git branch in the Elasticsearch repository from which to download yaml tests") + .required(true) + .default_value("master") + .takes_value(true)) + .arg(Arg::with_name("token") + .short("t") + .long("token") + .value_name("TOKEN") + .help("The GitHub access token. Required to increase the rate limit to be able to download all yaml tests") + .required(true) + .takes_value(true)) + .arg(Arg::with_name("path") + .short("p") + .long("path") + .value_name("PATH") + .help("The path to the rest API specs. Required to build a representation of the client API.") + .required(true) + .takes_value(true)) + .get_matches(); + + // Get the version from ELASTICSEARCH_VERSION environment variable, if set. 
+ // any prerelease part needs to be trimmed because the semver crate only allows + // a version with a prerelease to match against predicates, if at least one predicate + // has a prerelease. See + // https://github.com/steveklabnik/semver/blob/afa5fc853cb4d6d2b1329579e5528f86f3b550f9/src/version_req.rs#L319-L331 + let (suite, version) = match std::env::var("ELASTICSEARCH_VERSION") { + Ok(v) => { + let suite = if v.contains("oss") { + TestSuite::Oss + } else { + TestSuite::XPack + }; + + let v = v + .split(':') + .next_back() + .unwrap() + .trim_end_matches(|c: char| c.is_alphabetic() || c == '-'); + + (suite, semver::Version::parse(v)?) + } + Err(_) => { + error!("ELASTICSEARCH_VERSION environment variable must be set to compile tests"); + exit(1); + } + }; + + info!("Using version {:?} to compile tests", &version); + + let branch = matches + .value_of("branch") + .expect("missing 'branch' argument"); + let token = matches.value_of("token").expect("missing 'token' argument"); + let path = matches.value_of("path").expect("missing 'path' argument"); + let rest_specs_dir = PathBuf::from(path); + let download_dir = PathBuf::from(format!("./{}/yaml", env!("CARGO_PKG_NAME"))); + let generated_dir = PathBuf::from(format!("./{}/tests", env!("CARGO_PKG_NAME"))); + + github::download_test_suites(token, branch, &download_dir)?; + + let mut last_downloaded_rest_spec_branch = rest_specs_dir.clone(); + last_downloaded_rest_spec_branch.push("last_downloaded_version"); + + let mut download_rest_specs = true; + if last_downloaded_rest_spec_branch.exists() { + let version = fs::read_to_string(last_downloaded_rest_spec_branch) + .expect("Could not read rest specs last_downloaded version into string"); + + if version == branch { + info!( + "rest specs for branch {} already downloaded in {:?}", + branch, &rest_specs_dir + ); + download_rest_specs = false; + } + } + + if download_rest_specs { + api_generator::rest_spec::download_specs(branch, &rest_specs_dir)?; + } + + let api = 
api_generator::generator::read_api(branch, &rest_specs_dir)?; + + // delete everything under the generated_dir except common dir + if generated_dir.exists() { + let entries = fs::read_dir(&generated_dir)?; + for entry in entries { + if let Ok(e) = entry { + if let Ok(f) = e.file_type() { + if e.file_name() != "common" { + if f.is_dir() { + fs::remove_dir_all(e.path())?; + } else if f.is_file() { + fs::remove_file(e.path())?; + } + } + } + } + } + } + + generator::generate_tests_from_yaml( + &api, + &suite, + &version, + &download_dir, + &download_dir, + &generated_dir, + )?; + + Ok(()) +} diff --git a/yaml_test_runner/src/regex.rs b/yaml_test_runner/src/regex.rs new file mode 100644 index 00000000..cdb538b1 --- /dev/null +++ b/yaml_test_runner/src/regex.rs @@ -0,0 +1,76 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +use ::regex::{Captures, Regex}; + +lazy_static! 
{ + // replace usages of "$.*" with the captured value + pub static ref SET_REGEX: Regex = + Regex::new(r#""\$(.*?)""#).unwrap(); + + // replace usages of "${.*}" with the captured value + pub static ref SET_QUOTED_DELIMITED_REGEX: Regex = + Regex::new(r#""\$\{(.*?)\}""#).unwrap(); + + // replace usages of ${.*} with the captured value + pub static ref SET_DELIMITED_REGEX: Regex = + Regex::new(r#"\$\{(.*?)\}"#).unwrap(); + + // include i64 suffix on whole numbers larger than i32 + // will match on numbers with 10 or more digits, with the replace + // call testing against i32::max_value + pub static ref INT_REGEX: Regex = + regex::Regex::new(r"([,:\[{]\s*)(\d{10,}?)(\s*[,}\]])").unwrap(); +} + +/// cleans up a regex as specified in YAML to one that will work with the regex crate. +pub fn clean_regex>(s: S) -> String { + s.as_ref() + .trim() + .trim_matches('/') + .replace("\\/", "/") + .replace("\\:", ":") + .replace("\\#", "#") + .replace("\\%", "%") + .replace("\\'", "'") + .replace("\\`", "`") +} + +/// Replaces a "set" step value with a variable +pub fn replace_set>(s: S) -> String { + let mut s = SET_QUOTED_DELIMITED_REGEX + .replace_all(s.as_ref(), "$1") + .into_owned(); + + s = SET_DELIMITED_REGEX + .replace_all(s.as_ref(), "$1") + .into_owned(); + + SET_REGEX.replace_all(s.as_ref(), "$1").into_owned() +} + +/// Replaces all integers in a string to suffix with i64, to ensure that numbers +/// larger than i32 will be handled correctly when passed to json! 
macro +pub fn replace_i64>(s: S) -> String { + INT_REGEX + .replace_all(s.as_ref(), |c: &Captures| match &c[2].parse::() { + Ok(i) if *i > i32::max_value() as i64 => format!("{}{}i64{}", &c[1], &c[2], &c[3]), + _ => c[0].to_string(), + }) + .into_owned() +} diff --git a/yaml_test_runner/src/step/comparison.rs b/yaml_test_runner/src/step/comparison.rs new file mode 100644 index 00000000..78ecc912 --- /dev/null +++ b/yaml_test_runner/src/step/comparison.rs @@ -0,0 +1,101 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +use super::Step; +use crate::step::Expr; +use quote::{ToTokens, Tokens}; +use yaml_rust::Yaml; + +pub const OPERATORS: [&str; 4] = ["lt", "lte", "gt", "gte"]; + +pub struct Comparison { + pub(crate) expr: Expr, + value: Yaml, + op: String, +} + +impl From for Step { + fn from(comparison: Comparison) -> Self { + Step::Comparison(comparison) + } +} + +impl Comparison { + pub fn try_parse(yaml: &Yaml, op: &str) -> Result { + let hash = yaml + .as_hash() + .ok_or_else(|| failure::err_msg(format!("expected hash but found {:?}", yaml)))?; + + let (k, v) = hash.iter().next().unwrap(); + let expr = k + .as_str() + .ok_or_else(|| failure::err_msg(format!("expected string key but found {:?}", k)))?; + + Ok(Comparison { + expr: expr.into(), + value: v.clone(), + op: op.into(), + }) + } + + fn assert(&self, t: T, expr: &str, op: &str, tokens: &mut Tokens) { + let ident = syn::Ident::from(expr); + let op_ident = syn::Ident::from(op); + tokens.append(quote! { + assert_comparison!(&json#ident, #op_ident #t); + }); + } +} + +impl ToTokens for Comparison { + fn to_tokens(&self, tokens: &mut Tokens) { + let expr = self.expr.expression(); + let op = match self.op.as_str() { + "lte" => "<=", + "lt" => "<", + "gt" => ">", + "gte" => ">=", + n => panic!("unsupported op {}", n), + }; + + match self.value.as_i64() { + Some(i) => self.assert(i, &expr, op, tokens), + None => match self.value.as_f64() { + Some(f) => self.assert(f, &expr, op, tokens), + None => { + match self.value.as_str() { + // handle "set" values + Some(s) if s.starts_with('$') => { + let s = s + .trim_start_matches('$') + .trim_start_matches('{') + .trim_end_matches('}'); + let expr_ident = syn::Ident::from(expr.as_str()); + let ident = syn::Ident::from(s); + let op_ident = syn::Ident::from(op); + tokens.append(quote! 
{ + assert_comparison_from_set_value!(&json#expr_ident, #op_ident #ident); + }); + } + _ => panic!("Expected i64 or f64 but found {:?}", &self.value), + } + } + }, + } + } +} diff --git a/yaml_test_runner/src/step/contains.rs b/yaml_test_runner/src/step/contains.rs new file mode 100644 index 00000000..18c4b885 --- /dev/null +++ b/yaml_test_runner/src/step/contains.rs @@ -0,0 +1,92 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +use super::Step; +use crate::step::{json_string_from_yaml, Expr}; +use quote::{ToTokens, Tokens}; +use yaml_rust::Yaml; + +pub struct Contains { + expr: Expr, + value: Yaml, +} + +impl From for Step { + fn from(contains: Contains) -> Self { + Step::Contains(contains) + } +} + +impl Contains { + pub fn try_parse(yaml: &Yaml) -> Result { + let hash = yaml + .as_hash() + .ok_or_else(|| failure::err_msg(format!("expected hash but found {:?}", yaml)))?; + + let (k, v) = hash.iter().next().unwrap(); + let expr = k.as_str().unwrap().trim(); + Ok(Contains { + expr: expr.into(), + value: v.clone(), + }) + } +} + +impl ToTokens for Contains { + fn to_tokens(&self, tokens: &mut Tokens) { + let expr = self.expr.expression(); + let ident = syn::Ident::from(expr.as_str()); + + match &self.value { + Yaml::Real(r) => { + let f = r.parse::().unwrap(); + tokens.append(quote! { + assert_contains!(json#ident, json!(#f)); + }); + } + Yaml::Integer(i) => { + tokens.append(quote! { + assert_contains!(json#ident, json!(#i)); + }); + } + Yaml::String(s) => { + tokens.append(quote! { + assert_contains!(json#ident, json!(#s)); + }); + } + Yaml::Boolean(b) => { + tokens.append(quote! { + assert_contains!(json#ident, json!(#b)); + }); + } + yaml if yaml.is_array() || yaml.as_hash().is_some() => { + let json = { + let s = json_string_from_yaml(yaml); + syn::Ident::from(s) + }; + + tokens.append(quote! { + assert_contains!(json#ident, json!(#json)); + }); + } + yaml => { + panic!("Bad yaml value {:?}", &yaml); + } + } + } +} diff --git a/yaml_test_runner/src/step/do.rs b/yaml_test_runner/src/step/do.rs new file mode 100644 index 00000000..b9b5e92b --- /dev/null +++ b/yaml_test_runner/src/step/do.rs @@ -0,0 +1,917 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. 
licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +use super::{ok_or_accumulate, Step}; +use crate::regex::clean_regex; +use crate::regex::*; +use api_generator::generator::{Api, ApiEndpoint, TypeKind}; +use inflector::Inflector; +use itertools::Itertools; +use quote::{ToTokens, Tokens}; +use std::collections::BTreeMap; +use yaml_rust::{Yaml, YamlEmitter}; + +/// A catch expression on a do step +pub struct Catch(String); + +impl Catch { + fn needs_response_body(&self) -> bool { + self.0.starts_with('/') + } +} + +impl ToTokens for Catch { + fn to_tokens(&self, tokens: &mut Tokens) { + fn http_status_code(status_code: u16, tokens: &mut Tokens) { + tokens.append(quote! { + assert_status_code!(response.status_code(), #status_code); + }); + } + + match self.0.as_ref() { + "bad_request" => http_status_code(400, tokens), + "unauthorized" => http_status_code(401, tokens), + "forbidden" => http_status_code(403, tokens), + "missing" => http_status_code(404, tokens), + "request_timeout" => http_status_code(408, tokens), + "conflict" => http_status_code(409, tokens), + "request" => { + tokens.append(quote! { + assert_request_status_code!(response.status_code()); + }); + } + "unavailable" => http_status_code(503, tokens), + "param" => { + // Not possible to pass a bad param to the client so ignore. + } + s => { + let t = clean_regex(s); + tokens.append(quote! 
{ + assert_regex_match!(&text, #t); + }); + } + } + } +} + +pub struct Do { + api_call: ApiCall, + warnings: Vec, + allowed_warnings: Vec, + catch: Option, +} + +impl ToTokens for Do { + fn to_tokens(&self, tokens: &mut Tokens) { + let _ = self.to_tokens(false, tokens); + } +} + +impl From for Step { + fn from(d: Do) -> Self { + Step::Do(d) + } +} + +impl Do { + pub fn to_tokens(&self, mut read_response: bool, tokens: &mut Tokens) -> bool { + self.api_call.to_tokens(tokens); + + // only assert that there are no warnings if expected warnings is empty and not allowing warnings + if !self.warnings.is_empty() { + tokens.append(quote! { + let warnings: Vec<&str> = response.warning_headers().collect(); + }); + for warning in &self.warnings { + tokens.append(quote! { + assert_warnings_contain!(warnings, #warning); + }); + } + } else if !self.allowed_warnings.is_empty() { + tokens.append(quote! { + let warnings: Vec<&str> = response.warning_headers().collect(); + assert_warnings_is_empty!(warnings); + }); + } + + if let Some(c) = &self.catch { + if !read_response && c.needs_response_body() { + read_response = true; + tokens.append(quote! { + let (method, status_code, text, json) = client::read_response(response).await?; + }); + } + c.to_tokens(tokens); + } + + match &self.api_call.ignore { + Some(i) => tokens.append(quote! 
{ + assert_response_success_or!(response, #i); + }), + None => (), + } + + read_response + } + + pub fn try_parse(api: &Api, yaml: &Yaml) -> Result { + let hash = yaml + .as_hash() + .ok_or_else(|| failure::err_msg(format!("expected hash but found {:?}", yaml)))?; + + let mut call: Option<(&str, &Yaml)> = None; + let mut headers = BTreeMap::new(); + let mut warnings: Vec = Vec::new(); + let mut allowed_warnings: Vec = Vec::new(); + let mut catch = None; + + fn to_string_vec(v: &Yaml) -> Vec { + v.as_vec() + .map(|a| a.iter().map(|y| y.as_str().unwrap().to_string()).collect()) + .unwrap() + } + + let results: Vec> = hash + .iter() + .map(|(k, v)| { + let key = k.as_str().ok_or_else(|| { + failure::err_msg(format!("expected string but found {:?}", k)) + })?; + + match key { + "headers" => { + let hash = v.as_hash().ok_or_else(|| { + failure::err_msg(format!("expected hash but found {:?}", v)) + })?; + for (hk, hv) in hash.iter() { + let h = hk.as_str().ok_or_else(|| { + failure::err_msg(format!("expected string but found {:?}", hk)) + })?; + let v = hv.as_str().ok_or_else(|| { + failure::err_msg(format!("expected string but found {:?}", hv)) + })?; + headers.insert(h.into(), v.into()); + } + Ok(()) + } + "catch" => { + catch = v.as_str().map(|s| Catch(s.to_string())); + Ok(()) + } + "node_selector" => Ok(()), + "warnings" => { + warnings = to_string_vec(v); + Ok(()) + } + "allowed_warnings" => { + allowed_warnings = to_string_vec(v); + Ok(()) + } + api_call => { + call = Some((api_call, v)); + Ok(()) + } + } + }) + .collect(); + + ok_or_accumulate(&results)?; + + let (call, value) = call.ok_or_else(|| failure::err_msg("no API found in do"))?; + let endpoint = api + .endpoint_for_api_call(call) + .ok_or_else(|| failure::err_msg(format!(r#"no API found for "{}""#, call)))?; + let api_call = ApiCall::try_from(api, endpoint, value, headers)?; + + Ok(Do { + api_call, + catch, + warnings, + allowed_warnings, + }) + } + + pub fn namespace(&self) -> Option<&String> { + 
self.api_call.namespace.as_ref() + } +} + +/// The components of an API call +pub struct ApiCall { + pub namespace: Option, + function: syn::Ident, + parts: Option, + params: Option, + headers: BTreeMap, + body: Option, + ignore: Option, +} + +impl ToTokens for ApiCall { + fn to_tokens(&self, tokens: &mut Tokens) { + let function = &self.function; + let parts = &self.parts; + let params = &self.params; + let body = &self.body; + + let headers: Vec = self + .headers + .iter() + .map(|(k, v)| { + // header names **must** be lowercase to satisfy Header lib + let k = k.to_lowercase(); + + // handle "set" value in headers + if let Some(c) = SET_DELIMITED_REGEX.captures(v) { + let token = syn::Ident::from(c.get(1).unwrap().as_str()); + let replacement = SET_DELIMITED_REGEX.replace_all(v, "{}"); + quote! { .header( + HeaderName::from_static(#k), + HeaderValue::from_str(format!(#replacement, #token.as_str().unwrap()).as_ref())?) + } + } else { + quote! { .header( + HeaderName::from_static(#k), + HeaderValue::from_static(#v)) + } + } + }) + .collect(); + + tokens.append(quote! 
{ + let response = client.#function(#parts) + #(#headers)* + #params + #body + .send() + .await?; + }); + } +} + +impl ApiCall { + /// Try to create an API call + pub fn try_from( + api: &Api, + endpoint: &ApiEndpoint, + yaml: &Yaml, + headers: BTreeMap, + ) -> Result { + let hash = yaml + .as_hash() + .ok_or_else(|| failure::err_msg(format!("expected hash but found {:?}", yaml)))?; + + let mut parts: Vec<(&str, &Yaml)> = vec![]; + let mut params: Vec<(&str, &Yaml)> = vec![]; + let mut body: Option = None; + let mut ignore: Option = None; + + // work out what's a URL part and what's a param in the supplied + // arguments for the API call + for (k, v) in hash.iter() { + match k.as_str().unwrap() { + "body" => body = Self::generate_body(endpoint, v), + "ignore" => { + ignore = match v.as_i64() { + Some(i) => Some(i as u16), + // handle ignore as an array of i64 + None => Some(v.as_vec().unwrap()[0].as_i64().unwrap() as u16), + } + } + key if endpoint.params.contains_key(key) || api.common_params.contains_key(key) => { + params.push((key, v)) + } + key => parts.push((key, v)), + } + } + + let api_call = endpoint.full_name.as_ref().unwrap(); + let parts = Self::generate_parts(api_call, endpoint, &parts)?; + let params = Self::generate_params(api, endpoint, ¶ms)?; + let function = syn::Ident::from(api_call.replace(".", "().")); + let namespace: Option = if api_call.contains('.') { + let namespaces: Vec<&str> = api_call.splitn(2, '.').collect(); + Some(namespaces[0].to_string()) + } else { + None + }; + + Ok(ApiCall { + namespace, + function, + parts, + params, + headers, + body, + ignore, + }) + } + + fn generate_enum( + enum_name: &str, + variant: &str, + options: &[serde_json::Value], + ) -> Result { + if !variant.is_empty() && !options.contains(&serde_json::Value::String(variant.to_owned())) + { + return Err(failure::err_msg(format!( + "options {:?} does not contain value {}", + &options, variant + ))); + } + + let e: String = enum_name.to_pascal_case(); + let 
enum_name = syn::Ident::from(e.as_str()); + let variant = if variant.is_empty() { + // TODO: Should we simply omit empty Refresh tests? + if e == "Refresh" { + syn::Ident::from("True") + } else if e == "Size" { + syn::Ident::from("Unspecified") + } else { + return Err(failure::err_msg(format!( + "unhandled empty value for {}", + &e + ))); + } + } else { + syn::Ident::from(variant.to_pascal_case()) + }; + + Ok(quote!(#enum_name::#variant)) + } + + fn generate_params( + api: &Api, + endpoint: &ApiEndpoint, + params: &[(&str, &Yaml)], + ) -> Result, failure::Error> { + match params.len() { + 0 => Ok(None), + _ => { + let mut tokens = Tokens::new(); + for (n, v) in params { + let param_ident = + syn::Ident::from(api_generator::generator::code_gen::valid_name(n)); + + let ty = match endpoint.params.get(*n) { + Some(t) => Ok(t), + None => match api.common_params.get(*n) { + Some(t) => Ok(t), + None => Err(failure::err_msg(format!(r#"no param found for "{}""#, n))), + }, + }?; + + let kind = &ty.ty; + + match v { + Yaml::String(ref s) => { + let is_set_value = s.starts_with('$'); + + match kind { + TypeKind::Enum => { + if n == &"expand_wildcards" { + // expand_wildcards might be defined as a comma-separated + // string. e.g. + let idents: Vec> = s + .split(',') + .collect::>() + .iter() + .map(|e| Self::generate_enum(n, e, &ty.options)) + .collect(); + + match ok_or_accumulate(&idents) { + Ok(_) => { + let idents: Vec = idents + .into_iter() + .filter_map(Result::ok) + .collect(); + + tokens.append(quote! { + .#param_ident(&[#(#idents),*]) + }); + } + Err(e) => return Err(failure::err_msg(e)), + } + } else { + let e = Self::generate_enum(n, s.as_str(), &ty.options)?; + tokens.append(quote! { + .#param_ident(#e) + }); + } + } + TypeKind::List => { + let values: Vec<&str> = s.split(',').collect(); + tokens.append(quote! { + .#param_ident(&[#(#values),*]) + }) + } + TypeKind::Boolean => match s.parse::() { + Ok(b) => tokens.append(quote! 
{ + .#param_ident(#b) + }), + Err(e) => { + return Err(failure::err_msg(format!( + r#"cannot parse bool from "{}" for param "{}", {}"#, + s, + n, + e.to_string() + ))) + } + }, + TypeKind::Double => match s.parse::() { + Ok(f) => tokens.append(quote! { + .#param_ident(#f) + }), + Err(e) => { + return Err(failure::err_msg(format!( + r#"cannot parse f64 from "{}" for param "{}", {}"#, + s, + n, + e.to_string() + ))) + } + }, + TypeKind::Integer => { + if is_set_value { + let set_value = Self::from_set_value(s); + tokens.append(quote! { + .#param_ident(#set_value.as_i64().unwrap() as i32) + }); + } else { + match s.parse::() { + Ok(i) => tokens.append(quote! { + .#param_ident(#i) + }), + Err(e) => { + return Err(failure::err_msg(format!( + r#"cannot parse i32 from "{}" for param "{}", {}"#, + s, + n, + e.to_string() + ))) + } + } + } + } + TypeKind::Number | TypeKind::Long => { + if is_set_value { + let set_value = Self::from_set_value(s); + tokens.append(quote! { + .#param_ident(#set_value.as_i64().unwrap()) + }); + } else { + let i = s.parse::()?; + tokens.append(quote! { + .#param_ident(#i) + }); + } + } + _ => { + // handle set values + let t = if is_set_value { + let set_value = Self::from_set_value(s); + quote! { #set_value.as_str().unwrap() } + } else { + quote! { #s } + }; + + tokens.append(quote! { + .#param_ident(#t) + }) + } + } + } + Yaml::Boolean(ref b) => match kind { + TypeKind::Enum => { + let enum_name = syn::Ident::from(n.to_pascal_case()); + let variant = syn::Ident::from(b.to_string().to_pascal_case()); + tokens.append(quote! { + .#param_ident(#enum_name::#variant) + }) + } + TypeKind::List => { + // TODO: _source filter can be true|false|list of strings + let s = b.to_string(); + tokens.append(quote! { + .#param_ident(&[#s]) + }) + } + _ => { + tokens.append(quote! { + .#param_ident(#b) + }); + } + }, + Yaml::Integer(ref i) => match kind { + TypeKind::String => { + let s = i.to_string(); + tokens.append(quote! 
{ + .#param_ident(#s) + }) + } + TypeKind::Integer => { + // yaml-rust parses all as i64 + let int = *i as i32; + tokens.append(quote! { + .#param_ident(#int) + }); + } + TypeKind::Float => { + // yaml-rust parses all as i64 + let f = *i as f32; + tokens.append(quote! { + .#param_ident(#f) + }); + } + TypeKind::Double => { + // yaml-rust parses all as i64 + let f = *i as f64; + tokens.append(quote! { + .#param_ident(#f) + }); + } + _ => { + tokens.append(quote! { + .#param_ident(#i) + }); + } + }, + Yaml::Array(arr) => { + // only support param string arrays + let result: Vec<&String> = arr + .iter() + .map(|i| match i { + Yaml::String(s) => Ok(s), + y => Err(failure::err_msg(format!( + "unsupported array value {:?}", + y + ))), + }) + .filter_map(Result::ok) + .collect(); + + if n == &"expand_wildcards" { + let result: Vec> = result + .iter() + .map(|s| Self::generate_enum(n, s.as_str(), &ty.options)) + .collect(); + + match ok_or_accumulate(&result) { + Ok(_) => { + let result: Vec = + result.into_iter().filter_map(Result::ok).collect(); + + tokens.append(quote! { + .#param_ident(&[#(#result),*]) + }); + } + Err(e) => return Err(failure::err_msg(e)), + } + } else { + tokens.append(quote! { + .#param_ident(&[#(#result),*]) + }); + } + } + Yaml::Real(r) => { + let f = r.parse::()?; + tokens.append(quote! { + .#param_ident(#f) + }); + } + _ => println!("unsupported value {:?} for param {}", v, n), + } + } + + Ok(Some(tokens)) + } + } + } + + fn from_set_value(s: &str) -> Tokens { + // check if the entire string is a token + if s.starts_with('$') { + let ident = syn::Ident::from( + s.trim_start_matches('$') + .trim_start_matches('{') + .trim_end_matches('}'), + ); + quote! { #ident } + } else { + // only part of the string is a token, so substitute + let token = syn::Ident::from( + SET_DELIMITED_REGEX + .captures(s) + .unwrap() + .get(1) + .unwrap() + .as_str(), + ); + let replacement = SET_DELIMITED_REGEX.replace_all(s, "{}"); + quote! 
{ Value::String(format!(#replacement, #token.as_str().unwrap())) } + } + } + + fn generate_parts( + api_call: &str, + endpoint: &ApiEndpoint, + parts: &[(&str, &Yaml)], + ) -> Result, failure::Error> { + // TODO: ideally, this should share the logic from EnumBuilder + let enum_name = { + let name = api_call.to_pascal_case().replace(".", ""); + syn::Ident::from(format!("{}Parts", name)) + }; + + // Enum variants containing no URL parts where there is only a single API URL, + // are not required to be passed in the API. + // + // Also, short circuit for tests where the only parts specified are null + // e.g. security API test. It seems these should simply omit the value though... + if parts.is_empty() || parts.iter().all(|(_, v)| v.is_null()) { + let param_counts = endpoint + .url + .paths + .iter() + .map(|p| p.path.params().len()) + .collect::>(); + + // check there's actually a None value + if !param_counts.contains(&0) { + return Err(failure::err_msg(format!( + r#"no path for "{}" API with no url parts"#, + api_call + ))); + } + + return match endpoint.url.paths.len() { + 1 => Ok(None), + _ => Ok(Some(quote!(#enum_name::None))), + }; + } + + let path = match endpoint.url.paths.len() { + 1 => { + let path = &endpoint.url.paths[0]; + if path.path.params().len() == parts.len() { + Some(path) + } else { + None + } + } + _ => { + // get the matching path parts + let matching_path_parts = endpoint + .url + .paths + .iter() + .filter(|path| { + let p = path.path.params(); + if p.len() != parts.len() { + return false; + } + + let contains = parts + .iter() + .filter_map(|i| if p.contains(&i.0) { Some(()) } else { None }) + .collect::>(); + contains.len() == parts.len() + }) + .collect::>(); + + match matching_path_parts.len() { + 0 => None, + _ => Some(matching_path_parts[0]), + } + } + } + .ok_or_else(|| { + failure::err_msg(format!( + r#"no path for "{}" API with url parts {:?}"#, + &api_call, parts + )) + })?; + + let path_parts = path.path.params(); + let 
variant_name = { + let v = path_parts + .iter() + .map(|k| k.to_pascal_case()) + .collect::>() + .join(""); + syn::Ident::from(v) + }; + + let part_tokens: Vec> = parts + .iter() + // don't rely on URL parts being ordered in the yaml test in the same order as specified + // in the REST spec. + .sorted_by(|(p, _), (p2, _)| { + let f = path_parts.iter().position(|x| x == p).unwrap(); + let s = path_parts.iter().position(|x| x == p2).unwrap(); + f.cmp(&s) + }) + .map(|(p, v)| { + let ty = path.parts.get(*p).ok_or_else(|| { + failure::err_msg(format!( + r#"no url part found for "{}" in {}"#, + p, &path.path + )) + })?; + + match v { + Yaml::String(s) => { + let is_set_value = s.starts_with('$') || s.contains("${"); + + match ty.ty { + TypeKind::List => { + let values: Vec = s + .split(',') + .map(|s| { + if is_set_value { + let set_value = Self::from_set_value(s); + quote! { #set_value.as_str().unwrap() } + } else { + quote! { #s } + } + }) + .collect(); + Ok(quote! { &[#(#values),*] }) + } + TypeKind::Long => { + if is_set_value { + let set_value = Self::from_set_value(s); + Ok(quote! { #set_value.as_i64().unwrap() }) + } else { + let l = s.parse::().unwrap(); + Ok(quote! { #l }) + } + } + _ => { + if is_set_value { + let set_value = Self::from_set_value(s); + Ok(quote! { #set_value.as_str().unwrap() }) + } else { + Ok(quote! { #s }) + } + } + } + } + Yaml::Boolean(b) => { + let s = b.to_string(); + Ok(quote! { #s }) + } + Yaml::Integer(i) => match ty.ty { + TypeKind::Long => Ok(quote! { #i }), + _ => { + let s = i.to_string(); + Ok(quote! 
{ #s }) + } + }, + Yaml::Array(arr) => { + // only support param string arrays + let result: Vec<_> = arr + .iter() + .map(|i| match i { + Yaml::String(s) => Ok(s), + y => Err(failure::err_msg(format!( + "unsupported array value {:?}", + y + ))), + }) + .collect(); + + match ok_or_accumulate(&result) { + Ok(_) => { + let result: Vec<_> = + result.into_iter().filter_map(Result::ok).collect(); + + match ty.ty { + // Some APIs specify a part is a string in the REST API spec + // but is really a list, which is what a YAML test might pass + // e.g. security.get_role_mapping. + // see https://github.com/elastic/elasticsearch/pull/53207 + TypeKind::String => { + let s = result.iter().join(","); + Ok(quote! { #s }) + } + _ => Ok(quote! { &[#(#result),*] }), + } + } + Err(e) => Err(failure::err_msg(e)), + } + } + _ => Err(failure::err_msg(format!("unsupported value {:?}", v))), + } + }) + .collect(); + + match ok_or_accumulate(&part_tokens) { + Ok(_) => { + let part_tokens: Vec = + part_tokens.into_iter().filter_map(Result::ok).collect(); + Ok(Some( + quote! { #enum_name::#variant_name(#(#part_tokens),*) }, + )) + } + Err(e) => Err(failure::err_msg(e)), + } + } + + /// Creates the body function call from a YAML value. + /// + /// When reading a body from the YAML test, it'll be converted to a Yaml variant, + /// usually a Hash. 
To get the JSON representation back requires converting + /// back to JSON + fn generate_body(endpoint: &ApiEndpoint, v: &Yaml) -> Option { + match v { + Yaml::Null => None, + Yaml::String(s) => { + let json = { + let json = replace_set(s); + replace_i64(json) + }; + if endpoint.supports_nd_body() { + // a newline delimited API body may be expressed + // as a scalar string literal style where line breaks are significant (using |) + // or where lines breaks are folded to an empty space unless it ends on an + // empty or a more-indented line (using >) + // see https://yaml.org/spec/1.2/spec.html#id2760844 + // + // need to trim the trailing newline to be able to differentiate... + let contains_newlines = json.trim_end_matches('\n').contains('\n'); + let split = if contains_newlines { + json.split('\n').collect::>() + } else { + json.split(char::is_whitespace).collect::>() + }; + + let values: Vec = split + .into_iter() + .filter(|s| !s.is_empty()) + .map(|s| { + let ident = syn::Ident::from(s); + quote! 
{ JsonBody::from(json!(#ident)) } + }) + .collect(); + Some(quote!(.body(vec![#(#values),*]))) + } else { + let ident = syn::Ident::from(json); + Some(quote!(.body(json!{#ident}))) + } + } + _ => { + let mut s = String::new(); + { + let mut emitter = YamlEmitter::new(&mut s); + emitter.dump(v).unwrap(); + } + + if endpoint.supports_nd_body() { + let values: Vec = serde_yaml::from_str(&s).unwrap(); + let json: Vec = values + .iter() + .map(|value| { + let mut json = serde_json::to_string(&value).unwrap(); + if value.is_string() { + json = replace_set(&json); + let ident = syn::Ident::from(json); + quote!(#ident) + } else { + json = replace_set(json); + json = replace_i64(json); + let ident = syn::Ident::from(json); + quote!(JsonBody::from(json!(#ident))) + } + }) + .collect(); + Some(quote!(.body(vec![ #(#json),* ]))) + } else { + let value: serde_json::Value = serde_yaml::from_str(&s).unwrap(); + let mut json = serde_json::to_string_pretty(&value).unwrap(); + json = replace_set(json); + json = replace_i64(json); + let ident = syn::Ident::from(json); + + Some(quote!(.body(json!{#ident}))) + } + } + } + } +} diff --git a/yaml_test_runner/src/step/is_false.rs b/yaml_test_runner/src/step/is_false.rs new file mode 100644 index 00000000..2b935cc5 --- /dev/null +++ b/yaml_test_runner/src/step/is_false.rs @@ -0,0 +1,58 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +use super::Step; +use crate::step::Expr; +use quote::{ToTokens, Tokens}; +use yaml_rust::Yaml; + +pub struct IsFalse { + pub(crate) expr: Expr, +} + +impl From for Step { + fn from(is_false: IsFalse) -> Self { + Step::IsFalse(is_false) + } +} + +impl IsFalse { + pub fn try_parse(yaml: &Yaml) -> Result { + let expr = yaml.as_str().ok_or_else(|| { + failure::err_msg(format!("expected string key but found {:?}", &yaml)) + })?; + + Ok(IsFalse { expr: expr.into() }) + } +} + +impl ToTokens for IsFalse { + fn to_tokens(&self, tokens: &mut Tokens) { + if self.expr.is_body() { + tokens.append(quote! { + assert!(text.is_empty(), "expected value to be empty but was {}", &text); + }); + } else { + let expr = self.expr.expression(); + let ident = syn::Ident::from(expr.as_str()); + tokens.append(quote! { + assert_is_false!(&json#ident); + }); + } + } +} diff --git a/yaml_test_runner/src/step/is_true.rs b/yaml_test_runner/src/step/is_true.rs new file mode 100644 index 00000000..132aacb4 --- /dev/null +++ b/yaml_test_runner/src/step/is_true.rs @@ -0,0 +1,62 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +use super::Step; +use crate::step::Expr; +use quote::{ToTokens, Tokens}; +use yaml_rust::Yaml; + +pub struct IsTrue { + pub(crate) expr: Expr, +} + +impl From for Step { + fn from(is_true: IsTrue) -> Self { + Step::IsTrue(is_true) + } +} + +impl IsTrue { + pub fn try_parse(yaml: &Yaml) -> Result { + let expr = yaml.as_str().ok_or_else(|| { + failure::err_msg(format!("expected string key but found {:?}", &yaml)) + })?; + + Ok(IsTrue { expr: expr.into() }) + } +} + +impl ToTokens for IsTrue { + fn to_tokens(&self, tokens: &mut Tokens) { + if self.expr.is_body() { + // for a HEAD request, the body is expected to be empty, so check the status code instead. + tokens.append(quote! { + match method { + Method::Head => assert!(status_code.is_success(), "expected successful response for HEAD request but was {}", status_code.as_u16()), + _ => assert!(!text.is_empty(), "expected value to be true (not empty) but was {}", &text), + } + }); + } else { + let expr = self.expr.expression(); + let ident = syn::Ident::from(expr.as_str()); + tokens.append(quote! { + assert_is_true!(&json#ident); + }); + } + } +} diff --git a/yaml_test_runner/src/step/length.rs b/yaml_test_runner/src/step/length.rs new file mode 100644 index 00000000..75449d8b --- /dev/null +++ b/yaml_test_runner/src/step/length.rs @@ -0,0 +1,74 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. 
licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +use super::Step; +use crate::step::Expr; +use quote::{ToTokens, Tokens}; +use yaml_rust::Yaml; + +pub struct Length { + pub(crate) expr: Expr, + len: usize, +} + +impl From for Step { + fn from(length: Length) -> Self { + Step::Length(length) + } +} + +impl Length { + pub fn try_parse(yaml: &Yaml) -> Result { + let hash = yaml + .as_hash() + .ok_or_else(|| failure::err_msg(format!("expected hash but found {:?}", yaml)))?; + + let (k, v) = hash.iter().next().unwrap(); + + let expr = k + .as_str() + .ok_or_else(|| failure::err_msg(format!("expected string key but found {:?}", k)))?; + + let len = v + .as_i64() + .ok_or_else(|| failure::err_msg(format!("expected i64 but found {:?}", v)))?; + + Ok(Length { + len: len as usize, + expr: expr.into(), + }) + } +} + +impl ToTokens for Length { + fn to_tokens(&self, tokens: &mut Tokens) { + let len = self.len; + + if self.expr.is_body() { + tokens.append(quote! { + assert_length!(&json, #len); + }); + } else { + let expr = self.expr.expression(); + let ident = syn::Ident::from(expr); + tokens.append(quote! { + assert_length!(&json#ident, #len); + }); + } + } +} diff --git a/yaml_test_runner/src/step/match.rs b/yaml_test_runner/src/step/match.rs new file mode 100644 index 00000000..ac2ba568 --- /dev/null +++ b/yaml_test_runner/src/step/match.rs @@ -0,0 +1,157 @@ +/* + * Licensed to Elasticsearch B.V. 
under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +use super::Step; +use crate::regex::clean_regex; +use crate::step::{json_string_from_yaml, Expr}; +use quote::{ToTokens, Tokens}; +use yaml_rust::Yaml; + +pub struct Match { + pub expr: Expr, + value: Yaml, +} + +impl From for Step { + fn from(m: Match) -> Self { + Step::Match(m) + } +} + +impl Match { + pub fn try_parse(yaml: &Yaml) -> Result { + let hash = yaml + .as_hash() + .ok_or_else(|| failure::err_msg(format!("expected hash but found {:?}", yaml)))?; + + let (k, v) = hash.iter().next().unwrap(); + let expr = k.as_str().unwrap().trim(); + Ok(Match { + expr: expr.into(), + value: v.clone(), + }) + } +} + +impl ToTokens for Match { + fn to_tokens(&self, tokens: &mut Tokens) { + let expr = self.expr.expression(); + + match &self.value { + Yaml::String(s) => { + if s.starts_with('/') { + let s = clean_regex(s); + if self.expr.is_body() { + tokens.append(quote! { + assert_regex_match!(&text, #s, true); + }); + } else { + let ident = syn::Ident::from(expr.as_str()); + tokens.append(quote! 
{ + assert_regex_match!(json#ident.as_str().unwrap(), #s, true); + }); + } + } else { + let ident = syn::Ident::from(expr.as_str()); + + // handle set values + if s.starts_with('$') { + let t = { + let s = s + .trim_start_matches('$') + .trim_start_matches('{') + .trim_end_matches('}'); + syn::Ident::from(s) + }; + + tokens.append(quote! { + assert_match!(json#ident, json!(#t)); + }); + } else { + tokens.append(quote! { + assert_match!(json#ident, json!(#s)); + }) + }; + } + } + Yaml::Integer(i) => { + if self.expr.is_body() { + panic!("match on $body with i64"); + } else { + let ident = syn::Ident::from(expr.as_str()); + tokens.append(quote! { + assert_numeric_match!(json#ident, #i); + }); + } + } + Yaml::Real(r) => { + let f = r.parse::().unwrap(); + if self.expr.is_body() { + panic!("match on $body with f64"); + } else { + let ident = syn::Ident::from(expr.as_str()); + tokens.append(quote! { + assert_match!(json#ident, json!(#f)); + }); + } + } + Yaml::Null => { + if self.expr.is_body() { + tokens.append(quote! { + assert!(text.is_empty(), "expected response to be null (empty) but was {}", &text); + }); + } else { + let ident = syn::Ident::from(expr.as_str()); + tokens.append(quote! { + assert_null!(json#ident); + }); + } + } + Yaml::Boolean(b) => { + if self.expr.is_body() { + panic!("match on $body with bool"); + } else { + let ident = syn::Ident::from(expr.as_str()); + tokens.append(quote! { + assert_match!(json#ident, json!(#b)); + }); + } + } + yaml if yaml.is_array() || yaml.as_hash().is_some() => { + let json = { + let s = json_string_from_yaml(yaml); + syn::Ident::from(s) + }; + + if self.expr.is_body() { + tokens.append(quote! { + assert_match!(json, json!(#json)); + }); + } else { + let ident = syn::Ident::from(expr.as_str()); + tokens.append(quote! 
{ + assert_match!(json#ident, json!(#json)); + }); + } + } + yaml => { + panic!("Bad yaml value {:?}", &yaml); + } + } + } +} diff --git a/yaml_test_runner/src/step/mod.rs b/yaml_test_runner/src/step/mod.rs new file mode 100644 index 00000000..1fa93bd3 --- /dev/null +++ b/yaml_test_runner/src/step/mod.rs @@ -0,0 +1,251 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +use crate::regex::*; +use api_generator::generator::Api; +use std::fmt::Write; +use yaml_rust::{Yaml, YamlEmitter}; + +mod comparison; +mod contains; +mod r#do; +mod is_false; +mod is_true; +mod length; +mod r#match; +mod set; +mod skip; +mod transform_and_set; +pub use comparison::{Comparison, OPERATORS}; +pub use contains::*; +pub use is_false::*; +pub use is_true::*; +pub use length::*; +pub use r#do::*; +pub use r#match::*; +pub use set::*; +pub use skip::*; +pub use transform_and_set::*; + +pub fn parse_steps(api: &Api, steps: &[Yaml]) -> Result, failure::Error> { + let mut parsed_steps: Vec = Vec::new(); + for step in steps { + let hash = step + .as_hash() + .ok_or_else(|| failure::err_msg(format!("expected hash but found {:?}", step)))?; + + let (key, value) = { + let (k, yaml) = hash.iter().next().unwrap(); + let key = k.as_str().ok_or_else(|| { + failure::err_msg(format!("expected string key but found {:?}", k)) + })?; + + (key, yaml) + }; + + match key { + "skip" => { + let skip = Skip::try_parse(value)?; + parsed_steps.push(skip.into()); + } + "do" => { + let d = Do::try_parse(api, value)?; + parsed_steps.push(d.into()) + } + "set" => { + let s = Set::try_parse(value)?; + parsed_steps.push(s.into()); + } + "transform_and_set" => { + let t = TransformAndSet::try_parse(value)?; + parsed_steps.push(t.into()); + } + "match" => { + let m = Match::try_parse(value)?; + parsed_steps.push(m.into()); + } + "contains" => { + let c = Contains::try_parse(value)?; + parsed_steps.push(c.into()); + } + "is_true" => { + let e = IsTrue::try_parse(value)?; + parsed_steps.push(e.into()) + } + "is_false" => { + let e = IsFalse::try_parse(value)?; + parsed_steps.push(e.into()) + } + "length" => { + let l = Length::try_parse(value)?; + parsed_steps.push(l.into()) + } + op if OPERATORS.contains(&op) => { + let comp = Comparison::try_parse(value, op)?; + parsed_steps.push(comp.into()) + } + op => return Err(failure::err_msg(format!("unknown step operation: {}", op))), + } 
+ } + + Ok(parsed_steps) +} + +/// An expression to apply to the response. Can be the whole body ($body or "") or an +/// indexer expression into a JSON response. +pub struct Expr { + expr: String, +} + +impl From<&str> for Expr { + fn from(s: &str) -> Self { + Expr::new(s) + } +} + +impl Expr { + pub fn new>(expr: S) -> Self { + Self { expr: expr.into() } + } + + /// Whether the expression is "$body" or "", which are both used to express the whole body + pub fn is_body(&self) -> bool { + Self::is_string_body(&self.expr) || self.expr.is_empty() + } + + fn is_string_body(s: &str) -> bool { + s == "$body" + } + + pub fn expression(&self) -> String { + if self.is_body() { + self.expr.clone() + } else { + let mut values = Vec::new(); + let mut value = String::new(); + let mut chars = self.expr.chars(); + while let Some(ch) = chars.next() { + match ch { + '\\' => { + // consume the next character too + if let Some(next) = chars.next() { + value.push(next); + } + } + '.' => { + values.push(value); + value = String::new(); + } + _ => { + value.push(ch); + } + } + } + values.push(value); + + // some APIs specify the response body as the first part of the path + // which should be removed. + if Self::is_string_body(values[0].as_ref()) { + values.remove(0); + } + + let mut expr = String::new(); + for s in values { + if s.is_empty() { + write!(expr, "[\"\"]").unwrap(); + } else if s.chars().all(char::is_numeric) { + write!(expr, "[{}]", s).unwrap(); + } else if s.starts_with('$') { + // handle "set" values + let t = s + .trim_start_matches('$') + .trim_start_matches('{') + .trim_end_matches('}'); + write!(expr, "[{}.as_str().unwrap()]", t).unwrap(); + } else if s.as_str() == "_arbitrary_key_" { + // handle _arbitrary_key_. 
+ // wrap in Value::String to allow uniform unwrapping in subsequent steps + write!( + expr, + ".as_object().unwrap().iter().next().map(|(k, _)| json!(k)).unwrap()" + ) + .unwrap(); + } else { + write!(expr, "[\"{}\"]", s).unwrap(); + } + } + expr + } + } +} + +/// Steps defined in a yaml test +pub enum Step { + Skip(Skip), + Set(Set), + Do(Do), + Match(Match), + Length(Length), + IsTrue(IsTrue), + IsFalse(IsFalse), + Comparison(Comparison), + Contains(Contains), + TransformAndSet(TransformAndSet), +} + +impl Step { + /// Gets a Do step + pub fn r#do(&self) -> Option<&Do> { + match self { + Step::Do(d) => Some(d), + _ => None, + } + } +} + +/// Checks whether there are any Errs in the collection, and accumulates them into one +/// error message if there are. +pub fn ok_or_accumulate(results: &[Result]) -> Result<(), failure::Error> { + let errs = results + .iter() + .filter_map(|r| r.as_ref().err()) + .collect::>(); + if errs.is_empty() { + Ok(()) + } else { + let mut msgs = errs.iter().map(|e| e.to_string()).collect::>(); + msgs.sort(); + msgs.dedup_by(|a, b| a == b); + Err(failure::err_msg(msgs.join(", "))) + } +} + +pub fn json_string_from_yaml(yaml: &Yaml) -> String { + let mut s = String::new(); + { + let mut emitter = YamlEmitter::new(&mut s); + emitter.dump(yaml).unwrap(); + } + + let value: serde_json::Value = serde_yaml::from_str(&s).unwrap(); + + let mut json = value.to_string(); + json = replace_set(json); + json = replace_i64(json); + json +} diff --git a/yaml_test_runner/src/step/set.rs b/yaml_test_runner/src/step/set.rs new file mode 100644 index 00000000..a9dec7af --- /dev/null +++ b/yaml_test_runner/src/step/set.rs @@ -0,0 +1,65 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. 
licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +use super::Step; +use crate::step::Expr; +use quote::{ToTokens, Tokens}; +use yaml_rust::Yaml; + +pub struct Set { + ident: syn::Ident, + expr: Expr, +} + +impl From for Step { + fn from(set: Set) -> Self { + Step::Set(set) + } +} + +impl Set { + pub fn try_parse(yaml: &Yaml) -> Result { + let hash = yaml + .as_hash() + .ok_or_else(|| failure::err_msg(format!("expected hash but found {:?}", yaml)))?; + + let (k, v) = hash.iter().next().unwrap(); + let expr = k + .as_str() + .ok_or_else(|| failure::err_msg(format!("expected string key but found {:?}", k)))?; + + let id = v + .as_str() + .ok_or_else(|| failure::err_msg(format!("expected string value but found {:?}", v)))?; + + Ok(Set { + ident: syn::Ident::from(id), + expr: expr.into(), + }) + } +} + +impl ToTokens for Set { + fn to_tokens(&self, tokens: &mut Tokens) { + let ident = &self.ident; + let expr = syn::Ident::from(self.expr.expression().as_str()); + tokens.append(quote! { + let #ident = json#expr.clone(); + }); + } +} diff --git a/yaml_test_runner/src/step/skip.rs b/yaml_test_runner/src/step/skip.rs new file mode 100644 index 00000000..feeb8369 --- /dev/null +++ b/yaml_test_runner/src/step/skip.rs @@ -0,0 +1,134 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +use super::Step; +use regex::Regex; +use yaml_rust::Yaml; + +pub struct Skip { + version_requirements: Option, + version: Option, + reason: Option, + features: Option>, +} + +impl From for Step { + fn from(skip: Skip) -> Self { + Step::Skip(skip) + } +} + +impl Skip { + /// Gets the version. Returns empty if no version + pub fn version(&self) -> String { + self.version.clone().unwrap_or_else(|| "".into()) + } + + /// Gets the reason. Returns empty string if no reason + pub fn reason(&self) -> String { + self.reason.clone().unwrap_or_else(|| "".into()) + } + + /// Gets the features. Returns empty slice if no features + pub fn features(&self) -> &[String] { + match &self.features { + Some(v) => v, + None => &[], + } + } + + /// Converts the version range specified in the yaml test into a [semver::VersionReq] + fn parse_version_requirements(version: &Option) -> Option { + if let Some(v) = version { + if v.to_lowercase() == "all" { + Some(semver::VersionReq::any()) + } else { + lazy_static! 
{ + static ref VERSION_REGEX: Regex = + Regex::new(r"^([\w\.]+)?\s*?\-\s*?([\w\.]+)?$").unwrap(); + } + if let Some(c) = VERSION_REGEX.captures(v) { + match (c.get(1), c.get(2)) { + (Some(start), Some(end)) => Some( + semver::VersionReq::parse( + format!(">={},<={}", start.as_str(), end.as_str()).as_ref(), + ) + .unwrap(), + ), + (Some(start), None) => Some( + semver::VersionReq::parse(format!(">={}", start.as_str()).as_ref()) + .unwrap(), + ), + (None, Some(end)) => Some( + semver::VersionReq::parse(format!("<={}", end.as_str()).as_ref()) + .unwrap(), + ), + (None, None) => None, + } + } else { + None + } + } + } else { + None + } + } + + pub fn try_parse(yaml: &Yaml) -> Result { + let version = yaml["version"] + .as_str() + .map_or_else(|| None, |y| Some(y.to_string())); + let reason = yaml["reason"] + .as_str() + .map_or_else(|| None, |y| Some(y.to_string())); + let features = match &yaml["features"] { + Yaml::String(s) => Some(vec![s.to_string()]), + Yaml::Array(a) => Some( + a.iter() + .map(|y| y.as_str().map(|s| s.to_string()).unwrap()) + .collect(), + ), + _ => None, + }; + + let version_requirements = Self::parse_version_requirements(&version); + + Ok(Skip { + version, + version_requirements, + reason, + features, + }) + } + + /// Determines if this instance matches the version + pub fn skip_version(&self, version: &semver::Version) -> bool { + match &self.version_requirements { + Some(r) => r.matches(version), + None => false, + } + } + + /// Determines if this instance matches the version + pub fn skip_features(&self, features: &[String]) -> bool { + match &self.features { + Some(test_features) => test_features.iter().any(|f| features.contains(f)), + None => false, + } + } +} diff --git a/yaml_test_runner/src/step/transform_and_set.rs b/yaml_test_runner/src/step/transform_and_set.rs new file mode 100644 index 00000000..fc8ce03c --- /dev/null +++ b/yaml_test_runner/src/step/transform_and_set.rs @@ -0,0 +1,127 @@ +/* + * Licensed to Elasticsearch B.V. 
under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +use super::Step; +use crate::step::Expr; +use inflector::Inflector; +use quote::{ToTokens, Tokens}; +use yaml_rust::Yaml; + +pub struct Transformation { + #[allow(dead_code)] + raw: String, + function: String, + exprs: Vec, +} + +impl Transformation { + pub fn transform(&self) -> syn::Ident { + let mut transform = String::new(); + transform.push_str(&self.function); + transform.push('('); + for expr in &self.exprs { + transform.push_str("json"); + transform.push_str(expr.expression().as_str()); + transform.push_str(".as_str().unwrap()"); + transform.push(','); + } + transform.push(')'); + syn::Ident::from(transform.as_str()) + } +} + +impl From<&str> for Transformation { + fn from(t: &str) -> Self { + let raw = t.to_string(); + let mut function = None; + let mut exprs = Vec::new(); + let mut value = String::new(); + for ch in t.chars() { + match ch { + '#' => { + continue; + } + '(' => { + let name = format!("transform::{}", value.as_str().to_snake_case()); + function = Some(name); + value = String::new(); + } + ',' | ')' => { + let expr = value.trim(); + exprs.push(Expr::new(expr)); + value = String::new(); + } + _ => { + value.push(ch); + } + } + } + + Self { + raw, + function: function.unwrap(), + 
exprs, + } + } +} + +pub struct TransformAndSet { + ident: syn::Ident, + transformation: Transformation, +} + +impl From for Step { + fn from(transform_and_set: TransformAndSet) -> Self { + Step::TransformAndSet(transform_and_set) + } +} + +impl TransformAndSet { + pub fn try_parse(yaml: &Yaml) -> Result { + let hash = yaml + .as_hash() + .ok_or_else(|| failure::err_msg(format!("expected hash but found {:?}", yaml)))?; + + let (k, v) = hash.iter().next().unwrap(); + let ident = k + .as_str() + .ok_or_else(|| failure::err_msg(format!("expected string key but found {:?}", k)))?; + + let transformation = v + .as_str() + .ok_or_else(|| failure::err_msg(format!("expected string value but found {:?}", v)))?; + + Ok(TransformAndSet { + ident: syn::Ident::from(ident), + transformation: transformation.into(), + }) + } +} + +impl ToTokens for TransformAndSet { + fn to_tokens(&self, tokens: &mut Tokens) { + let ident = &self.ident; + let transform = &self.transformation.transform(); + tokens.append(quote! { + let #ident = { + let transform = #transform; + json!(transform) + }; + }); + } +} diff --git a/yaml_test_runner/tests/common/client.rs b/yaml_test_runner/tests/common/client.rs new file mode 100644 index 00000000..c524e11c --- /dev/null +++ b/yaml_test_runner/tests/common/client.rs @@ -0,0 +1,459 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +use elasticsearch::cat::{CatSnapshotsParts, CatTemplatesParts}; +use elasticsearch::cluster::ClusterHealthParts; +use elasticsearch::http::response::Response; +use elasticsearch::http::{Method, StatusCode}; +use elasticsearch::ilm::IlmRemovePolicyParts; +use elasticsearch::indices::{IndicesDeleteParts, IndicesDeleteTemplateParts, IndicesRefreshParts}; +use elasticsearch::ml::{ + MlCloseJobParts, MlDeleteDatafeedParts, MlDeleteJobParts, MlGetDatafeedsParts, MlGetJobsParts, + MlStopDatafeedParts, +}; +use elasticsearch::params::{ExpandWildcards, WaitForStatus}; +use elasticsearch::security::{ + SecurityDeletePrivilegesParts, SecurityDeleteRoleParts, SecurityDeleteUserParts, + SecurityGetPrivilegesParts, SecurityGetRoleParts, SecurityGetUserParts, SecurityPutUserParts, +}; +use elasticsearch::snapshot::{SnapshotDeleteParts, SnapshotDeleteRepositoryParts}; +use elasticsearch::tasks::TasksCancelParts; +use elasticsearch::transform::{ + TransformDeleteTransformParts, TransformGetTransformParts, TransformStopTransformParts, +}; +use elasticsearch::watcher::WatcherDeleteWatchParts; +use elasticsearch::{ + auth::Credentials, + cert::CertificateValidation, + http::transport::{SingleNodeConnectionPool, TransportBuilder}, + Elasticsearch, Error, DEFAULT_ADDRESS, +}; +use once_cell::sync::Lazy; +use serde_json::{json, Value}; +use std::ops::Deref; +use sysinfo::SystemExt; +use url::Url; + +fn cluster_addr() -> String { + match std::env::var("ES_TEST_SERVER") { + Ok(server) => server, + Err(_) => DEFAULT_ADDRESS.into(), + } +} + +/// Determines if 
Fiddler.exe proxy process is running +fn running_proxy() -> bool { + let system = sysinfo::System::new(); + !system.get_process_by_name("Fiddler").is_empty() +} + +static GLOBAL_CLIENT: Lazy = Lazy::new(|| { + let mut url = Url::parse(cluster_addr().as_ref()).unwrap(); + + // if the url is https and specifies a username and password, remove from the url and set credentials + let credentials = if url.scheme() == "https" { + let username = if !url.username().is_empty() { + let u = url.username().to_string(); + url.set_username("").unwrap(); + u + } else { + "elastic".into() + }; + + let password = match url.password() { + Some(p) => { + let pass = p.to_string(); + url.set_password(None).unwrap(); + pass + } + None => "changeme".into(), + }; + + Some(Credentials::Basic(username, password)) + } else { + None + }; + + let conn_pool = SingleNodeConnectionPool::new(url); + let mut builder = TransportBuilder::new(conn_pool); + + builder = match credentials { + Some(c) => builder.auth(c).cert_validation(CertificateValidation::None), + None => builder, + }; + + if running_proxy() { + let proxy_url = Url::parse("http://localhost:8888").unwrap(); + builder = builder.proxy(proxy_url, None, None); + } + + let transport = builder.build().unwrap(); + Elasticsearch::new(transport) +}); + +/// Gets the client to use in tests +pub fn get() -> &'static Elasticsearch { + GLOBAL_CLIENT.deref() +} + +/// Reads the response from Elasticsearch, returning the method, status code, text response, +/// and the response parsed from json or yaml +pub async fn read_response( + response: Response, +) -> Result<(Method, StatusCode, String, Value), failure::Error> { + let is_json = response.content_type().starts_with("application/json"); + let is_yaml = response.content_type().starts_with("application/yaml"); + let method = response.method(); + let status_code = response.status_code(); + let text = response.text().await?; + let json = if is_json && !text.is_empty() { + 
serde_json::from_str::(text.as_ref())? + } else if is_yaml && !text.is_empty() { + serde_yaml::from_str::(text.as_ref())? + } else { + Value::Null + }; + + Ok((method, status_code, text, json)) +} + +/// general setup step for an OSS yaml test +pub async fn general_oss_setup() -> Result<(), Error> { + let client = get(); + delete_indices(client).await?; + delete_templates(client).await?; + + let cat_snapshot_response = client + .cat() + .snapshots(CatSnapshotsParts::None) + .h(&["id", "repository"]) + .send() + .await?; + + if cat_snapshot_response.status_code().is_success() { + let cat_snapshot_text = cat_snapshot_response.text().await?; + + let all_snapshots: Vec<(&str, &str)> = cat_snapshot_text + .split('\n') + .map(|s| s.split(' ').collect::>()) + .filter(|s| s.len() == 2) + .map(|s| (s[0].trim(), s[1].trim())) + .filter(|(id, repo)| !id.is_empty() && !repo.is_empty()) + .collect(); + + for (id, repo) in all_snapshots { + let _snapshot_response = client + .snapshot() + .delete(SnapshotDeleteParts::RepositorySnapshot(&repo, &id)) + .send() + .await?; + } + } + + let _delete_repo_response = client + .snapshot() + .delete_repository(SnapshotDeleteRepositoryParts::Repository(&["*"])) + .send() + .await?; + + Ok(()) +} + +/// general setup step for an xpack yaml test +pub async fn general_xpack_setup() -> Result<(), Error> { + let client = get(); + delete_templates(client).await?; + + let _delete_watch_response = client + .watcher() + .delete_watch(WatcherDeleteWatchParts::Id("my_watch")) + .send() + .await?; + + delete_roles(client).await?; + delete_users(client).await?; + delete_privileges(client).await?; + stop_and_delete_datafeeds(client).await?; + + let _ = client + .ilm() + .remove_policy(IlmRemovePolicyParts::Index("_all")) + .send() + .await?; + + close_and_delete_jobs(client).await?; + + // TODO: stop and delete rollup jobs once implemented in the client + + cancel_tasks(client).await?; + stop_and_delete_transforms(client).await?; + 
wait_for_yellow_status(client).await?; + delete_indices(client).await?; + + let _ = client + .security() + .put_user(SecurityPutUserParts::Username("x_pack_rest_user")) + .body(json!({ + "password": "x-pack-test-password", + "roles": ["superuser"] + })) + .send() + .await?; + + let _ = client + .indices() + .refresh(IndicesRefreshParts::Index(&["_all"])) + .expand_wildcards(&[ + ExpandWildcards::Open, + ExpandWildcards::Closed, + ExpandWildcards::Hidden, + ]) + .send() + .await?; + + wait_for_yellow_status(client).await?; + + Ok(()) +} + +async fn wait_for_yellow_status(client: &Elasticsearch) -> Result<(), Error> { + let cluster_health = client + .cluster() + .health(ClusterHealthParts::None) + .wait_for_status(WaitForStatus::Yellow) + .send() + .await?; + + assert_response_success!(cluster_health); + Ok(()) +} + +async fn delete_indices(client: &Elasticsearch) -> Result<(), Error> { + let delete_response = client + .indices() + .delete(IndicesDeleteParts::Index(&["*"])) + .expand_wildcards(&[ + ExpandWildcards::Open, + ExpandWildcards::Closed, + ExpandWildcards::Hidden, + ]) + .send() + .await?; + + assert_response_success!(delete_response); + Ok(()) +} + +async fn stop_and_delete_transforms(client: &Elasticsearch) -> Result<(), Error> { + let transforms_response = client + .transform() + .get_transform(TransformGetTransformParts::TransformId("_all")) + .send() + .await? 
+ .json::() + .await?; + + for transform in transforms_response["transforms"].as_array().unwrap() { + let id = transform["id"].as_str().unwrap(); + let _ = client + .transform() + .stop_transform(TransformStopTransformParts::TransformId(id)) + .send() + .await?; + + let _ = client + .transform() + .delete_transform(TransformDeleteTransformParts::TransformId(id)) + .send() + .await?; + } + + Ok(()) +} + +async fn cancel_tasks(client: &Elasticsearch) -> Result<(), Error> { + let rollup_response = client.tasks().list().send().await?.json::().await?; + + for (_node_id, nodes) in rollup_response["nodes"].as_object().unwrap() { + for (task_id, task) in nodes["tasks"].as_object().unwrap() { + if let Some(b) = task["cancellable"].as_bool() { + if b { + let _ = client + .tasks() + .cancel(TasksCancelParts::TaskId(task_id)) + .send() + .await?; + } + } + } + } + + Ok(()) +} + +async fn delete_templates(client: &Elasticsearch) -> Result<(), Error> { + let cat_template_response = client + .cat() + .templates(CatTemplatesParts::Name("*")) + .h(&["name"]) + .send() + .await? + .text() + .await?; + + let all_templates: Vec<&str> = cat_template_response + .split('\n') + .filter(|s| !s.is_empty() && !s.starts_with('.') && s != &"security-audit-log") + .collect(); + + for template in all_templates { + let _delete_template_response = client + .indices() + .delete_template(IndicesDeleteTemplateParts::Name(&template)) + .send() + .await?; + } + + Ok(()) +} + +async fn delete_users(client: &Elasticsearch) -> Result<(), Error> { + let users_response = client + .security() + .get_user(SecurityGetUserParts::None) + .send() + .await? 
+ .json::() + .await?; + + for (k, v) in users_response.as_object().unwrap() { + if let Some(b) = v["metadata"]["_reserved"].as_bool() { + if !b { + let _ = client + .security() + .delete_user(SecurityDeleteUserParts::Username(k)) + .send() + .await?; + } + } + } + + Ok(()) +} + +async fn delete_roles(client: &Elasticsearch) -> Result<(), Error> { + let roles_response = client + .security() + .get_role(SecurityGetRoleParts::None) + .send() + .await? + .json::() + .await?; + + for (k, v) in roles_response.as_object().unwrap() { + if let Some(b) = v["metadata"]["_reserved"].as_bool() { + if !b { + let _ = client + .security() + .delete_role(SecurityDeleteRoleParts::Name(k)) + .send() + .await?; + } + } + } + + Ok(()) +} + +async fn delete_privileges(client: &Elasticsearch) -> Result<(), Error> { + let privileges_response = client + .security() + .get_privileges(SecurityGetPrivilegesParts::None) + .send() + .await? + .json::() + .await?; + + for (k, v) in privileges_response.as_object().unwrap() { + if let Some(b) = v["metadata"]["_reserved"].as_bool() { + if !b { + let _ = client + .security() + .delete_privileges(SecurityDeletePrivilegesParts::ApplicationName(k, "_all")) + .send() + .await?; + } + } + } + + Ok(()) +} + +async fn stop_and_delete_datafeeds(client: &Elasticsearch) -> Result<(), Error> { + let _stop_data_feed_response = client + .ml() + .stop_datafeed(MlStopDatafeedParts::DatafeedId("_all")) + .send() + .await?; + + let get_data_feeds_response = client + .ml() + .get_datafeeds(MlGetDatafeedsParts::None) + .send() + .await? 
+ .json::() + .await?; + + for feed in get_data_feeds_response["datafeeds"].as_array().unwrap() { + let id = feed["datafeed_id"].as_str().unwrap(); + let _ = client + .ml() + .delete_datafeed(MlDeleteDatafeedParts::DatafeedId(id)) + .send() + .await?; + } + + Ok(()) +} + +async fn close_and_delete_jobs(client: &Elasticsearch) -> Result<(), Error> { + let _ = client + .ml() + .close_job(MlCloseJobParts::JobId("_all")) + .send() + .await?; + + let get_jobs_response = client + .ml() + .get_jobs(MlGetJobsParts::JobId("_all")) + .send() + .await? + .json::() + .await?; + + for job in get_jobs_response["jobs"].as_array().unwrap() { + let id = job["job_id"].as_str().unwrap(); + let _ = client + .ml() + .delete_job(MlDeleteJobParts::JobId(id)) + .send() + .await?; + } + + Ok(()) +} diff --git a/yaml_test_runner/tests/common/macros.rs b/yaml_test_runner/tests/common/macros.rs new file mode 100644 index 00000000..592aa1ce --- /dev/null +++ b/yaml_test_runner/tests/common/macros.rs @@ -0,0 +1,347 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +/// Asserts that a [Response] has a status code >=200 and <300 +#[macro_export] +macro_rules! 
assert_response_success { + ($response:ident) => {{ + assert!( + $response.status_code().is_success(), + "expected response to be successful but was {}", + $response.status_code().as_u16() + ); + }}; +} + +/// Asserts that a [Response] has a status code >=200 and <300 or matches the passed status +#[macro_export] +macro_rules! assert_response_success_or { + ($response:ident, $status:expr) => {{ + assert!( + $response.status_code().is_success() || $response.status_code().as_u16() == $status, + "expected response to be successful or {} but was {}", + $status, + $response.status_code().as_u16() + ); + }}; +} + +#[macro_export] +macro_rules! assert_status_code { + ($status_code:expr, $expected:expr) => {{ + assert_eq!( + $expected, + $status_code.as_u16(), + "expected status code to be {} but was {}", + $expected, + $status_code.as_u16() + ); + }}; +} + +#[macro_export] +macro_rules! assert_request_status_code { + ($status_code:expr) => {{ + let status_code = $status_code.as_u16(); + assert!( + status_code >= 400 && status_code < 600, + "expected status code in range 400-599 but was {}", + status_code + ); + }}; +} + +/// Asserts that the passed [serde_json::Value] matches the second argument. +/// The second argument is converted to a [serde_json::Value] using the `json!` macro +#[macro_export] +macro_rules! assert_match { + ($expected:expr, $actual:expr) => {{ + assert_eq!( + $expected, + $actual, + "expected value {} to match {:?} but was {:?}", + stringify!($expected), + $actual, + $expected + ); + }}; +} + +/// Asserts that the passed [serde_json::Value] matches the expected numeric value. +/// This handles the case where a YAML test asserts a match against an integer value +/// but a floating point value is returned from Elasticsearch +#[macro_export] +macro_rules! 
assert_numeric_match { + ($expected:expr, $actual:expr) => {{ + if $expected.is_i64() { + assert_match!($expected, $actual); + } else { + assert_match!($expected, $actual as f64); + } + }}; +} + +/// Asserts that a [serde_json::Value] is null. +#[macro_export] +macro_rules! assert_null { + ($expected:expr) => {{ + assert!( + $expected.is_null(), + "expected value {} to be null but was {:?}", + stringify!($expected), + $expected + ); + }}; +} + +/// Asserts that the first string value matches the second string regular expression. An optional +/// third bool argument ignores pattern whitespace. +#[macro_export] +macro_rules! assert_regex_match { + ($expected:expr, $regex:expr) => {{ + let regex = regex::RegexBuilder::new($regex).build()?; + assert!( + regex.is_match($expected), + "expected value {} to match regex\n\n{}\n\nbut was\n\n{}", + stringify!($expected), + $regex, + $expected + ); + }}; + ($expected:expr, $regex:expr, $ignore_whitespace:expr) => {{ + let regex = regex::RegexBuilder::new($regex) + .ignore_whitespace($ignore_whitespace) + .build()?; + assert!( + regex.is_match($expected), + "expected value {} to match regex\n\n{}\n\nbut was\n\n{}", + stringify!($expected), + $regex, + $expected + ); + }}; +} + +/// Asserts that the length of a [serde_json::Value] matches the expected length. +/// A length is calculated from the value based on the variant e.g. +/// - string length +/// - array length +/// - number of keys in object +/// - numeric value +#[macro_export] +macro_rules! assert_length { + ($expr:expr, $len:expr) => {{ + let len = match $expr { + Value::Number(n) => n.as_i64().unwrap() as usize, + Value::String(s) => s.len(), + Value::Array(a) => a.len(), + Value::Object(o) => o.len(), + v => panic!("Cannot get length from {:?}", v), + }; + + assert_eq!( + $len, + len, + "expected value {} to have length {} but was {}", + stringify!($expr), + $len, + len + ); + }}; +} + +/// Asserts that the expression is "false" i.e. 
`0`, `false`, `undefined`, `null` or `""` +#[macro_export] +macro_rules! assert_is_false { + ($expr:expr) => {{ + let expr_string = stringify!($expr); + match $expr { + Value::Null => {} + Value::Bool(b) => assert_eq!( + *b, false, + "expected value at {} to be false but was {}", + expr_string, b + ), + Value::Number(n) => assert_eq!( + n.as_f64().unwrap(), + 0.0, + "expected value at {} to be false (0) but was {}", + expr_string, + n.as_f64().unwrap() + ), + Value::String(s) => assert!( + s.is_empty(), + "expected value at {} to be false (empty) but was {}", + expr_string, + &s + ), + v => assert!( + false, + "expected value at {} to be false but was {:?}", + expr_string, &v + ), + } + }}; +} + +/// Asserts that the expression is "true" i.e. not `0`, `false`, `undefined`, `null` or `""` +#[macro_export] +macro_rules! assert_is_true { + ($expr:expr) => {{ + let expr_string = stringify!($expr); + match $expr { + Value::Null => assert!( + false, + "expected value at {} to be true (not null) but was null", + expr_string + ), + Value::Bool(b) => assert!( + *b, + "expected value at {} to be true but was false", + expr_string + ), + Value::Number(n) => assert_ne!( + n.as_f64().unwrap(), + 0.0, + "expected value at {} to be true (not 0) but was {}", + expr_string, + n.as_f64().unwrap() + ), + Value::String(s) => assert!( + !s.is_empty(), + "expected value at {} to be true (not empty) but was {}", + expr_string, + &s + ), + v => {} + } + }}; +} + +/// Asserts that the deprecation warnings contain a given value +#[macro_export] +macro_rules! assert_warnings_contain { + ($warnings:expr, $expected:expr) => {{ + assert!( + $warnings.iter().any(|w| w.contains($expected)), + "expected warnings to contain '{}' but contained {:?}", + $expected, + &$warnings + ); + }}; +} + +/// Asserts that the deprecation warnings are empty +#[macro_export] +macro_rules! 
assert_warnings_is_empty { + ($warnings:expr) => {{ + assert!( + $warnings.is_empty(), + "expected warnings to be empty but found {:?}", + &$warnings + ); + }}; +} + +/// Asserts that the comparison is true +#[macro_export] +macro_rules! assert_comparison { + ($expr:expr, $($comparison:tt)+) => {{ + match $expr { + Value::Number(n) => { + match n.as_i64() { + Some(i) => assert!(i $($comparison)+ as i64, "Expected value {} to be {} but was {}", stringify!($expr), stringify!($($comparison)+ as i64), i), + None => match n.as_f64() { + Some(f) => assert!(f $($comparison)+ as f64, "Expected value {} to be {} but was {}", stringify!($expr), stringify!($($comparison)+ as f64), f), + None => match n.as_u64() { + Some(u) => assert!(u $($comparison)+ as u64, "Expected value {} to be {} but was {}", stringify!($expr), stringify!($($comparison)+ as u64), u), + None => assert!(false, "Expected value {} to be numeric but was {:?}", stringify!($expr), &n) + } + } + } + } + v => assert!(false, "Expected value {} to be numeric but was {:?}", stringify!($expr), &v), + } + }}; +} + +/// Asserts that the comparison is true when comparing against a "set" value +#[macro_export] +macro_rules! 
assert_comparison_from_set_value { + ($expr:expr, $($comparison:tt)+) => {{ + match $expr { + Value::Number(n) => { + match n.as_i64() { + Some(i) => assert!(i $($comparison)+.as_i64().unwrap(), "Expected value {} to be {} but was {}", stringify!($expr), stringify!($($comparison)+.as_i64().unwrap()), i), + None => match n.as_f64() { + Some(f) => assert!(f $($comparison)+.as_f64().unwrap(), "Expected value {} to be {} but was {}", stringify!($expr), stringify!($($comparison)+.as_f64().unwrap()), f), + None => match n.as_u64() { + Some(u) => assert!(u $($comparison)+.as_u64().unwrap(), "Expected value {} to be {} but was {}", stringify!($expr), stringify!($($comparison)+.as_u64().unwrap()), u), + None => assert!(false, "Expected value {} to be numeric but was {:?}", stringify!($expr), &n) + } + } + } + } + v => assert!(false, "Expected value {} to be numeric but was {:?}", stringify!($expr), &v), + } + }}; +} + +/// Asserts that the passed [serde_json::Value::Array] contains the second argument. +#[macro_export] +macro_rules! assert_contains { + ($expr:expr, $value:expr) => {{ + if !$expr.is_array() { + assert!( + false, + "expected {} to be an array but was {:?}", + stringify!($expr), + &$expr + ); + } + + let arr = $expr.as_array().unwrap(); + + // when dealing with a serde_json::Value::Object, the $value may only be a partial object + // such that equality can't be used. 
In this case, we need to assert that there is one + // object in the array that has all the keys and values of $value + if $value.is_object() { + let vv = $value.clone(); + let o = vv.as_object().unwrap(); + assert!( + arr.iter() + .filter_map(serde_json::Value::as_object) + .any(|ao| o + .iter() + .all(|(key, value)| ao.get(key).map_or(false, |v| *value == *v))), + "expected value {} to contain {:?} but contained {:?}", + stringify!($expr), + &vv, + &arr + ); + } else { + assert!( + arr.contains(&$value), + "expected value {} to contain {:?} but contained {:?}", + stringify!($expr), + &$value, + &arr + ); + } + }}; +} diff --git a/yaml_test_runner/tests/common/mod.rs b/yaml_test_runner/tests/common/mod.rs new file mode 100644 index 00000000..6e28cfb2 --- /dev/null +++ b/yaml_test_runner/tests/common/mod.rs @@ -0,0 +1,22 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +#[macro_use] +pub mod macros; +pub mod client; +pub mod transform; diff --git a/yaml_test_runner/tests/common/transform.rs b/yaml_test_runner/tests/common/transform.rs new file mode 100644 index 00000000..a33fb8d3 --- /dev/null +++ b/yaml_test_runner/tests/common/transform.rs @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch B.V. 
under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
use base64::write::EncoderWriter as Base64Encoder;
use std::io::Write;

/// Base64-encodes `user:password`, the value form used in an HTTP Basic
/// `Authorization` header.
pub fn base_64_encode_credentials(user: &str, password: &str) -> String {
    let mut value = Vec::new();
    {
        // the inner scope drops (and thereby finalizes) the encoder before
        // the buffer is read back out
        let mut encoder = Base64Encoder::new(&mut value, base64::STANDARD);
        write!(encoder, "{}:", user).unwrap();
        write!(encoder, "{}", password).unwrap();
    };
    String::from_utf8(value).unwrap()
}