diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/AsciiFoldingTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/AsciiFoldingTokenFilter.g.cs index d9942e62982..26aeb8d9237 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/AsciiFoldingTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/AsciiFoldingTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class AsciiFoldingTokenFilter : ITokenFilterDefinition +public sealed partial class AsciiFoldingTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("preserve_original")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/CharFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/CharFilter.g.cs deleted file mode 100644 index 50c891eeca7..00000000000 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/CharFilter.g.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Licensed to Elasticsearch B.V under one or more agreements. -// Elasticsearch B.V licenses this file to you under the Apache 2.0 License. -// See the LICENSE file in the project root for more information. -// -// ███╗ ██╗ ██████╗ ████████╗██╗ ██████╗███████╗ -// ████╗ ██║██╔═══██╗╚══██╔══╝██║██╔════╝██╔════╝ -// ██╔██╗ ██║██║ ██║ ██║ ██║██║ █████╗ -// ██║╚██╗██║██║ ██║ ██║ ██║██║ ██╔══╝ -// ██║ ╚████║╚██████╔╝ ██║ ██║╚██████╗███████╗ -// ╚═╝ ╚═══╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝╚══════╝ -// ------------------------------------------------ -// -// This file is automatically generated. -// Please do not edit these files manually. -// -// ------------------------------------------------ - -using Elastic.Clients.Elasticsearch.Fluent; -using Elastic.Clients.Elasticsearch.Serialization; -using Elastic.Transport; -using System; -using System.Collections.Generic; -using System.Linq.Expressions; -using System.Text.Json; -using System.Text.Json.Serialization; - -#nullable restore -namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class CharFilter : Union -{ - public CharFilter(string name) : base(name) - { - } - - public CharFilter(Elastic.Clients.Elasticsearch.Analysis.CharFilterDefinitions definition) : base(definition) - { - } -} \ No newline at end of file diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/CharFilterDefinitions.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/CharFilterDefinitions.g.cs deleted file mode 100644 index 1872d287c1d..00000000000 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/CharFilterDefinitions.g.cs +++ /dev/null @@ -1,150 +0,0 @@ -// Licensed to Elasticsearch B.V under one or more agreements. -// Elasticsearch B.V licenses this file to you under the Apache 2.0 License. -// See the LICENSE file in the project root for more information. -// -// ███╗ ██╗ ██████╗ ████████╗██╗ ██████╗███████╗ -// ████╗ ██║██╔═══██╗╚══██╔══╝██║██╔════╝██╔════╝ -// ██╔██╗ ██║██║ ██║ ██║ ██║██║ █████╗ -// ██║╚██╗██║██║ ██║ ██║ ██║██║ ██╔══╝ -// ██║ ╚████║╚██████╔╝ ██║ ██║╚██████╗███████╗ -// ╚═╝ ╚═══╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝╚══════╝ -// ------------------------------------------------ -// -// This file is automatically generated. -// Please do not edit these files manually. 
-// -// ------------------------------------------------ - -using Elastic.Clients.Elasticsearch.Fluent; -using Elastic.Clients.Elasticsearch.Serialization; -using Elastic.Transport; -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Linq.Expressions; -using System.Text.Json; -using System.Text.Json.Serialization; - -#nullable restore -namespace Elastic.Clients.Elasticsearch.Analysis; -public partial class CharFilterDefinitions : IsADictionary -{ - public CharFilterDefinitions() - { - } - - public CharFilterDefinitions(IDictionary container) : base(container) - { - } - - public void Add(string name, ICharFilterDefinition charFilterDefinition) => BackingDictionary.Add(Sanitize(name), charFilterDefinition); - public bool TryGetCharFilterDefinition(string name, [NotNullWhen(returnValue: true)] out ICharFilterDefinition charFilterDefinition) => BackingDictionary.TryGetValue(Sanitize(name), out charFilterDefinition); - public bool TryGetCharFilterDefinition(string name, [NotNullWhen(returnValue: true)] out T? charFilterDefinition) - where T : class, ICharFilterDefinition - { - if (BackingDictionary.TryGetValue(Sanitize(name), out var matchedValue) && matchedValue is T finalValue) - { - charFilterDefinition = finalValue; - return true; - } - - charFilterDefinition = null; - return false; - } -} - -public sealed partial class CharFilterDefinitionsDescriptor : IsADictionaryDescriptor -{ - public CharFilterDefinitionsDescriptor() : base(new CharFilterDefinitions()) - { - } - - public CharFilterDefinitionsDescriptor(CharFilterDefinitions charFilterDefinitions) : base(charFilterDefinitions ?? new CharFilterDefinitions()) - { - } - - public CharFilterDefinitionsDescriptor HtmlStripCharFilter(string charFilterDefinitionName) => AssignVariant(charFilterDefinitionName, null); - public CharFilterDefinitionsDescriptor HtmlStripCharFilter(string charFilterDefinitionName, Action configure) => AssignVariant(charFilterDefinitionName, configure); - public CharFilterDefinitionsDescriptor HtmlStripCharFilter(string charFilterDefinitionName, HtmlStripCharFilter htmlStripCharFilter) => AssignVariant(charFilterDefinitionName, htmlStripCharFilter); - public CharFilterDefinitionsDescriptor IcuNormalizationCharFilter(string charFilterDefinitionName) => AssignVariant(charFilterDefinitionName, null); - public CharFilterDefinitionsDescriptor IcuNormalizationCharFilter(string charFilterDefinitionName, Action configure) => AssignVariant(charFilterDefinitionName, configure); - public CharFilterDefinitionsDescriptor IcuNormalizationCharFilter(string charFilterDefinitionName, IcuNormalizationCharFilter icuNormalizationCharFilter) => AssignVariant(charFilterDefinitionName, icuNormalizationCharFilter); - public CharFilterDefinitionsDescriptor KuromojiIterationMarkCharFilter(string charFilterDefinitionName) => AssignVariant(charFilterDefinitionName, null); - public CharFilterDefinitionsDescriptor KuromojiIterationMarkCharFilter(string charFilterDefinitionName, Action configure) => AssignVariant(charFilterDefinitionName, configure); - public CharFilterDefinitionsDescriptor KuromojiIterationMarkCharFilter(string charFilterDefinitionName, KuromojiIterationMarkCharFilter kuromojiIterationMarkCharFilter) => AssignVariant(charFilterDefinitionName, kuromojiIterationMarkCharFilter); - public CharFilterDefinitionsDescriptor MappingCharFilter(string charFilterDefinitionName) => AssignVariant(charFilterDefinitionName, null); - public CharFilterDefinitionsDescriptor MappingCharFilter(string 
charFilterDefinitionName, Action configure) => AssignVariant(charFilterDefinitionName, configure); - public CharFilterDefinitionsDescriptor MappingCharFilter(string charFilterDefinitionName, MappingCharFilter mappingCharFilter) => AssignVariant(charFilterDefinitionName, mappingCharFilter); - public CharFilterDefinitionsDescriptor PatternReplaceCharFilter(string charFilterDefinitionName) => AssignVariant(charFilterDefinitionName, null); - public CharFilterDefinitionsDescriptor PatternReplaceCharFilter(string charFilterDefinitionName, Action configure) => AssignVariant(charFilterDefinitionName, configure); - public CharFilterDefinitionsDescriptor PatternReplaceCharFilter(string charFilterDefinitionName, PatternReplaceCharFilter patternReplaceCharFilter) => AssignVariant(charFilterDefinitionName, patternReplaceCharFilter); -} - -internal sealed partial class CharFilterDefinitionInterfaceConverter : JsonConverter -{ - public override ICharFilterDefinition Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) - { - var copiedReader = reader; - string? type = null; - using var jsonDoc = JsonDocument.ParseValue(ref copiedReader); - if (jsonDoc is not null && jsonDoc.RootElement.TryGetProperty("type", out var readType) && readType.ValueKind == JsonValueKind.String) - { - type = readType.ToString(); - } - - switch (type) - { - case "kuromoji_iteration_mark": - return JsonSerializer.Deserialize(ref reader, options); - case "icu_normalizer": - return JsonSerializer.Deserialize(ref reader, options); - case "pattern_replace": - return JsonSerializer.Deserialize(ref reader, options); - case "mapping": - return JsonSerializer.Deserialize(ref reader, options); - case "html_strip": - return JsonSerializer.Deserialize(ref reader, options); - default: - ThrowHelper.ThrowUnknownTaggedUnionVariantJsonException(type, typeof(ICharFilterDefinition)); - return null; - } - } - - public override void Write(Utf8JsonWriter writer, ICharFilterDefinition value, JsonSerializerOptions options) - { - if (value is null) - { - writer.WriteNullValue(); - return; - } - - switch (value.Type) - { - case "kuromoji_iteration_mark": - JsonSerializer.Serialize(writer, value, typeof(KuromojiIterationMarkCharFilter), options); - return; - case "icu_normalizer": - JsonSerializer.Serialize(writer, value, typeof(IcuNormalizationCharFilter), options); - return; - case "pattern_replace": - JsonSerializer.Serialize(writer, value, typeof(PatternReplaceCharFilter), options); - return; - case "mapping": - JsonSerializer.Serialize(writer, value, typeof(MappingCharFilter), options); - return; - case "html_strip": - JsonSerializer.Serialize(writer, value, typeof(HtmlStripCharFilter), options); - return; - default: - var type = value.GetType(); - JsonSerializer.Serialize(writer, value, type, options); - return; - } - } -} - -[JsonConverter(typeof(CharFilterDefinitionInterfaceConverter))] -public partial interface ICharFilterDefinition -{ - public string Type { get; } -} \ No newline at end of file diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/CharFilters.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/CharFilters.g.cs new file mode 100644 index 00000000000..abda6ce5c5b --- /dev/null +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/CharFilters.g.cs @@ -0,0 +1,150 @@ +// Licensed to Elasticsearch B.V under one or more agreements. +// Elasticsearch B.V licenses this file to you under the Apache 2.0 License. 
+// See the LICENSE file in the project root for more information. +// +// ███╗ ██╗ ██████╗ ████████╗██╗ ██████╗███████╗ +// ████╗ ██║██╔═══██╗╚══██╔══╝██║██╔════╝██╔════╝ +// ██╔██╗ ██║██║ ██║ ██║ ██║██║ █████╗ +// ██║╚██╗██║██║ ██║ ██║ ██║██║ ██╔══╝ +// ██║ ╚████║╚██████╔╝ ██║ ██║╚██████╗███████╗ +// ╚═╝ ╚═══╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝╚══════╝ +// ------------------------------------------------ +// +// This file is automatically generated. +// Please do not edit these files manually. +// +// ------------------------------------------------ + +using Elastic.Clients.Elasticsearch.Fluent; +using Elastic.Clients.Elasticsearch.Serialization; +using Elastic.Transport; +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Linq.Expressions; +using System.Text.Json; +using System.Text.Json.Serialization; + +#nullable restore +namespace Elastic.Clients.Elasticsearch.Analysis; +public partial class CharFilters : IsADictionary +{ + public CharFilters() + { + } + + public CharFilters(IDictionary container) : base(container) + { + } + + public void Add(string name, ICharFilter charFilter) => BackingDictionary.Add(Sanitize(name), charFilter); + public bool TryGetCharFilter(string name, [NotNullWhen(returnValue: true)] out ICharFilter charFilter) => BackingDictionary.TryGetValue(Sanitize(name), out charFilter); + public bool TryGetCharFilter(string name, [NotNullWhen(returnValue: true)] out T? charFilter) + where T : class, ICharFilter + { + if (BackingDictionary.TryGetValue(Sanitize(name), out var matchedValue) && matchedValue is T finalValue) + { + charFilter = finalValue; + return true; + } + + charFilter = null; + return false; + } +} + +public sealed partial class CharFiltersDescriptor : IsADictionaryDescriptor +{ + public CharFiltersDescriptor() : base(new CharFilters()) + { + } + + public CharFiltersDescriptor(CharFilters charFilters) : base(charFilters ?? 
new CharFilters()) + { + } + + public CharFiltersDescriptor HtmlStrip(string charFilterName) => AssignVariant(charFilterName, null); + public CharFiltersDescriptor HtmlStrip(string charFilterName, Action configure) => AssignVariant(charFilterName, configure); + public CharFiltersDescriptor HtmlStrip(string charFilterName, HtmlStripCharFilter htmlStripCharFilter) => AssignVariant(charFilterName, htmlStripCharFilter); + public CharFiltersDescriptor IcuNormalization(string charFilterName) => AssignVariant(charFilterName, null); + public CharFiltersDescriptor IcuNormalization(string charFilterName, Action configure) => AssignVariant(charFilterName, configure); + public CharFiltersDescriptor IcuNormalization(string charFilterName, IcuNormalizationCharFilter icuNormalizationCharFilter) => AssignVariant(charFilterName, icuNormalizationCharFilter); + public CharFiltersDescriptor KuromojiIterationMark(string charFilterName) => AssignVariant(charFilterName, null); + public CharFiltersDescriptor KuromojiIterationMark(string charFilterName, Action configure) => AssignVariant(charFilterName, configure); + public CharFiltersDescriptor KuromojiIterationMark(string charFilterName, KuromojiIterationMarkCharFilter kuromojiIterationMarkCharFilter) => AssignVariant(charFilterName, kuromojiIterationMarkCharFilter); + public CharFiltersDescriptor Mapping(string charFilterName) => AssignVariant(charFilterName, null); + public CharFiltersDescriptor Mapping(string charFilterName, Action configure) => AssignVariant(charFilterName, configure); + public CharFiltersDescriptor Mapping(string charFilterName, MappingCharFilter mappingCharFilter) => AssignVariant(charFilterName, mappingCharFilter); + public CharFiltersDescriptor PatternReplace(string charFilterName) => AssignVariant(charFilterName, null); + public CharFiltersDescriptor PatternReplace(string charFilterName, Action configure) => AssignVariant(charFilterName, configure); + public CharFiltersDescriptor PatternReplace(string charFilterName, PatternReplaceCharFilter patternReplaceCharFilter) => AssignVariant(charFilterName, patternReplaceCharFilter); +} + +internal sealed partial class CharFilterInterfaceConverter : JsonConverter +{ + public override ICharFilter Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + var copiedReader = reader; + string? 
type = null; + using var jsonDoc = JsonDocument.ParseValue(ref copiedReader); + if (jsonDoc is not null && jsonDoc.RootElement.TryGetProperty("type", out var readType) && readType.ValueKind == JsonValueKind.String) + { + type = readType.ToString(); + } + + switch (type) + { + case "kuromoji_iteration_mark": + return JsonSerializer.Deserialize(ref reader, options); + case "icu_normalizer": + return JsonSerializer.Deserialize(ref reader, options); + case "pattern_replace": + return JsonSerializer.Deserialize(ref reader, options); + case "mapping": + return JsonSerializer.Deserialize(ref reader, options); + case "html_strip": + return JsonSerializer.Deserialize(ref reader, options); + default: + ThrowHelper.ThrowUnknownTaggedUnionVariantJsonException(type, typeof(ICharFilter)); + return null; + } + } + + public override void Write(Utf8JsonWriter writer, ICharFilter value, JsonSerializerOptions options) + { + if (value is null) + { + writer.WriteNullValue(); + return; + } + + switch (value.Type) + { + case "kuromoji_iteration_mark": + JsonSerializer.Serialize(writer, value, typeof(KuromojiIterationMarkCharFilter), options); + return; + case "icu_normalizer": + JsonSerializer.Serialize(writer, value, typeof(IcuNormalizationCharFilter), options); + return; + case "pattern_replace": + JsonSerializer.Serialize(writer, value, typeof(PatternReplaceCharFilter), options); + return; + case "mapping": + JsonSerializer.Serialize(writer, value, typeof(MappingCharFilter), options); + return; + case "html_strip": + JsonSerializer.Serialize(writer, value, typeof(HtmlStripCharFilter), options); + return; + default: + var type = value.GetType(); + JsonSerializer.Serialize(writer, value, type, options); + return; + } + } +} + +[JsonConverter(typeof(CharFilterInterfaceConverter))] +public partial interface ICharFilter +{ + public string Type { get; } +} \ No newline at end of file diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/CharGroupTokenizer.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/CharGroupTokenizer.g.cs index e2d9de5874d..efc4353e051 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/CharGroupTokenizer.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/CharGroupTokenizer.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class CharGroupTokenizer : ITokenizerDefinition +public sealed partial class CharGroupTokenizer : ITokenizer { [JsonInclude] [JsonPropertyName("max_token_length")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/CommonGramsTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/CommonGramsTokenFilter.g.cs index 82238423a95..9a9de61d0be 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/CommonGramsTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/CommonGramsTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class CommonGramsTokenFilter : ITokenFilterDefinition +public sealed partial class CommonGramsTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("common_words")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/ConditionTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/ConditionTokenFilter.g.cs index cd914b8a6c0..96b769a36e1 100644 --- 
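For illustration only, a minimal hand-written sketch (not part of the generated output) of how the renamed char filter types above might be used. It assumes parameterless constructors on the concrete filter classes and a settable Mappings collection on MappingCharFilter, implied by its "mappings" JSON property; the Add, TryGetCharFilter<T>, HtmlStrip and Mapping members are the ones declared in CharFilters.g.cs.

using System;
using Elastic.Clients.Elasticsearch.Analysis;

// Build the CharFilters collection directly via the Add overload from the diff.
var charFilters = new CharFilters();
charFilters.Add("strip_html", new HtmlStripCharFilter());
charFilters.Add("emoticons", new MappingCharFilter
{
    // Assumed settable collection, based on the "mappings" JSON property.
    Mappings = new[] { ":) => _happy_", ":( => _sad_" }
});

// Typed lookup through the TryGetCharFilter<T> helper (names are sanitized internally).
if (charFilters.TryGetCharFilter<MappingCharFilter>("emoticons", out var emoticons))
{
    Console.WriteLine(emoticons.Type); // prints "mapping"
}

// Equivalent fluent form using the CharFiltersDescriptor overloads that accept a concrete instance.
var descriptor = new CharFiltersDescriptor()
    .HtmlStrip("strip_html")
    .Mapping("emoticons", new MappingCharFilter { Mappings = new[] { ":) => _happy_", ":( => _sad_" } });

Either form yields the same name-to-ICharFilter dictionary, which CharFilterInterfaceConverter serializes by dispatching on each filter's Type discriminator.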
a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/ConditionTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/ConditionTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class ConditionTokenFilter : ITokenFilterDefinition +public sealed partial class ConditionTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("filter")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/DelimitedPayloadTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/DelimitedPayloadTokenFilter.g.cs index ef91e30461c..8ffb2e4bcd3 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/DelimitedPayloadTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/DelimitedPayloadTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class DelimitedPayloadTokenFilter : ITokenFilterDefinition +public sealed partial class DelimitedPayloadTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("delimiter")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/DictionaryDecompounderTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/DictionaryDecompounderTokenFilter.g.cs index a606c7c74ec..7e21aa430e6 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/DictionaryDecompounderTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/DictionaryDecompounderTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class DictionaryDecompounderTokenFilter : ITokenFilterDefinition +public sealed partial class DictionaryDecompounderTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("hyphenation_patterns_path")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/EdgeNGramTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/EdgeNGramTokenFilter.g.cs index 216c2d960c5..3ccc1be72aa 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/EdgeNGramTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/EdgeNGramTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class EdgeNGramTokenFilter : ITokenFilterDefinition +public sealed partial class EdgeNGramTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("max_gram")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/EdgeNGramTokenizer.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/EdgeNGramTokenizer.g.cs index e91253fc9e9..eda7ee81290 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/EdgeNGramTokenizer.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/EdgeNGramTokenizer.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class EdgeNGramTokenizer : ITokenizerDefinition +public sealed partial class EdgeNGramTokenizer : ITokenizer { [JsonInclude] [JsonPropertyName("custom_token_chars")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/ElisionTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/ElisionTokenFilter.g.cs index 6d0a90aa561..61ec3ef7078 100644 --- 
a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/ElisionTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/ElisionTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class ElisionTokenFilter : ITokenFilterDefinition +public sealed partial class ElisionTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("articles")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/FingerprintTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/FingerprintTokenFilter.g.cs index 7ee156058aa..5b5d8b11d8c 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/FingerprintTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/FingerprintTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class FingerprintTokenFilter : ITokenFilterDefinition +public sealed partial class FingerprintTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("max_output_size")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/HtmlStripCharFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/HtmlStripCharFilter.g.cs index 7e3d85fdc41..293cf5bb70c 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/HtmlStripCharFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/HtmlStripCharFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class HtmlStripCharFilter : ICharFilterDefinition +public sealed partial class HtmlStripCharFilter : ICharFilter { [JsonInclude] [JsonPropertyName("type")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/HunspellTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/HunspellTokenFilter.g.cs index 1b6ed9f45dd..660e55e2ce0 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/HunspellTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/HunspellTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class HunspellTokenFilter : ITokenFilterDefinition +public sealed partial class HunspellTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("dedup")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/HyphenationDecompounderTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/HyphenationDecompounderTokenFilter.g.cs index 86b5a873c18..d5c097e7e49 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/HyphenationDecompounderTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/HyphenationDecompounderTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class HyphenationDecompounderTokenFilter : ITokenFilterDefinition +public sealed partial class HyphenationDecompounderTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("hyphenation_patterns_path")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuCollationTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuCollationTokenFilter.g.cs index 949e6eb21cd..89a3c2fbbef 100644 --- 
a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuCollationTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuCollationTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class IcuCollationTokenFilter : ITokenFilterDefinition +public sealed partial class IcuCollationTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("alternate")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuFoldingTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuFoldingTokenFilter.g.cs index 4f484578a8b..5b2d5df31c4 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuFoldingTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuFoldingTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class IcuFoldingTokenFilter : ITokenFilterDefinition +public sealed partial class IcuFoldingTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("type")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuNormalizationCharFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuNormalizationCharFilter.g.cs index 689276fa598..7ad65529ecc 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuNormalizationCharFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuNormalizationCharFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class IcuNormalizationCharFilter : ICharFilterDefinition +public sealed partial class IcuNormalizationCharFilter : ICharFilter { [JsonInclude] [JsonPropertyName("mode")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuNormalizationTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuNormalizationTokenFilter.g.cs index 2d81ba74f85..f4520aeb3ee 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuNormalizationTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuNormalizationTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class IcuNormalizationTokenFilter : ITokenFilterDefinition +public sealed partial class IcuNormalizationTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("name")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuTokenizer.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuTokenizer.g.cs index 13ff816c7fc..1599be63e53 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuTokenizer.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuTokenizer.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class IcuTokenizer : ITokenFilterDefinition, ITokenizerDefinition +public sealed partial class IcuTokenizer : ITokenFilter, ITokenizer { [JsonInclude] [JsonPropertyName("rule_files")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuTransformTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuTransformTokenFilter.g.cs index c169364dd5f..428a571f367 100644 --- 
a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuTransformTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/IcuTransformTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class IcuTransformTokenFilter : ITokenFilterDefinition +public sealed partial class IcuTransformTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("dir")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KStemTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KStemTokenFilter.g.cs index 5e9bba76224..9bcc371cbd9 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KStemTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KStemTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class KStemTokenFilter : ITokenFilterDefinition +public sealed partial class KStemTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("type")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KeepTypesTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KeepTypesTokenFilter.g.cs index f57c5230a9f..67bf1d9829e 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KeepTypesTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KeepTypesTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class KeepTypesTokenFilter : ITokenFilterDefinition +public sealed partial class KeepTypesTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("mode")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KeepWordsTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KeepWordsTokenFilter.g.cs index fc8e2c46f08..1241b0fccd3 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KeepWordsTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KeepWordsTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class KeepWordsTokenFilter : ITokenFilterDefinition +public sealed partial class KeepWordsTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("keep_words")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KeywordMarkerTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KeywordMarkerTokenFilter.g.cs index 38be4530e59..b97e590aebe 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KeywordMarkerTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KeywordMarkerTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class KeywordMarkerTokenFilter : ITokenFilterDefinition +public sealed partial class KeywordMarkerTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("ignore_case")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KeywordTokenizer.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KeywordTokenizer.g.cs index fb1924df986..2437c3223ff 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KeywordTokenizer.g.cs +++ 
b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KeywordTokenizer.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class KeywordTokenizer : ITokenizerDefinition +public sealed partial class KeywordTokenizer : ITokenizer { [JsonInclude] [JsonPropertyName("buffer_size")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KuromojiIterationMarkCharFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KuromojiIterationMarkCharFilter.g.cs index acf2d8fd91d..1b389fb30fa 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KuromojiIterationMarkCharFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KuromojiIterationMarkCharFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class KuromojiIterationMarkCharFilter : ICharFilterDefinition +public sealed partial class KuromojiIterationMarkCharFilter : ICharFilter { [JsonInclude] [JsonPropertyName("normalize_kana")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KuromojiPartOfSpeechTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KuromojiPartOfSpeechTokenFilter.g.cs index 3afae9b50f4..d6916277329 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KuromojiPartOfSpeechTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KuromojiPartOfSpeechTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class KuromojiPartOfSpeechTokenFilter : ITokenFilterDefinition +public sealed partial class KuromojiPartOfSpeechTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("stoptags")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KuromojiReadingFormTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KuromojiReadingFormTokenFilter.g.cs index 3a6b7a207f5..ba29742466e 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KuromojiReadingFormTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KuromojiReadingFormTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class KuromojiReadingFormTokenFilter : ITokenFilterDefinition +public sealed partial class KuromojiReadingFormTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("type")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KuromojiStemmerTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KuromojiStemmerTokenFilter.g.cs index 0404194e710..e3e49fc48cd 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KuromojiStemmerTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KuromojiStemmerTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class KuromojiStemmerTokenFilter : ITokenFilterDefinition +public sealed partial class KuromojiStemmerTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("minimum_length")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KuromojiTokenizer.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KuromojiTokenizer.g.cs index 4089b2299f1..42f86abafc2 100644 --- 
a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KuromojiTokenizer.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/KuromojiTokenizer.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class KuromojiTokenizer : ITokenizerDefinition +public sealed partial class KuromojiTokenizer : ITokenizer { [JsonInclude] [JsonPropertyName("discard_compound_token")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/LengthTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/LengthTokenFilter.g.cs index 9433d80ccf1..cc9dd4ee0dd 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/LengthTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/LengthTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class LengthTokenFilter : ITokenFilterDefinition +public sealed partial class LengthTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("max")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/LetterTokenizer.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/LetterTokenizer.g.cs index bef0de91c7a..e8d013f0357 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/LetterTokenizer.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/LetterTokenizer.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class LetterTokenizer : ITokenizerDefinition +public sealed partial class LetterTokenizer : ITokenizer { [JsonInclude] [JsonPropertyName("type")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/LimitTokenCountTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/LimitTokenCountTokenFilter.g.cs index 34a8f37d325..b2b73eff473 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/LimitTokenCountTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/LimitTokenCountTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class LimitTokenCountTokenFilter : ITokenFilterDefinition +public sealed partial class LimitTokenCountTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("consume_all_tokens")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/LowercaseTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/LowercaseTokenFilter.g.cs index d0cf0fa71d9..03f78301ad9 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/LowercaseTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/LowercaseTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class LowercaseTokenFilter : ITokenFilterDefinition +public sealed partial class LowercaseTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("language")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/LowercaseTokenizer.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/LowercaseTokenizer.g.cs index cc23e788986..22de3d5814d 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/LowercaseTokenizer.g.cs +++ 
b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/LowercaseTokenizer.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class LowercaseTokenizer : ITokenizerDefinition +public sealed partial class LowercaseTokenizer : ITokenizer { [JsonInclude] [JsonPropertyName("type")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/MappingCharFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/MappingCharFilter.g.cs index 87dee038240..04662a4cb00 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/MappingCharFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/MappingCharFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class MappingCharFilter : ICharFilterDefinition +public sealed partial class MappingCharFilter : ICharFilter { [JsonInclude] [JsonPropertyName("mappings")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/MultiplexerTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/MultiplexerTokenFilter.g.cs index ae001e8b40d..738188af3f9 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/MultiplexerTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/MultiplexerTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class MultiplexerTokenFilter : ITokenFilterDefinition +public sealed partial class MultiplexerTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("filters")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/NGramTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/NGramTokenFilter.g.cs index 9bbf94e5345..786310deacb 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/NGramTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/NGramTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class NGramTokenFilter : ITokenFilterDefinition +public sealed partial class NGramTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("max_gram")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/NGramTokenizer.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/NGramTokenizer.g.cs index 82dff88defa..d91529ce3d9 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/NGramTokenizer.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/NGramTokenizer.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class NGramTokenizer : ITokenizerDefinition +public sealed partial class NGramTokenizer : ITokenizer { [JsonInclude] [JsonPropertyName("custom_token_chars")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/NoriPartOfSpeechTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/NoriPartOfSpeechTokenFilter.g.cs index b7703ee92fd..577e7385486 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/NoriPartOfSpeechTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/NoriPartOfSpeechTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed 
partial class NoriPartOfSpeechTokenFilter : ITokenFilterDefinition +public sealed partial class NoriPartOfSpeechTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("stoptags")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/NoriTokenizer.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/NoriTokenizer.g.cs index c82e1a7ff5c..0e71bdfc011 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/NoriTokenizer.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/NoriTokenizer.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class NoriTokenizer : ITokenizerDefinition +public sealed partial class NoriTokenizer : ITokenizer { [JsonInclude] [JsonPropertyName("decompound_mode")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PathHierarchyTokenizer.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PathHierarchyTokenizer.g.cs index 9926c1ea789..e2ed28dfa21 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PathHierarchyTokenizer.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PathHierarchyTokenizer.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class PathHierarchyTokenizer : ITokenizerDefinition +public sealed partial class PathHierarchyTokenizer : ITokenizer { [JsonInclude] [JsonPropertyName("buffer_size")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PatternCaptureTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PatternCaptureTokenFilter.g.cs index 557373e98a4..0f229a919c8 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PatternCaptureTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PatternCaptureTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class PatternCaptureTokenFilter : ITokenFilterDefinition +public sealed partial class PatternCaptureTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("patterns")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PatternReplaceCharFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PatternReplaceCharFilter.g.cs index c805ef15b08..335f0e24393 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PatternReplaceCharFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PatternReplaceCharFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class PatternReplaceCharFilter : ICharFilterDefinition +public sealed partial class PatternReplaceCharFilter : ICharFilter { [JsonInclude] [JsonPropertyName("flags")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PatternReplaceTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PatternReplaceTokenFilter.g.cs index 952586946d1..63a1120d97d 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PatternReplaceTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PatternReplaceTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class PatternReplaceTokenFilter : ITokenFilterDefinition +public sealed partial 
class PatternReplaceTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("all")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PatternTokenizer.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PatternTokenizer.g.cs index 58878cfac75..123bbab5377 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PatternTokenizer.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PatternTokenizer.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class PatternTokenizer : ITokenizerDefinition +public sealed partial class PatternTokenizer : ITokenizer { [JsonInclude] [JsonPropertyName("flags")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PhoneticTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PhoneticTokenFilter.g.cs index e427b7337a4..24d4839cc8b 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PhoneticTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PhoneticTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class PhoneticTokenFilter : ITokenFilterDefinition +public sealed partial class PhoneticTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("encoder")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PorterStemTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PorterStemTokenFilter.g.cs index ea1015ca353..4b4c5bb4a25 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PorterStemTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PorterStemTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class PorterStemTokenFilter : ITokenFilterDefinition +public sealed partial class PorterStemTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("type")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PredicateTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PredicateTokenFilter.g.cs index a0a64b2bd9b..fd82276691b 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PredicateTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/PredicateTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class PredicateTokenFilter : ITokenFilterDefinition +public sealed partial class PredicateTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("script")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/RemoveDuplicatesTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/RemoveDuplicatesTokenFilter.g.cs index 1f6fca0c32a..87c0045a51a 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/RemoveDuplicatesTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/RemoveDuplicatesTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class RemoveDuplicatesTokenFilter : ITokenFilterDefinition +public sealed partial class RemoveDuplicatesTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("type")] diff --git 
a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/ReverseTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/ReverseTokenFilter.g.cs index 48f0f469a00..aca6f31b3d5 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/ReverseTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/ReverseTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class ReverseTokenFilter : ITokenFilterDefinition +public sealed partial class ReverseTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("type")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/ShingleTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/ShingleTokenFilter.g.cs index f2fa9888c96..89021fd73f4 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/ShingleTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/ShingleTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class ShingleTokenFilter : ITokenFilterDefinition +public sealed partial class ShingleTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("filler_token")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/SnowballTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/SnowballTokenFilter.g.cs index 278e1579863..3504ad72669 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/SnowballTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/SnowballTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class SnowballTokenFilter : ITokenFilterDefinition +public sealed partial class SnowballTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("language")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/StandardTokenizer.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/StandardTokenizer.g.cs index 2c9e84c0c83..efdf4a19f4d 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/StandardTokenizer.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/StandardTokenizer.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class StandardTokenizer : ITokenizerDefinition +public sealed partial class StandardTokenizer : ITokenizer { [JsonInclude] [JsonPropertyName("max_token_length")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/StemmerOverrideTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/StemmerOverrideTokenFilter.g.cs index d937347e103..b131f67dcdb 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/StemmerOverrideTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/StemmerOverrideTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class StemmerOverrideTokenFilter : ITokenFilterDefinition +public sealed partial class StemmerOverrideTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("rules")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/StemmerTokenFilter.g.cs 
b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/StemmerTokenFilter.g.cs index b1e10b28af1..56024820517 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/StemmerTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/StemmerTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class StemmerTokenFilter : ITokenFilterDefinition +public sealed partial class StemmerTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("language")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/StopTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/StopTokenFilter.g.cs index 4074fdf438a..397c2d4a984 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/StopTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/StopTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class StopTokenFilter : ITokenFilterDefinition +public sealed partial class StopTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("ignore_case")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/SynonymGraphTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/SynonymGraphTokenFilter.g.cs index 1265f511337..f5f03db6cd1 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/SynonymGraphTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/SynonymGraphTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class SynonymGraphTokenFilter : ITokenFilterDefinition +public sealed partial class SynonymGraphTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("expand")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/SynonymTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/SynonymTokenFilter.g.cs index 470e80579ef..65aa38281d4 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/SynonymTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/SynonymTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class SynonymTokenFilter : ITokenFilterDefinition +public sealed partial class SynonymTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("expand")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/TokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/TokenFilter.g.cs deleted file mode 100644 index 3d151c5e7b5..00000000000 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/TokenFilter.g.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Licensed to Elasticsearch B.V under one or more agreements. -// Elasticsearch B.V licenses this file to you under the Apache 2.0 License. -// See the LICENSE file in the project root for more information. -// -// ███╗ ██╗ ██████╗ ████████╗██╗ ██████╗███████╗ -// ████╗ ██║██╔═══██╗╚══██╔══╝██║██╔════╝██╔════╝ -// ██╔██╗ ██║██║ ██║ ██║ ██║██║ █████╗ -// ██║╚██╗██║██║ ██║ ██║ ██║██║ ██╔══╝ -// ██║ ╚████║╚██████╔╝ ██║ ██║╚██████╗███████╗ -// ╚═╝ ╚═══╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝╚══════╝ -// ------------------------------------------------ -// -// This file is automatically generated. 
-// Please do not edit these files manually. -// -// ------------------------------------------------ - -using Elastic.Clients.Elasticsearch.Fluent; -using Elastic.Clients.Elasticsearch.Serialization; -using Elastic.Transport; -using System; -using System.Collections.Generic; -using System.Linq.Expressions; -using System.Text.Json; -using System.Text.Json.Serialization; - -#nullable restore -namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class TokenFilter : Union -{ - public TokenFilter(string name) : base(name) - { - } - - public TokenFilter(Elastic.Clients.Elasticsearch.Analysis.TokenFilterDefinitions definition) : base(definition) - { - } -} \ No newline at end of file diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/TokenFilterDefinitions.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/TokenFilterDefinitions.g.cs deleted file mode 100644 index 35612d740c4..00000000000 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/TokenFilterDefinitions.g.cs +++ /dev/null @@ -1,494 +0,0 @@ -// Licensed to Elasticsearch B.V under one or more agreements. -// Elasticsearch B.V licenses this file to you under the Apache 2.0 License. -// See the LICENSE file in the project root for more information. -// -// ███╗ ██╗ ██████╗ ████████╗██╗ ██████╗███████╗ -// ████╗ ██║██╔═══██╗╚══██╔══╝██║██╔════╝██╔════╝ -// ██╔██╗ ██║██║ ██║ ██║ ██║██║ █████╗ -// ██║╚██╗██║██║ ██║ ██║ ██║██║ ██╔══╝ -// ██║ ╚████║╚██████╔╝ ██║ ██║╚██████╗███████╗ -// ╚═╝ ╚═══╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝╚══════╝ -// ------------------------------------------------ -// -// This file is automatically generated. -// Please do not edit these files manually. -// -// ------------------------------------------------ - -using Elastic.Clients.Elasticsearch.Fluent; -using Elastic.Clients.Elasticsearch.Serialization; -using Elastic.Transport; -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Linq.Expressions; -using System.Text.Json; -using System.Text.Json.Serialization; - -#nullable restore -namespace Elastic.Clients.Elasticsearch.Analysis; -public partial class TokenFilterDefinitions : IsADictionary -{ - public TokenFilterDefinitions() - { - } - - public TokenFilterDefinitions(IDictionary container) : base(container) - { - } - - public void Add(string name, ITokenFilterDefinition tokenFilterDefinition) => BackingDictionary.Add(Sanitize(name), tokenFilterDefinition); - public bool TryGetTokenFilterDefinition(string name, [NotNullWhen(returnValue: true)] out ITokenFilterDefinition tokenFilterDefinition) => BackingDictionary.TryGetValue(Sanitize(name), out tokenFilterDefinition); - public bool TryGetTokenFilterDefinition(string name, [NotNullWhen(returnValue: true)] out T? tokenFilterDefinition) - where T : class, ITokenFilterDefinition - { - if (BackingDictionary.TryGetValue(Sanitize(name), out var matchedValue) && matchedValue is T finalValue) - { - tokenFilterDefinition = finalValue; - return true; - } - - tokenFilterDefinition = null; - return false; - } -} - -public sealed partial class TokenFilterDefinitionsDescriptor : IsADictionaryDescriptor -{ - public TokenFilterDefinitionsDescriptor() : base(new TokenFilterDefinitions()) - { - } - - public TokenFilterDefinitionsDescriptor(TokenFilterDefinitions tokenFilterDefinitions) : base(tokenFilterDefinitions ?? 
new TokenFilterDefinitions()) - { - } - - public TokenFilterDefinitionsDescriptor AsciiFoldingTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor AsciiFoldingTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor AsciiFoldingTokenFilter(string tokenFilterDefinitionName, AsciiFoldingTokenFilter asciiFoldingTokenFilter) => AssignVariant(tokenFilterDefinitionName, asciiFoldingTokenFilter); - public TokenFilterDefinitionsDescriptor CommonGramsTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor CommonGramsTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor CommonGramsTokenFilter(string tokenFilterDefinitionName, CommonGramsTokenFilter commonGramsTokenFilter) => AssignVariant(tokenFilterDefinitionName, commonGramsTokenFilter); - public TokenFilterDefinitionsDescriptor ConditionTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor ConditionTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor ConditionTokenFilter(string tokenFilterDefinitionName, ConditionTokenFilter conditionTokenFilter) => AssignVariant(tokenFilterDefinitionName, conditionTokenFilter); - public TokenFilterDefinitionsDescriptor DelimitedPayloadTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor DelimitedPayloadTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor DelimitedPayloadTokenFilter(string tokenFilterDefinitionName, DelimitedPayloadTokenFilter delimitedPayloadTokenFilter) => AssignVariant(tokenFilterDefinitionName, delimitedPayloadTokenFilter); - public TokenFilterDefinitionsDescriptor DictionaryDecompounderTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor DictionaryDecompounderTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor DictionaryDecompounderTokenFilter(string tokenFilterDefinitionName, DictionaryDecompounderTokenFilter dictionaryDecompounderTokenFilter) => AssignVariant(tokenFilterDefinitionName, dictionaryDecompounderTokenFilter); - public TokenFilterDefinitionsDescriptor EdgeNGramTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor EdgeNGramTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor EdgeNGramTokenFilter(string tokenFilterDefinitionName, EdgeNGramTokenFilter edgeNGramTokenFilter) => AssignVariant(tokenFilterDefinitionName, edgeNGramTokenFilter); - public TokenFilterDefinitionsDescriptor ElisionTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor 
ElisionTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor ElisionTokenFilter(string tokenFilterDefinitionName, ElisionTokenFilter elisionTokenFilter) => AssignVariant(tokenFilterDefinitionName, elisionTokenFilter); - public TokenFilterDefinitionsDescriptor FingerprintTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor FingerprintTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor FingerprintTokenFilter(string tokenFilterDefinitionName, FingerprintTokenFilter fingerprintTokenFilter) => AssignVariant(tokenFilterDefinitionName, fingerprintTokenFilter); - public TokenFilterDefinitionsDescriptor HunspellTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor HunspellTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor HunspellTokenFilter(string tokenFilterDefinitionName, HunspellTokenFilter hunspellTokenFilter) => AssignVariant(tokenFilterDefinitionName, hunspellTokenFilter); - public TokenFilterDefinitionsDescriptor HyphenationDecompounderTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor HyphenationDecompounderTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor HyphenationDecompounderTokenFilter(string tokenFilterDefinitionName, HyphenationDecompounderTokenFilter hyphenationDecompounderTokenFilter) => AssignVariant(tokenFilterDefinitionName, hyphenationDecompounderTokenFilter); - public TokenFilterDefinitionsDescriptor IcuCollationTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor IcuCollationTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor IcuCollationTokenFilter(string tokenFilterDefinitionName, IcuCollationTokenFilter icuCollationTokenFilter) => AssignVariant(tokenFilterDefinitionName, icuCollationTokenFilter); - public TokenFilterDefinitionsDescriptor IcuFoldingTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor IcuFoldingTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor IcuFoldingTokenFilter(string tokenFilterDefinitionName, IcuFoldingTokenFilter icuFoldingTokenFilter) => AssignVariant(tokenFilterDefinitionName, icuFoldingTokenFilter); - public TokenFilterDefinitionsDescriptor IcuNormalizationTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor IcuNormalizationTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor IcuNormalizationTokenFilter(string tokenFilterDefinitionName, IcuNormalizationTokenFilter 
icuNormalizationTokenFilter) => AssignVariant(tokenFilterDefinitionName, icuNormalizationTokenFilter); - public TokenFilterDefinitionsDescriptor IcuTokenizer(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor IcuTokenizer(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor IcuTokenizer(string tokenFilterDefinitionName, IcuTokenizer icuTokenizer) => AssignVariant(tokenFilterDefinitionName, icuTokenizer); - public TokenFilterDefinitionsDescriptor IcuTransformTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor IcuTransformTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor IcuTransformTokenFilter(string tokenFilterDefinitionName, IcuTransformTokenFilter icuTransformTokenFilter) => AssignVariant(tokenFilterDefinitionName, icuTransformTokenFilter); - public TokenFilterDefinitionsDescriptor KeepTypesTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor KeepTypesTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor KeepTypesTokenFilter(string tokenFilterDefinitionName, KeepTypesTokenFilter keepTypesTokenFilter) => AssignVariant(tokenFilterDefinitionName, keepTypesTokenFilter); - public TokenFilterDefinitionsDescriptor KeepWordsTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor KeepWordsTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor KeepWordsTokenFilter(string tokenFilterDefinitionName, KeepWordsTokenFilter keepWordsTokenFilter) => AssignVariant(tokenFilterDefinitionName, keepWordsTokenFilter); - public TokenFilterDefinitionsDescriptor KeywordMarkerTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor KeywordMarkerTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor KeywordMarkerTokenFilter(string tokenFilterDefinitionName, KeywordMarkerTokenFilter keywordMarkerTokenFilter) => AssignVariant(tokenFilterDefinitionName, keywordMarkerTokenFilter); - public TokenFilterDefinitionsDescriptor KStemTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor KStemTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor KStemTokenFilter(string tokenFilterDefinitionName, KStemTokenFilter kStemTokenFilter) => AssignVariant(tokenFilterDefinitionName, kStemTokenFilter); - public TokenFilterDefinitionsDescriptor KuromojiPartOfSpeechTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor KuromojiPartOfSpeechTokenFilter(string tokenFilterDefinitionName, Action configure) => 
AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor KuromojiPartOfSpeechTokenFilter(string tokenFilterDefinitionName, KuromojiPartOfSpeechTokenFilter kuromojiPartOfSpeechTokenFilter) => AssignVariant(tokenFilterDefinitionName, kuromojiPartOfSpeechTokenFilter); - public TokenFilterDefinitionsDescriptor KuromojiReadingFormTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor KuromojiReadingFormTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor KuromojiReadingFormTokenFilter(string tokenFilterDefinitionName, KuromojiReadingFormTokenFilter kuromojiReadingFormTokenFilter) => AssignVariant(tokenFilterDefinitionName, kuromojiReadingFormTokenFilter); - public TokenFilterDefinitionsDescriptor KuromojiStemmerTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor KuromojiStemmerTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor KuromojiStemmerTokenFilter(string tokenFilterDefinitionName, KuromojiStemmerTokenFilter kuromojiStemmerTokenFilter) => AssignVariant(tokenFilterDefinitionName, kuromojiStemmerTokenFilter); - public TokenFilterDefinitionsDescriptor LengthTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor LengthTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor LengthTokenFilter(string tokenFilterDefinitionName, LengthTokenFilter lengthTokenFilter) => AssignVariant(tokenFilterDefinitionName, lengthTokenFilter); - public TokenFilterDefinitionsDescriptor LimitTokenCountTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor LimitTokenCountTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor LimitTokenCountTokenFilter(string tokenFilterDefinitionName, LimitTokenCountTokenFilter limitTokenCountTokenFilter) => AssignVariant(tokenFilterDefinitionName, limitTokenCountTokenFilter); - public TokenFilterDefinitionsDescriptor LowercaseTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor LowercaseTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor LowercaseTokenFilter(string tokenFilterDefinitionName, LowercaseTokenFilter lowercaseTokenFilter) => AssignVariant(tokenFilterDefinitionName, lowercaseTokenFilter); - public TokenFilterDefinitionsDescriptor MultiplexerTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor MultiplexerTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor MultiplexerTokenFilter(string tokenFilterDefinitionName, MultiplexerTokenFilter multiplexerTokenFilter) => 
AssignVariant(tokenFilterDefinitionName, multiplexerTokenFilter); - public TokenFilterDefinitionsDescriptor NGramTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor NGramTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor NGramTokenFilter(string tokenFilterDefinitionName, NGramTokenFilter nGramTokenFilter) => AssignVariant(tokenFilterDefinitionName, nGramTokenFilter); - public TokenFilterDefinitionsDescriptor NoriPartOfSpeechTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor NoriPartOfSpeechTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor NoriPartOfSpeechTokenFilter(string tokenFilterDefinitionName, NoriPartOfSpeechTokenFilter noriPartOfSpeechTokenFilter) => AssignVariant(tokenFilterDefinitionName, noriPartOfSpeechTokenFilter); - public TokenFilterDefinitionsDescriptor PatternCaptureTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor PatternCaptureTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor PatternCaptureTokenFilter(string tokenFilterDefinitionName, PatternCaptureTokenFilter patternCaptureTokenFilter) => AssignVariant(tokenFilterDefinitionName, patternCaptureTokenFilter); - public TokenFilterDefinitionsDescriptor PatternReplaceTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor PatternReplaceTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor PatternReplaceTokenFilter(string tokenFilterDefinitionName, PatternReplaceTokenFilter patternReplaceTokenFilter) => AssignVariant(tokenFilterDefinitionName, patternReplaceTokenFilter); - public TokenFilterDefinitionsDescriptor PhoneticTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor PhoneticTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor PhoneticTokenFilter(string tokenFilterDefinitionName, PhoneticTokenFilter phoneticTokenFilter) => AssignVariant(tokenFilterDefinitionName, phoneticTokenFilter); - public TokenFilterDefinitionsDescriptor PorterStemTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor PorterStemTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor PorterStemTokenFilter(string tokenFilterDefinitionName, PorterStemTokenFilter porterStemTokenFilter) => AssignVariant(tokenFilterDefinitionName, porterStemTokenFilter); - public TokenFilterDefinitionsDescriptor PredicateTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor PredicateTokenFilter(string tokenFilterDefinitionName, Action 
configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor PredicateTokenFilter(string tokenFilterDefinitionName, PredicateTokenFilter predicateTokenFilter) => AssignVariant(tokenFilterDefinitionName, predicateTokenFilter); - public TokenFilterDefinitionsDescriptor RemoveDuplicatesTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor RemoveDuplicatesTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor RemoveDuplicatesTokenFilter(string tokenFilterDefinitionName, RemoveDuplicatesTokenFilter removeDuplicatesTokenFilter) => AssignVariant(tokenFilterDefinitionName, removeDuplicatesTokenFilter); - public TokenFilterDefinitionsDescriptor ReverseTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor ReverseTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor ReverseTokenFilter(string tokenFilterDefinitionName, ReverseTokenFilter reverseTokenFilter) => AssignVariant(tokenFilterDefinitionName, reverseTokenFilter); - public TokenFilterDefinitionsDescriptor ShingleTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor ShingleTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor ShingleTokenFilter(string tokenFilterDefinitionName, ShingleTokenFilter shingleTokenFilter) => AssignVariant(tokenFilterDefinitionName, shingleTokenFilter); - public TokenFilterDefinitionsDescriptor SnowballTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor SnowballTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor SnowballTokenFilter(string tokenFilterDefinitionName, SnowballTokenFilter snowballTokenFilter) => AssignVariant(tokenFilterDefinitionName, snowballTokenFilter); - public TokenFilterDefinitionsDescriptor StemmerOverrideTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor StemmerOverrideTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor StemmerOverrideTokenFilter(string tokenFilterDefinitionName, StemmerOverrideTokenFilter stemmerOverrideTokenFilter) => AssignVariant(tokenFilterDefinitionName, stemmerOverrideTokenFilter); - public TokenFilterDefinitionsDescriptor StemmerTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor StemmerTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor StemmerTokenFilter(string tokenFilterDefinitionName, StemmerTokenFilter stemmerTokenFilter) => AssignVariant(tokenFilterDefinitionName, stemmerTokenFilter); - public TokenFilterDefinitionsDescriptor StopTokenFilter(string 
tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor StopTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor StopTokenFilter(string tokenFilterDefinitionName, StopTokenFilter stopTokenFilter) => AssignVariant(tokenFilterDefinitionName, stopTokenFilter); - public TokenFilterDefinitionsDescriptor SynonymGraphTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor SynonymGraphTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor SynonymGraphTokenFilter(string tokenFilterDefinitionName, SynonymGraphTokenFilter synonymGraphTokenFilter) => AssignVariant(tokenFilterDefinitionName, synonymGraphTokenFilter); - public TokenFilterDefinitionsDescriptor SynonymTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor SynonymTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor SynonymTokenFilter(string tokenFilterDefinitionName, SynonymTokenFilter synonymTokenFilter) => AssignVariant(tokenFilterDefinitionName, synonymTokenFilter); - public TokenFilterDefinitionsDescriptor TrimTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor TrimTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor TrimTokenFilter(string tokenFilterDefinitionName, TrimTokenFilter trimTokenFilter) => AssignVariant(tokenFilterDefinitionName, trimTokenFilter); - public TokenFilterDefinitionsDescriptor TruncateTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor TruncateTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor TruncateTokenFilter(string tokenFilterDefinitionName, TruncateTokenFilter truncateTokenFilter) => AssignVariant(tokenFilterDefinitionName, truncateTokenFilter); - public TokenFilterDefinitionsDescriptor UniqueTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor UniqueTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor UniqueTokenFilter(string tokenFilterDefinitionName, UniqueTokenFilter uniqueTokenFilter) => AssignVariant(tokenFilterDefinitionName, uniqueTokenFilter); - public TokenFilterDefinitionsDescriptor UppercaseTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor UppercaseTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor UppercaseTokenFilter(string tokenFilterDefinitionName, UppercaseTokenFilter uppercaseTokenFilter) => AssignVariant(tokenFilterDefinitionName, uppercaseTokenFilter); - public 
TokenFilterDefinitionsDescriptor WordDelimiterGraphTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor WordDelimiterGraphTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor WordDelimiterGraphTokenFilter(string tokenFilterDefinitionName, WordDelimiterGraphTokenFilter wordDelimiterGraphTokenFilter) => AssignVariant(tokenFilterDefinitionName, wordDelimiterGraphTokenFilter); - public TokenFilterDefinitionsDescriptor WordDelimiterTokenFilter(string tokenFilterDefinitionName) => AssignVariant(tokenFilterDefinitionName, null); - public TokenFilterDefinitionsDescriptor WordDelimiterTokenFilter(string tokenFilterDefinitionName, Action configure) => AssignVariant(tokenFilterDefinitionName, configure); - public TokenFilterDefinitionsDescriptor WordDelimiterTokenFilter(string tokenFilterDefinitionName, WordDelimiterTokenFilter wordDelimiterTokenFilter) => AssignVariant(tokenFilterDefinitionName, wordDelimiterTokenFilter); -} - -internal sealed partial class TokenFilterDefinitionInterfaceConverter : JsonConverter -{ - public override ITokenFilterDefinition Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) - { - var copiedReader = reader; - string? type = null; - using var jsonDoc = JsonDocument.ParseValue(ref copiedReader); - if (jsonDoc is not null && jsonDoc.RootElement.TryGetProperty("type", out var readType) && readType.ValueKind == JsonValueKind.String) - { - type = readType.ToString(); - } - - switch (type) - { - case "dictionary_decompounder": - return JsonSerializer.Deserialize(ref reader, options); - case "phonetic": - return JsonSerializer.Deserialize(ref reader, options); - case "icu_transform": - return JsonSerializer.Deserialize(ref reader, options); - case "icu_normalizer": - return JsonSerializer.Deserialize(ref reader, options); - case "icu_folding": - return JsonSerializer.Deserialize(ref reader, options); - case "icu_collation": - return JsonSerializer.Deserialize(ref reader, options); - case "icu_tokenizer": - return JsonSerializer.Deserialize(ref reader, options); - case "kuromoji_part_of_speech": - return JsonSerializer.Deserialize(ref reader, options); - case "kuromoji_readingform": - return JsonSerializer.Deserialize(ref reader, options); - case "kuromoji_stemmer": - return JsonSerializer.Deserialize(ref reader, options); - case "word_delimiter": - return JsonSerializer.Deserialize(ref reader, options); - case "word_delimiter_graph": - return JsonSerializer.Deserialize(ref reader, options); - case "uppercase": - return JsonSerializer.Deserialize(ref reader, options); - case "unique": - return JsonSerializer.Deserialize(ref reader, options); - case "truncate": - return JsonSerializer.Deserialize(ref reader, options); - case "trim": - return JsonSerializer.Deserialize(ref reader, options); - case "synonym": - return JsonSerializer.Deserialize(ref reader, options); - case "synonym_graph": - return JsonSerializer.Deserialize(ref reader, options); - case "stop": - return JsonSerializer.Deserialize(ref reader, options); - case "stemmer": - return JsonSerializer.Deserialize(ref reader, options); - case "stemmer_override": - return JsonSerializer.Deserialize(ref reader, options); - case "snowball": - return JsonSerializer.Deserialize(ref reader, options); - case "shingle": - return JsonSerializer.Deserialize(ref reader, options); - case "reverse": - return 
JsonSerializer.Deserialize(ref reader, options); - case "remove_duplicates": - return JsonSerializer.Deserialize(ref reader, options); - case "predicate_token_filter": - return JsonSerializer.Deserialize(ref reader, options); - case "porter_stem": - return JsonSerializer.Deserialize(ref reader, options); - case "pattern_replace": - return JsonSerializer.Deserialize(ref reader, options); - case "pattern_capture": - return JsonSerializer.Deserialize(ref reader, options); - case "nori_part_of_speech": - return JsonSerializer.Deserialize(ref reader, options); - case "ngram": - return JsonSerializer.Deserialize(ref reader, options); - case "multiplexer": - return JsonSerializer.Deserialize(ref reader, options); - case "lowercase": - return JsonSerializer.Deserialize(ref reader, options); - case "limit": - return JsonSerializer.Deserialize(ref reader, options); - case "length": - return JsonSerializer.Deserialize(ref reader, options); - case "kstem": - return JsonSerializer.Deserialize(ref reader, options); - case "keyword_marker": - return JsonSerializer.Deserialize(ref reader, options); - case "keep": - return JsonSerializer.Deserialize(ref reader, options); - case "keep_types": - return JsonSerializer.Deserialize(ref reader, options); - case "hyphenation_decompounder": - return JsonSerializer.Deserialize(ref reader, options); - case "hunspell": - return JsonSerializer.Deserialize(ref reader, options); - case "fingerprint": - return JsonSerializer.Deserialize(ref reader, options); - case "elision": - return JsonSerializer.Deserialize(ref reader, options); - case "edge_ngram": - return JsonSerializer.Deserialize(ref reader, options); - case "delimited_payload": - return JsonSerializer.Deserialize(ref reader, options); - case "condition": - return JsonSerializer.Deserialize(ref reader, options); - case "common_grams": - return JsonSerializer.Deserialize(ref reader, options); - case "asciifolding": - return JsonSerializer.Deserialize(ref reader, options); - default: - ThrowHelper.ThrowUnknownTaggedUnionVariantJsonException(type, typeof(ITokenFilterDefinition)); - return null; - } - } - - public override void Write(Utf8JsonWriter writer, ITokenFilterDefinition value, JsonSerializerOptions options) - { - if (value is null) - { - writer.WriteNullValue(); - return; - } - - switch (value.Type) - { - case "dictionary_decompounder": - JsonSerializer.Serialize(writer, value, typeof(DictionaryDecompounderTokenFilter), options); - return; - case "phonetic": - JsonSerializer.Serialize(writer, value, typeof(PhoneticTokenFilter), options); - return; - case "icu_transform": - JsonSerializer.Serialize(writer, value, typeof(IcuTransformTokenFilter), options); - return; - case "icu_normalizer": - JsonSerializer.Serialize(writer, value, typeof(IcuNormalizationTokenFilter), options); - return; - case "icu_folding": - JsonSerializer.Serialize(writer, value, typeof(IcuFoldingTokenFilter), options); - return; - case "icu_collation": - JsonSerializer.Serialize(writer, value, typeof(IcuCollationTokenFilter), options); - return; - case "icu_tokenizer": - JsonSerializer.Serialize(writer, value, typeof(IcuTokenizer), options); - return; - case "kuromoji_part_of_speech": - JsonSerializer.Serialize(writer, value, typeof(KuromojiPartOfSpeechTokenFilter), options); - return; - case "kuromoji_readingform": - JsonSerializer.Serialize(writer, value, typeof(KuromojiReadingFormTokenFilter), options); - return; - case "kuromoji_stemmer": - JsonSerializer.Serialize(writer, value, typeof(KuromojiStemmerTokenFilter), options); - 
return; - case "word_delimiter": - JsonSerializer.Serialize(writer, value, typeof(WordDelimiterTokenFilter), options); - return; - case "word_delimiter_graph": - JsonSerializer.Serialize(writer, value, typeof(WordDelimiterGraphTokenFilter), options); - return; - case "uppercase": - JsonSerializer.Serialize(writer, value, typeof(UppercaseTokenFilter), options); - return; - case "unique": - JsonSerializer.Serialize(writer, value, typeof(UniqueTokenFilter), options); - return; - case "truncate": - JsonSerializer.Serialize(writer, value, typeof(TruncateTokenFilter), options); - return; - case "trim": - JsonSerializer.Serialize(writer, value, typeof(TrimTokenFilter), options); - return; - case "synonym": - JsonSerializer.Serialize(writer, value, typeof(SynonymTokenFilter), options); - return; - case "synonym_graph": - JsonSerializer.Serialize(writer, value, typeof(SynonymGraphTokenFilter), options); - return; - case "stop": - JsonSerializer.Serialize(writer, value, typeof(StopTokenFilter), options); - return; - case "stemmer": - JsonSerializer.Serialize(writer, value, typeof(StemmerTokenFilter), options); - return; - case "stemmer_override": - JsonSerializer.Serialize(writer, value, typeof(StemmerOverrideTokenFilter), options); - return; - case "snowball": - JsonSerializer.Serialize(writer, value, typeof(SnowballTokenFilter), options); - return; - case "shingle": - JsonSerializer.Serialize(writer, value, typeof(ShingleTokenFilter), options); - return; - case "reverse": - JsonSerializer.Serialize(writer, value, typeof(ReverseTokenFilter), options); - return; - case "remove_duplicates": - JsonSerializer.Serialize(writer, value, typeof(RemoveDuplicatesTokenFilter), options); - return; - case "predicate_token_filter": - JsonSerializer.Serialize(writer, value, typeof(PredicateTokenFilter), options); - return; - case "porter_stem": - JsonSerializer.Serialize(writer, value, typeof(PorterStemTokenFilter), options); - return; - case "pattern_replace": - JsonSerializer.Serialize(writer, value, typeof(PatternReplaceTokenFilter), options); - return; - case "pattern_capture": - JsonSerializer.Serialize(writer, value, typeof(PatternCaptureTokenFilter), options); - return; - case "nori_part_of_speech": - JsonSerializer.Serialize(writer, value, typeof(NoriPartOfSpeechTokenFilter), options); - return; - case "ngram": - JsonSerializer.Serialize(writer, value, typeof(NGramTokenFilter), options); - return; - case "multiplexer": - JsonSerializer.Serialize(writer, value, typeof(MultiplexerTokenFilter), options); - return; - case "lowercase": - JsonSerializer.Serialize(writer, value, typeof(LowercaseTokenFilter), options); - return; - case "limit": - JsonSerializer.Serialize(writer, value, typeof(LimitTokenCountTokenFilter), options); - return; - case "length": - JsonSerializer.Serialize(writer, value, typeof(LengthTokenFilter), options); - return; - case "kstem": - JsonSerializer.Serialize(writer, value, typeof(KStemTokenFilter), options); - return; - case "keyword_marker": - JsonSerializer.Serialize(writer, value, typeof(KeywordMarkerTokenFilter), options); - return; - case "keep": - JsonSerializer.Serialize(writer, value, typeof(KeepWordsTokenFilter), options); - return; - case "keep_types": - JsonSerializer.Serialize(writer, value, typeof(KeepTypesTokenFilter), options); - return; - case "hyphenation_decompounder": - JsonSerializer.Serialize(writer, value, typeof(HyphenationDecompounderTokenFilter), options); - return; - case "hunspell": - JsonSerializer.Serialize(writer, value, typeof(HunspellTokenFilter), 
options); - return; - case "fingerprint": - JsonSerializer.Serialize(writer, value, typeof(FingerprintTokenFilter), options); - return; - case "elision": - JsonSerializer.Serialize(writer, value, typeof(ElisionTokenFilter), options); - return; - case "edge_ngram": - JsonSerializer.Serialize(writer, value, typeof(EdgeNGramTokenFilter), options); - return; - case "delimited_payload": - JsonSerializer.Serialize(writer, value, typeof(DelimitedPayloadTokenFilter), options); - return; - case "condition": - JsonSerializer.Serialize(writer, value, typeof(ConditionTokenFilter), options); - return; - case "common_grams": - JsonSerializer.Serialize(writer, value, typeof(CommonGramsTokenFilter), options); - return; - case "asciifolding": - JsonSerializer.Serialize(writer, value, typeof(AsciiFoldingTokenFilter), options); - return; - default: - var type = value.GetType(); - JsonSerializer.Serialize(writer, value, type, options); - return; - } - } -} - -[JsonConverter(typeof(TokenFilterDefinitionInterfaceConverter))] -public partial interface ITokenFilterDefinition -{ - public string Type { get; } -} \ No newline at end of file diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/TokenFilters.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/TokenFilters.g.cs new file mode 100644 index 00000000000..fc0298cb22f --- /dev/null +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/TokenFilters.g.cs @@ -0,0 +1,494 @@ +// Licensed to Elasticsearch B.V under one or more agreements. +// Elasticsearch B.V licenses this file to you under the Apache 2.0 License. +// See the LICENSE file in the project root for more information. +// +// ███╗ ██╗ ██████╗ ████████╗██╗ ██████╗███████╗ +// ████╗ ██║██╔═══██╗╚══██╔══╝██║██╔════╝██╔════╝ +// ██╔██╗ ██║██║ ██║ ██║ ██║██║ █████╗ +// ██║╚██╗██║██║ ██║ ██║ ██║██║ ██╔══╝ +// ██║ ╚████║╚██████╔╝ ██║ ██║╚██████╗███████╗ +// ╚═╝ ╚═══╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝╚══════╝ +// ------------------------------------------------ +// +// This file is automatically generated. +// Please do not edit these files manually. +// +// ------------------------------------------------ + +using Elastic.Clients.Elasticsearch.Fluent; +using Elastic.Clients.Elasticsearch.Serialization; +using Elastic.Transport; +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Linq.Expressions; +using System.Text.Json; +using System.Text.Json.Serialization; + +#nullable restore +namespace Elastic.Clients.Elasticsearch.Analysis; +public partial class TokenFilters : IsADictionary +{ + public TokenFilters() + { + } + + public TokenFilters(IDictionary container) : base(container) + { + } + + public void Add(string name, ITokenFilter tokenFilter) => BackingDictionary.Add(Sanitize(name), tokenFilter); + public bool TryGetTokenFilter(string name, [NotNullWhen(returnValue: true)] out ITokenFilter tokenFilter) => BackingDictionary.TryGetValue(Sanitize(name), out tokenFilter); + public bool TryGetTokenFilter(string name, [NotNullWhen(returnValue: true)] out T? 
tokenFilter) + where T : class, ITokenFilter + { + if (BackingDictionary.TryGetValue(Sanitize(name), out var matchedValue) && matchedValue is T finalValue) + { + tokenFilter = finalValue; + return true; + } + + tokenFilter = null; + return false; + } +} + +public sealed partial class TokenFiltersDescriptor : IsADictionaryDescriptor +{ + public TokenFiltersDescriptor() : base(new TokenFilters()) + { + } + + public TokenFiltersDescriptor(TokenFilters tokenFilters) : base(tokenFilters ?? new TokenFilters()) + { + } + + public TokenFiltersDescriptor AsciiFolding(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor AsciiFolding(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor AsciiFolding(string tokenFilterName, AsciiFoldingTokenFilter asciiFoldingTokenFilter) => AssignVariant(tokenFilterName, asciiFoldingTokenFilter); + public TokenFiltersDescriptor CommonGrams(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor CommonGrams(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor CommonGrams(string tokenFilterName, CommonGramsTokenFilter commonGramsTokenFilter) => AssignVariant(tokenFilterName, commonGramsTokenFilter); + public TokenFiltersDescriptor Condition(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor Condition(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor Condition(string tokenFilterName, ConditionTokenFilter conditionTokenFilter) => AssignVariant(tokenFilterName, conditionTokenFilter); + public TokenFiltersDescriptor DelimitedPayload(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor DelimitedPayload(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor DelimitedPayload(string tokenFilterName, DelimitedPayloadTokenFilter delimitedPayloadTokenFilter) => AssignVariant(tokenFilterName, delimitedPayloadTokenFilter); + public TokenFiltersDescriptor DictionaryDecompounder(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor DictionaryDecompounder(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor DictionaryDecompounder(string tokenFilterName, DictionaryDecompounderTokenFilter dictionaryDecompounderTokenFilter) => AssignVariant(tokenFilterName, dictionaryDecompounderTokenFilter); + public TokenFiltersDescriptor EdgeNGram(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor EdgeNGram(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor EdgeNGram(string tokenFilterName, EdgeNGramTokenFilter edgeNGramTokenFilter) => AssignVariant(tokenFilterName, edgeNGramTokenFilter); + public TokenFiltersDescriptor Elision(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor Elision(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor Elision(string tokenFilterName, ElisionTokenFilter elisionTokenFilter) => AssignVariant(tokenFilterName, elisionTokenFilter); + public TokenFiltersDescriptor Fingerprint(string tokenFilterName) => 
AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor Fingerprint(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor Fingerprint(string tokenFilterName, FingerprintTokenFilter fingerprintTokenFilter) => AssignVariant(tokenFilterName, fingerprintTokenFilter); + public TokenFiltersDescriptor Hunspell(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor Hunspell(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor Hunspell(string tokenFilterName, HunspellTokenFilter hunspellTokenFilter) => AssignVariant(tokenFilterName, hunspellTokenFilter); + public TokenFiltersDescriptor HyphenationDecompounder(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor HyphenationDecompounder(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor HyphenationDecompounder(string tokenFilterName, HyphenationDecompounderTokenFilter hyphenationDecompounderTokenFilter) => AssignVariant(tokenFilterName, hyphenationDecompounderTokenFilter); + public TokenFiltersDescriptor IcuCollation(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor IcuCollation(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor IcuCollation(string tokenFilterName, IcuCollationTokenFilter icuCollationTokenFilter) => AssignVariant(tokenFilterName, icuCollationTokenFilter); + public TokenFiltersDescriptor IcuFolding(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor IcuFolding(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor IcuFolding(string tokenFilterName, IcuFoldingTokenFilter icuFoldingTokenFilter) => AssignVariant(tokenFilterName, icuFoldingTokenFilter); + public TokenFiltersDescriptor IcuNormalization(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor IcuNormalization(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor IcuNormalization(string tokenFilterName, IcuNormalizationTokenFilter icuNormalizationTokenFilter) => AssignVariant(tokenFilterName, icuNormalizationTokenFilter); + public TokenFiltersDescriptor IcuTokenizer(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor IcuTokenizer(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor IcuTokenizer(string tokenFilterName, IcuTokenizer icuTokenizer) => AssignVariant(tokenFilterName, icuTokenizer); + public TokenFiltersDescriptor IcuTransform(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor IcuTransform(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor IcuTransform(string tokenFilterName, IcuTransformTokenFilter icuTransformTokenFilter) => AssignVariant(tokenFilterName, icuTransformTokenFilter); + public TokenFiltersDescriptor KeepTypes(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor KeepTypes(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + 
public TokenFiltersDescriptor KeepTypes(string tokenFilterName, KeepTypesTokenFilter keepTypesTokenFilter) => AssignVariant(tokenFilterName, keepTypesTokenFilter); + public TokenFiltersDescriptor KeepWords(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor KeepWords(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor KeepWords(string tokenFilterName, KeepWordsTokenFilter keepWordsTokenFilter) => AssignVariant(tokenFilterName, keepWordsTokenFilter); + public TokenFiltersDescriptor KeywordMarker(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor KeywordMarker(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor KeywordMarker(string tokenFilterName, KeywordMarkerTokenFilter keywordMarkerTokenFilter) => AssignVariant(tokenFilterName, keywordMarkerTokenFilter); + public TokenFiltersDescriptor KStem(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor KStem(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor KStem(string tokenFilterName, KStemTokenFilter kStemTokenFilter) => AssignVariant(tokenFilterName, kStemTokenFilter); + public TokenFiltersDescriptor KuromojiPartOfSpeech(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor KuromojiPartOfSpeech(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor KuromojiPartOfSpeech(string tokenFilterName, KuromojiPartOfSpeechTokenFilter kuromojiPartOfSpeechTokenFilter) => AssignVariant(tokenFilterName, kuromojiPartOfSpeechTokenFilter); + public TokenFiltersDescriptor KuromojiReadingForm(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor KuromojiReadingForm(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor KuromojiReadingForm(string tokenFilterName, KuromojiReadingFormTokenFilter kuromojiReadingFormTokenFilter) => AssignVariant(tokenFilterName, kuromojiReadingFormTokenFilter); + public TokenFiltersDescriptor KuromojiStemmer(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor KuromojiStemmer(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor KuromojiStemmer(string tokenFilterName, KuromojiStemmerTokenFilter kuromojiStemmerTokenFilter) => AssignVariant(tokenFilterName, kuromojiStemmerTokenFilter); + public TokenFiltersDescriptor Length(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor Length(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor Length(string tokenFilterName, LengthTokenFilter lengthTokenFilter) => AssignVariant(tokenFilterName, lengthTokenFilter); + public TokenFiltersDescriptor LimitTokenCount(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor LimitTokenCount(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor LimitTokenCount(string tokenFilterName, LimitTokenCountTokenFilter limitTokenCountTokenFilter) => AssignVariant(tokenFilterName, 
limitTokenCountTokenFilter); + public TokenFiltersDescriptor Lowercase(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor Lowercase(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor Lowercase(string tokenFilterName, LowercaseTokenFilter lowercaseTokenFilter) => AssignVariant(tokenFilterName, lowercaseTokenFilter); + public TokenFiltersDescriptor Multiplexer(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor Multiplexer(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor Multiplexer(string tokenFilterName, MultiplexerTokenFilter multiplexerTokenFilter) => AssignVariant(tokenFilterName, multiplexerTokenFilter); + public TokenFiltersDescriptor NGram(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor NGram(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor NGram(string tokenFilterName, NGramTokenFilter nGramTokenFilter) => AssignVariant(tokenFilterName, nGramTokenFilter); + public TokenFiltersDescriptor NoriPartOfSpeech(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor NoriPartOfSpeech(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor NoriPartOfSpeech(string tokenFilterName, NoriPartOfSpeechTokenFilter noriPartOfSpeechTokenFilter) => AssignVariant(tokenFilterName, noriPartOfSpeechTokenFilter); + public TokenFiltersDescriptor PatternCapture(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor PatternCapture(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor PatternCapture(string tokenFilterName, PatternCaptureTokenFilter patternCaptureTokenFilter) => AssignVariant(tokenFilterName, patternCaptureTokenFilter); + public TokenFiltersDescriptor PatternReplace(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor PatternReplace(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor PatternReplace(string tokenFilterName, PatternReplaceTokenFilter patternReplaceTokenFilter) => AssignVariant(tokenFilterName, patternReplaceTokenFilter); + public TokenFiltersDescriptor Phonetic(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor Phonetic(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor Phonetic(string tokenFilterName, PhoneticTokenFilter phoneticTokenFilter) => AssignVariant(tokenFilterName, phoneticTokenFilter); + public TokenFiltersDescriptor PorterStem(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor PorterStem(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor PorterStem(string tokenFilterName, PorterStemTokenFilter porterStemTokenFilter) => AssignVariant(tokenFilterName, porterStemTokenFilter); + public TokenFiltersDescriptor Predicate(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor Predicate(string tokenFilterName, Action configure) => 
AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor Predicate(string tokenFilterName, PredicateTokenFilter predicateTokenFilter) => AssignVariant(tokenFilterName, predicateTokenFilter); + public TokenFiltersDescriptor RemoveDuplicates(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor RemoveDuplicates(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor RemoveDuplicates(string tokenFilterName, RemoveDuplicatesTokenFilter removeDuplicatesTokenFilter) => AssignVariant(tokenFilterName, removeDuplicatesTokenFilter); + public TokenFiltersDescriptor Reverse(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor Reverse(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor Reverse(string tokenFilterName, ReverseTokenFilter reverseTokenFilter) => AssignVariant(tokenFilterName, reverseTokenFilter); + public TokenFiltersDescriptor Shingle(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor Shingle(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor Shingle(string tokenFilterName, ShingleTokenFilter shingleTokenFilter) => AssignVariant(tokenFilterName, shingleTokenFilter); + public TokenFiltersDescriptor Snowball(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor Snowball(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor Snowball(string tokenFilterName, SnowballTokenFilter snowballTokenFilter) => AssignVariant(tokenFilterName, snowballTokenFilter); + public TokenFiltersDescriptor StemmerOverride(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor StemmerOverride(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor StemmerOverride(string tokenFilterName, StemmerOverrideTokenFilter stemmerOverrideTokenFilter) => AssignVariant(tokenFilterName, stemmerOverrideTokenFilter); + public TokenFiltersDescriptor Stemmer(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor Stemmer(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor Stemmer(string tokenFilterName, StemmerTokenFilter stemmerTokenFilter) => AssignVariant(tokenFilterName, stemmerTokenFilter); + public TokenFiltersDescriptor Stop(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor Stop(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor Stop(string tokenFilterName, StopTokenFilter stopTokenFilter) => AssignVariant(tokenFilterName, stopTokenFilter); + public TokenFiltersDescriptor SynonymGraph(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor SynonymGraph(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor SynonymGraph(string tokenFilterName, SynonymGraphTokenFilter synonymGraphTokenFilter) => AssignVariant(tokenFilterName, synonymGraphTokenFilter); + public TokenFiltersDescriptor Synonym(string tokenFilterName) => AssignVariant(tokenFilterName, 
null); + public TokenFiltersDescriptor Synonym(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor Synonym(string tokenFilterName, SynonymTokenFilter synonymTokenFilter) => AssignVariant(tokenFilterName, synonymTokenFilter); + public TokenFiltersDescriptor Trim(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor Trim(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor Trim(string tokenFilterName, TrimTokenFilter trimTokenFilter) => AssignVariant(tokenFilterName, trimTokenFilter); + public TokenFiltersDescriptor Truncate(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor Truncate(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor Truncate(string tokenFilterName, TruncateTokenFilter truncateTokenFilter) => AssignVariant(tokenFilterName, truncateTokenFilter); + public TokenFiltersDescriptor Unique(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor Unique(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor Unique(string tokenFilterName, UniqueTokenFilter uniqueTokenFilter) => AssignVariant(tokenFilterName, uniqueTokenFilter); + public TokenFiltersDescriptor Uppercase(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor Uppercase(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor Uppercase(string tokenFilterName, UppercaseTokenFilter uppercaseTokenFilter) => AssignVariant(tokenFilterName, uppercaseTokenFilter); + public TokenFiltersDescriptor WordDelimiterGraph(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor WordDelimiterGraph(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor WordDelimiterGraph(string tokenFilterName, WordDelimiterGraphTokenFilter wordDelimiterGraphTokenFilter) => AssignVariant(tokenFilterName, wordDelimiterGraphTokenFilter); + public TokenFiltersDescriptor WordDelimiter(string tokenFilterName) => AssignVariant(tokenFilterName, null); + public TokenFiltersDescriptor WordDelimiter(string tokenFilterName, Action configure) => AssignVariant(tokenFilterName, configure); + public TokenFiltersDescriptor WordDelimiter(string tokenFilterName, WordDelimiterTokenFilter wordDelimiterTokenFilter) => AssignVariant(tokenFilterName, wordDelimiterTokenFilter); +} + +internal sealed partial class TokenFilterInterfaceConverter : JsonConverter +{ + public override ITokenFilter Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + var copiedReader = reader; + string? 
type = null; + using var jsonDoc = JsonDocument.ParseValue(ref copiedReader); + if (jsonDoc is not null && jsonDoc.RootElement.TryGetProperty("type", out var readType) && readType.ValueKind == JsonValueKind.String) + { + type = readType.ToString(); + } + + switch (type) + { + case "dictionary_decompounder": + return JsonSerializer.Deserialize(ref reader, options); + case "phonetic": + return JsonSerializer.Deserialize(ref reader, options); + case "icu_transform": + return JsonSerializer.Deserialize(ref reader, options); + case "icu_normalizer": + return JsonSerializer.Deserialize(ref reader, options); + case "icu_folding": + return JsonSerializer.Deserialize(ref reader, options); + case "icu_collation": + return JsonSerializer.Deserialize(ref reader, options); + case "icu_tokenizer": + return JsonSerializer.Deserialize(ref reader, options); + case "kuromoji_part_of_speech": + return JsonSerializer.Deserialize(ref reader, options); + case "kuromoji_readingform": + return JsonSerializer.Deserialize(ref reader, options); + case "kuromoji_stemmer": + return JsonSerializer.Deserialize(ref reader, options); + case "word_delimiter": + return JsonSerializer.Deserialize(ref reader, options); + case "word_delimiter_graph": + return JsonSerializer.Deserialize(ref reader, options); + case "uppercase": + return JsonSerializer.Deserialize(ref reader, options); + case "unique": + return JsonSerializer.Deserialize(ref reader, options); + case "truncate": + return JsonSerializer.Deserialize(ref reader, options); + case "trim": + return JsonSerializer.Deserialize(ref reader, options); + case "synonym": + return JsonSerializer.Deserialize(ref reader, options); + case "synonym_graph": + return JsonSerializer.Deserialize(ref reader, options); + case "stop": + return JsonSerializer.Deserialize(ref reader, options); + case "stemmer": + return JsonSerializer.Deserialize(ref reader, options); + case "stemmer_override": + return JsonSerializer.Deserialize(ref reader, options); + case "snowball": + return JsonSerializer.Deserialize(ref reader, options); + case "shingle": + return JsonSerializer.Deserialize(ref reader, options); + case "reverse": + return JsonSerializer.Deserialize(ref reader, options); + case "remove_duplicates": + return JsonSerializer.Deserialize(ref reader, options); + case "predicate_token_filter": + return JsonSerializer.Deserialize(ref reader, options); + case "porter_stem": + return JsonSerializer.Deserialize(ref reader, options); + case "pattern_replace": + return JsonSerializer.Deserialize(ref reader, options); + case "pattern_capture": + return JsonSerializer.Deserialize(ref reader, options); + case "nori_part_of_speech": + return JsonSerializer.Deserialize(ref reader, options); + case "ngram": + return JsonSerializer.Deserialize(ref reader, options); + case "multiplexer": + return JsonSerializer.Deserialize(ref reader, options); + case "lowercase": + return JsonSerializer.Deserialize(ref reader, options); + case "limit": + return JsonSerializer.Deserialize(ref reader, options); + case "length": + return JsonSerializer.Deserialize(ref reader, options); + case "kstem": + return JsonSerializer.Deserialize(ref reader, options); + case "keyword_marker": + return JsonSerializer.Deserialize(ref reader, options); + case "keep": + return JsonSerializer.Deserialize(ref reader, options); + case "keep_types": + return JsonSerializer.Deserialize(ref reader, options); + case "hyphenation_decompounder": + return JsonSerializer.Deserialize(ref reader, options); + case "hunspell": + return 
JsonSerializer.Deserialize(ref reader, options); + case "fingerprint": + return JsonSerializer.Deserialize(ref reader, options); + case "elision": + return JsonSerializer.Deserialize(ref reader, options); + case "edge_ngram": + return JsonSerializer.Deserialize(ref reader, options); + case "delimited_payload": + return JsonSerializer.Deserialize(ref reader, options); + case "condition": + return JsonSerializer.Deserialize(ref reader, options); + case "common_grams": + return JsonSerializer.Deserialize(ref reader, options); + case "asciifolding": + return JsonSerializer.Deserialize(ref reader, options); + default: + ThrowHelper.ThrowUnknownTaggedUnionVariantJsonException(type, typeof(ITokenFilter)); + return null; + } + } + + public override void Write(Utf8JsonWriter writer, ITokenFilter value, JsonSerializerOptions options) + { + if (value is null) + { + writer.WriteNullValue(); + return; + } + + switch (value.Type) + { + case "dictionary_decompounder": + JsonSerializer.Serialize(writer, value, typeof(DictionaryDecompounderTokenFilter), options); + return; + case "phonetic": + JsonSerializer.Serialize(writer, value, typeof(PhoneticTokenFilter), options); + return; + case "icu_transform": + JsonSerializer.Serialize(writer, value, typeof(IcuTransformTokenFilter), options); + return; + case "icu_normalizer": + JsonSerializer.Serialize(writer, value, typeof(IcuNormalizationTokenFilter), options); + return; + case "icu_folding": + JsonSerializer.Serialize(writer, value, typeof(IcuFoldingTokenFilter), options); + return; + case "icu_collation": + JsonSerializer.Serialize(writer, value, typeof(IcuCollationTokenFilter), options); + return; + case "icu_tokenizer": + JsonSerializer.Serialize(writer, value, typeof(IcuTokenizer), options); + return; + case "kuromoji_part_of_speech": + JsonSerializer.Serialize(writer, value, typeof(KuromojiPartOfSpeechTokenFilter), options); + return; + case "kuromoji_readingform": + JsonSerializer.Serialize(writer, value, typeof(KuromojiReadingFormTokenFilter), options); + return; + case "kuromoji_stemmer": + JsonSerializer.Serialize(writer, value, typeof(KuromojiStemmerTokenFilter), options); + return; + case "word_delimiter": + JsonSerializer.Serialize(writer, value, typeof(WordDelimiterTokenFilter), options); + return; + case "word_delimiter_graph": + JsonSerializer.Serialize(writer, value, typeof(WordDelimiterGraphTokenFilter), options); + return; + case "uppercase": + JsonSerializer.Serialize(writer, value, typeof(UppercaseTokenFilter), options); + return; + case "unique": + JsonSerializer.Serialize(writer, value, typeof(UniqueTokenFilter), options); + return; + case "truncate": + JsonSerializer.Serialize(writer, value, typeof(TruncateTokenFilter), options); + return; + case "trim": + JsonSerializer.Serialize(writer, value, typeof(TrimTokenFilter), options); + return; + case "synonym": + JsonSerializer.Serialize(writer, value, typeof(SynonymTokenFilter), options); + return; + case "synonym_graph": + JsonSerializer.Serialize(writer, value, typeof(SynonymGraphTokenFilter), options); + return; + case "stop": + JsonSerializer.Serialize(writer, value, typeof(StopTokenFilter), options); + return; + case "stemmer": + JsonSerializer.Serialize(writer, value, typeof(StemmerTokenFilter), options); + return; + case "stemmer_override": + JsonSerializer.Serialize(writer, value, typeof(StemmerOverrideTokenFilter), options); + return; + case "snowball": + JsonSerializer.Serialize(writer, value, typeof(SnowballTokenFilter), options); + return; + case "shingle": + 
JsonSerializer.Serialize(writer, value, typeof(ShingleTokenFilter), options); + return; + case "reverse": + JsonSerializer.Serialize(writer, value, typeof(ReverseTokenFilter), options); + return; + case "remove_duplicates": + JsonSerializer.Serialize(writer, value, typeof(RemoveDuplicatesTokenFilter), options); + return; + case "predicate_token_filter": + JsonSerializer.Serialize(writer, value, typeof(PredicateTokenFilter), options); + return; + case "porter_stem": + JsonSerializer.Serialize(writer, value, typeof(PorterStemTokenFilter), options); + return; + case "pattern_replace": + JsonSerializer.Serialize(writer, value, typeof(PatternReplaceTokenFilter), options); + return; + case "pattern_capture": + JsonSerializer.Serialize(writer, value, typeof(PatternCaptureTokenFilter), options); + return; + case "nori_part_of_speech": + JsonSerializer.Serialize(writer, value, typeof(NoriPartOfSpeechTokenFilter), options); + return; + case "ngram": + JsonSerializer.Serialize(writer, value, typeof(NGramTokenFilter), options); + return; + case "multiplexer": + JsonSerializer.Serialize(writer, value, typeof(MultiplexerTokenFilter), options); + return; + case "lowercase": + JsonSerializer.Serialize(writer, value, typeof(LowercaseTokenFilter), options); + return; + case "limit": + JsonSerializer.Serialize(writer, value, typeof(LimitTokenCountTokenFilter), options); + return; + case "length": + JsonSerializer.Serialize(writer, value, typeof(LengthTokenFilter), options); + return; + case "kstem": + JsonSerializer.Serialize(writer, value, typeof(KStemTokenFilter), options); + return; + case "keyword_marker": + JsonSerializer.Serialize(writer, value, typeof(KeywordMarkerTokenFilter), options); + return; + case "keep": + JsonSerializer.Serialize(writer, value, typeof(KeepWordsTokenFilter), options); + return; + case "keep_types": + JsonSerializer.Serialize(writer, value, typeof(KeepTypesTokenFilter), options); + return; + case "hyphenation_decompounder": + JsonSerializer.Serialize(writer, value, typeof(HyphenationDecompounderTokenFilter), options); + return; + case "hunspell": + JsonSerializer.Serialize(writer, value, typeof(HunspellTokenFilter), options); + return; + case "fingerprint": + JsonSerializer.Serialize(writer, value, typeof(FingerprintTokenFilter), options); + return; + case "elision": + JsonSerializer.Serialize(writer, value, typeof(ElisionTokenFilter), options); + return; + case "edge_ngram": + JsonSerializer.Serialize(writer, value, typeof(EdgeNGramTokenFilter), options); + return; + case "delimited_payload": + JsonSerializer.Serialize(writer, value, typeof(DelimitedPayloadTokenFilter), options); + return; + case "condition": + JsonSerializer.Serialize(writer, value, typeof(ConditionTokenFilter), options); + return; + case "common_grams": + JsonSerializer.Serialize(writer, value, typeof(CommonGramsTokenFilter), options); + return; + case "asciifolding": + JsonSerializer.Serialize(writer, value, typeof(AsciiFoldingTokenFilter), options); + return; + default: + var type = value.GetType(); + JsonSerializer.Serialize(writer, value, type, options); + return; + } + } +} + +[JsonConverter(typeof(TokenFilterInterfaceConverter))] +public partial interface ITokenFilter +{ + public string Type { get; } +} \ No newline at end of file diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/Tokenizer.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/Tokenizer.g.cs deleted file mode 100644 index d6d6a2f18bb..00000000000 --- 
a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/Tokenizer.g.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Licensed to Elasticsearch B.V under one or more agreements. -// Elasticsearch B.V licenses this file to you under the Apache 2.0 License. -// See the LICENSE file in the project root for more information. -// -// ███╗ ██╗ ██████╗ ████████╗██╗ ██████╗███████╗ -// ████╗ ██║██╔═══██╗╚══██╔══╝██║██╔════╝██╔════╝ -// ██╔██╗ ██║██║ ██║ ██║ ██║██║ █████╗ -// ██║╚██╗██║██║ ██║ ██║ ██║██║ ██╔══╝ -// ██║ ╚████║╚██████╔╝ ██║ ██║╚██████╗███████╗ -// ╚═╝ ╚═══╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝╚══════╝ -// ------------------------------------------------ -// -// This file is automatically generated. -// Please do not edit these files manually. -// -// ------------------------------------------------ - -using Elastic.Clients.Elasticsearch.Fluent; -using Elastic.Clients.Elasticsearch.Serialization; -using Elastic.Transport; -using System; -using System.Collections.Generic; -using System.Linq.Expressions; -using System.Text.Json; -using System.Text.Json.Serialization; - -#nullable restore -namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class Tokenizer : Union -{ - public Tokenizer(string name) : base(name) - { - } - - public Tokenizer(Elastic.Clients.Elasticsearch.Analysis.TokenizerDefinitions definition) : base(definition) - { - } -} \ No newline at end of file diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/TokenizerDefinitions.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/TokenizerDefinitions.g.cs deleted file mode 100644 index e43c980054b..00000000000 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/TokenizerDefinitions.g.cs +++ /dev/null @@ -1,222 +0,0 @@ -// Licensed to Elasticsearch B.V under one or more agreements. -// Elasticsearch B.V licenses this file to you under the Apache 2.0 License. -// See the LICENSE file in the project root for more information. -// -// ███╗ ██╗ ██████╗ ████████╗██╗ ██████╗███████╗ -// ████╗ ██║██╔═══██╗╚══██╔══╝██║██╔════╝██╔════╝ -// ██╔██╗ ██║██║ ██║ ██║ ██║██║ █████╗ -// ██║╚██╗██║██║ ██║ ██║ ██║██║ ██╔══╝ -// ██║ ╚████║╚██████╔╝ ██║ ██║╚██████╗███████╗ -// ╚═╝ ╚═══╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝╚══════╝ -// ------------------------------------------------ -// -// This file is automatically generated. -// Please do not edit these files manually. -// -// ------------------------------------------------ - -using Elastic.Clients.Elasticsearch.Fluent; -using Elastic.Clients.Elasticsearch.Serialization; -using Elastic.Transport; -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Linq.Expressions; -using System.Text.Json; -using System.Text.Json.Serialization; - -#nullable restore -namespace Elastic.Clients.Elasticsearch.Analysis; -public partial class TokenizerDefinitions : IsADictionary -{ - public TokenizerDefinitions() - { - } - - public TokenizerDefinitions(IDictionary container) : base(container) - { - } - - public void Add(string name, ITokenizerDefinition tokenizerDefinition) => BackingDictionary.Add(Sanitize(name), tokenizerDefinition); - public bool TryGetTokenizerDefinition(string name, [NotNullWhen(returnValue: true)] out ITokenizerDefinition tokenizerDefinition) => BackingDictionary.TryGetValue(Sanitize(name), out tokenizerDefinition); - public bool TryGetTokenizerDefinition(string name, [NotNullWhen(returnValue: true)] out T? 
tokenizerDefinition) - where T : class, ITokenizerDefinition - { - if (BackingDictionary.TryGetValue(Sanitize(name), out var matchedValue) && matchedValue is T finalValue) - { - tokenizerDefinition = finalValue; - return true; - } - - tokenizerDefinition = null; - return false; - } -} - -public sealed partial class TokenizerDefinitionsDescriptor : IsADictionaryDescriptor -{ - public TokenizerDefinitionsDescriptor() : base(new TokenizerDefinitions()) - { - } - - public TokenizerDefinitionsDescriptor(TokenizerDefinitions tokenizerDefinitions) : base(tokenizerDefinitions ?? new TokenizerDefinitions()) - { - } - - public TokenizerDefinitionsDescriptor CharGroupTokenizer(string tokenizerDefinitionName) => AssignVariant(tokenizerDefinitionName, null); - public TokenizerDefinitionsDescriptor CharGroupTokenizer(string tokenizerDefinitionName, Action configure) => AssignVariant(tokenizerDefinitionName, configure); - public TokenizerDefinitionsDescriptor CharGroupTokenizer(string tokenizerDefinitionName, CharGroupTokenizer charGroupTokenizer) => AssignVariant(tokenizerDefinitionName, charGroupTokenizer); - public TokenizerDefinitionsDescriptor EdgeNGramTokenizer(string tokenizerDefinitionName) => AssignVariant(tokenizerDefinitionName, null); - public TokenizerDefinitionsDescriptor EdgeNGramTokenizer(string tokenizerDefinitionName, Action configure) => AssignVariant(tokenizerDefinitionName, configure); - public TokenizerDefinitionsDescriptor EdgeNGramTokenizer(string tokenizerDefinitionName, EdgeNGramTokenizer edgeNGramTokenizer) => AssignVariant(tokenizerDefinitionName, edgeNGramTokenizer); - public TokenizerDefinitionsDescriptor IcuTokenizer(string tokenizerDefinitionName) => AssignVariant(tokenizerDefinitionName, null); - public TokenizerDefinitionsDescriptor IcuTokenizer(string tokenizerDefinitionName, Action configure) => AssignVariant(tokenizerDefinitionName, configure); - public TokenizerDefinitionsDescriptor IcuTokenizer(string tokenizerDefinitionName, IcuTokenizer icuTokenizer) => AssignVariant(tokenizerDefinitionName, icuTokenizer); - public TokenizerDefinitionsDescriptor KeywordTokenizer(string tokenizerDefinitionName) => AssignVariant(tokenizerDefinitionName, null); - public TokenizerDefinitionsDescriptor KeywordTokenizer(string tokenizerDefinitionName, Action configure) => AssignVariant(tokenizerDefinitionName, configure); - public TokenizerDefinitionsDescriptor KeywordTokenizer(string tokenizerDefinitionName, KeywordTokenizer keywordTokenizer) => AssignVariant(tokenizerDefinitionName, keywordTokenizer); - public TokenizerDefinitionsDescriptor KuromojiTokenizer(string tokenizerDefinitionName) => AssignVariant(tokenizerDefinitionName, null); - public TokenizerDefinitionsDescriptor KuromojiTokenizer(string tokenizerDefinitionName, Action configure) => AssignVariant(tokenizerDefinitionName, configure); - public TokenizerDefinitionsDescriptor KuromojiTokenizer(string tokenizerDefinitionName, KuromojiTokenizer kuromojiTokenizer) => AssignVariant(tokenizerDefinitionName, kuromojiTokenizer); - public TokenizerDefinitionsDescriptor LetterTokenizer(string tokenizerDefinitionName) => AssignVariant(tokenizerDefinitionName, null); - public TokenizerDefinitionsDescriptor LetterTokenizer(string tokenizerDefinitionName, Action configure) => AssignVariant(tokenizerDefinitionName, configure); - public TokenizerDefinitionsDescriptor LetterTokenizer(string tokenizerDefinitionName, LetterTokenizer letterTokenizer) => AssignVariant(tokenizerDefinitionName, letterTokenizer); - public 
TokenizerDefinitionsDescriptor LowercaseTokenizer(string tokenizerDefinitionName) => AssignVariant(tokenizerDefinitionName, null); - public TokenizerDefinitionsDescriptor LowercaseTokenizer(string tokenizerDefinitionName, Action configure) => AssignVariant(tokenizerDefinitionName, configure); - public TokenizerDefinitionsDescriptor LowercaseTokenizer(string tokenizerDefinitionName, LowercaseTokenizer lowercaseTokenizer) => AssignVariant(tokenizerDefinitionName, lowercaseTokenizer); - public TokenizerDefinitionsDescriptor NGramTokenizer(string tokenizerDefinitionName) => AssignVariant(tokenizerDefinitionName, null); - public TokenizerDefinitionsDescriptor NGramTokenizer(string tokenizerDefinitionName, Action configure) => AssignVariant(tokenizerDefinitionName, configure); - public TokenizerDefinitionsDescriptor NGramTokenizer(string tokenizerDefinitionName, NGramTokenizer nGramTokenizer) => AssignVariant(tokenizerDefinitionName, nGramTokenizer); - public TokenizerDefinitionsDescriptor NoriTokenizer(string tokenizerDefinitionName) => AssignVariant(tokenizerDefinitionName, null); - public TokenizerDefinitionsDescriptor NoriTokenizer(string tokenizerDefinitionName, Action configure) => AssignVariant(tokenizerDefinitionName, configure); - public TokenizerDefinitionsDescriptor NoriTokenizer(string tokenizerDefinitionName, NoriTokenizer noriTokenizer) => AssignVariant(tokenizerDefinitionName, noriTokenizer); - public TokenizerDefinitionsDescriptor PathHierarchyTokenizer(string tokenizerDefinitionName) => AssignVariant(tokenizerDefinitionName, null); - public TokenizerDefinitionsDescriptor PathHierarchyTokenizer(string tokenizerDefinitionName, Action configure) => AssignVariant(tokenizerDefinitionName, configure); - public TokenizerDefinitionsDescriptor PathHierarchyTokenizer(string tokenizerDefinitionName, PathHierarchyTokenizer pathHierarchyTokenizer) => AssignVariant(tokenizerDefinitionName, pathHierarchyTokenizer); - public TokenizerDefinitionsDescriptor PatternTokenizer(string tokenizerDefinitionName) => AssignVariant(tokenizerDefinitionName, null); - public TokenizerDefinitionsDescriptor PatternTokenizer(string tokenizerDefinitionName, Action configure) => AssignVariant(tokenizerDefinitionName, configure); - public TokenizerDefinitionsDescriptor PatternTokenizer(string tokenizerDefinitionName, PatternTokenizer patternTokenizer) => AssignVariant(tokenizerDefinitionName, patternTokenizer); - public TokenizerDefinitionsDescriptor StandardTokenizer(string tokenizerDefinitionName) => AssignVariant(tokenizerDefinitionName, null); - public TokenizerDefinitionsDescriptor StandardTokenizer(string tokenizerDefinitionName, Action configure) => AssignVariant(tokenizerDefinitionName, configure); - public TokenizerDefinitionsDescriptor StandardTokenizer(string tokenizerDefinitionName, StandardTokenizer standardTokenizer) => AssignVariant(tokenizerDefinitionName, standardTokenizer); - public TokenizerDefinitionsDescriptor UaxEmailUrlTokenizer(string tokenizerDefinitionName) => AssignVariant(tokenizerDefinitionName, null); - public TokenizerDefinitionsDescriptor UaxEmailUrlTokenizer(string tokenizerDefinitionName, Action configure) => AssignVariant(tokenizerDefinitionName, configure); - public TokenizerDefinitionsDescriptor UaxEmailUrlTokenizer(string tokenizerDefinitionName, UaxEmailUrlTokenizer uaxEmailUrlTokenizer) => AssignVariant(tokenizerDefinitionName, uaxEmailUrlTokenizer); - public TokenizerDefinitionsDescriptor WhitespaceTokenizer(string tokenizerDefinitionName) => 
AssignVariant(tokenizerDefinitionName, null); - public TokenizerDefinitionsDescriptor WhitespaceTokenizer(string tokenizerDefinitionName, Action configure) => AssignVariant(tokenizerDefinitionName, configure); - public TokenizerDefinitionsDescriptor WhitespaceTokenizer(string tokenizerDefinitionName, WhitespaceTokenizer whitespaceTokenizer) => AssignVariant(tokenizerDefinitionName, whitespaceTokenizer); -} - -internal sealed partial class TokenizerDefinitionInterfaceConverter : JsonConverter -{ - public override ITokenizerDefinition Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) - { - var copiedReader = reader; - string? type = null; - using var jsonDoc = JsonDocument.ParseValue(ref copiedReader); - if (jsonDoc is not null && jsonDoc.RootElement.TryGetProperty("type", out var readType) && readType.ValueKind == JsonValueKind.String) - { - type = readType.ToString(); - } - - switch (type) - { - case "icu_tokenizer": - return JsonSerializer.Deserialize(ref reader, options); - case "pattern": - return JsonSerializer.Deserialize(ref reader, options); - case "kuromoji_tokenizer": - return JsonSerializer.Deserialize(ref reader, options); - case "whitespace": - return JsonSerializer.Deserialize(ref reader, options); - case "uax_url_email": - return JsonSerializer.Deserialize(ref reader, options); - case "standard": - return JsonSerializer.Deserialize(ref reader, options); - case "path_hierarchy": - return JsonSerializer.Deserialize(ref reader, options); - case "nori_tokenizer": - return JsonSerializer.Deserialize(ref reader, options); - case "ngram": - return JsonSerializer.Deserialize(ref reader, options); - case "lowercase": - return JsonSerializer.Deserialize(ref reader, options); - case "letter": - return JsonSerializer.Deserialize(ref reader, options); - case "keyword": - return JsonSerializer.Deserialize(ref reader, options); - case "edge_ngram": - return JsonSerializer.Deserialize(ref reader, options); - case "char_group": - return JsonSerializer.Deserialize(ref reader, options); - default: - ThrowHelper.ThrowUnknownTaggedUnionVariantJsonException(type, typeof(ITokenizerDefinition)); - return null; - } - } - - public override void Write(Utf8JsonWriter writer, ITokenizerDefinition value, JsonSerializerOptions options) - { - if (value is null) - { - writer.WriteNullValue(); - return; - } - - switch (value.Type) - { - case "icu_tokenizer": - JsonSerializer.Serialize(writer, value, typeof(IcuTokenizer), options); - return; - case "pattern": - JsonSerializer.Serialize(writer, value, typeof(PatternTokenizer), options); - return; - case "kuromoji_tokenizer": - JsonSerializer.Serialize(writer, value, typeof(KuromojiTokenizer), options); - return; - case "whitespace": - JsonSerializer.Serialize(writer, value, typeof(WhitespaceTokenizer), options); - return; - case "uax_url_email": - JsonSerializer.Serialize(writer, value, typeof(UaxEmailUrlTokenizer), options); - return; - case "standard": - JsonSerializer.Serialize(writer, value, typeof(StandardTokenizer), options); - return; - case "path_hierarchy": - JsonSerializer.Serialize(writer, value, typeof(PathHierarchyTokenizer), options); - return; - case "nori_tokenizer": - JsonSerializer.Serialize(writer, value, typeof(NoriTokenizer), options); - return; - case "ngram": - JsonSerializer.Serialize(writer, value, typeof(NGramTokenizer), options); - return; - case "lowercase": - JsonSerializer.Serialize(writer, value, typeof(LowercaseTokenizer), options); - return; - case "letter": - JsonSerializer.Serialize(writer, 
value, typeof(LetterTokenizer), options); - return; - case "keyword": - JsonSerializer.Serialize(writer, value, typeof(KeywordTokenizer), options); - return; - case "edge_ngram": - JsonSerializer.Serialize(writer, value, typeof(EdgeNGramTokenizer), options); - return; - case "char_group": - JsonSerializer.Serialize(writer, value, typeof(CharGroupTokenizer), options); - return; - default: - var type = value.GetType(); - JsonSerializer.Serialize(writer, value, type, options); - return; - } - } -} - -[JsonConverter(typeof(TokenizerDefinitionInterfaceConverter))] -public partial interface ITokenizerDefinition -{ - public string Type { get; } -} \ No newline at end of file diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/Tokenizers.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/Tokenizers.g.cs new file mode 100644 index 00000000000..73ca8318ccc --- /dev/null +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/Tokenizers.g.cs @@ -0,0 +1,222 @@ +// Licensed to Elasticsearch B.V under one or more agreements. +// Elasticsearch B.V licenses this file to you under the Apache 2.0 License. +// See the LICENSE file in the project root for more information. +// +// ███╗ ██╗ ██████╗ ████████╗██╗ ██████╗███████╗ +// ████╗ ██║██╔═══██╗╚══██╔══╝██║██╔════╝██╔════╝ +// ██╔██╗ ██║██║ ██║ ██║ ██║██║ █████╗ +// ██║╚██╗██║██║ ██║ ██║ ██║██║ ██╔══╝ +// ██║ ╚████║╚██████╔╝ ██║ ██║╚██████╗███████╗ +// ╚═╝ ╚═══╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝╚══════╝ +// ------------------------------------------------ +// +// This file is automatically generated. +// Please do not edit these files manually. +// +// ------------------------------------------------ + +using Elastic.Clients.Elasticsearch.Fluent; +using Elastic.Clients.Elasticsearch.Serialization; +using Elastic.Transport; +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Linq.Expressions; +using System.Text.Json; +using System.Text.Json.Serialization; + +#nullable restore +namespace Elastic.Clients.Elasticsearch.Analysis; +public partial class Tokenizers : IsADictionary +{ + public Tokenizers() + { + } + + public Tokenizers(IDictionary container) : base(container) + { + } + + public void Add(string name, ITokenizer tokenizer) => BackingDictionary.Add(Sanitize(name), tokenizer); + public bool TryGetTokenizer(string name, [NotNullWhen(returnValue: true)] out ITokenizer tokenizer) => BackingDictionary.TryGetValue(Sanitize(name), out tokenizer); + public bool TryGetTokenizer(string name, [NotNullWhen(returnValue: true)] out T? tokenizer) + where T : class, ITokenizer + { + if (BackingDictionary.TryGetValue(Sanitize(name), out var matchedValue) && matchedValue is T finalValue) + { + tokenizer = finalValue; + return true; + } + + tokenizer = null; + return false; + } +} + +public sealed partial class TokenizersDescriptor : IsADictionaryDescriptor +{ + public TokenizersDescriptor() : base(new Tokenizers()) + { + } + + public TokenizersDescriptor(Tokenizers tokenizers) : base(tokenizers ?? 
new Tokenizers()) + { + } + + public TokenizersDescriptor CharGroup(string tokenizerName) => AssignVariant(tokenizerName, null); + public TokenizersDescriptor CharGroup(string tokenizerName, Action configure) => AssignVariant(tokenizerName, configure); + public TokenizersDescriptor CharGroup(string tokenizerName, CharGroupTokenizer charGroupTokenizer) => AssignVariant(tokenizerName, charGroupTokenizer); + public TokenizersDescriptor EdgeNGram(string tokenizerName) => AssignVariant(tokenizerName, null); + public TokenizersDescriptor EdgeNGram(string tokenizerName, Action configure) => AssignVariant(tokenizerName, configure); + public TokenizersDescriptor EdgeNGram(string tokenizerName, EdgeNGramTokenizer edgeNGramTokenizer) => AssignVariant(tokenizerName, edgeNGramTokenizer); + public TokenizersDescriptor Icu(string tokenizerName) => AssignVariant(tokenizerName, null); + public TokenizersDescriptor Icu(string tokenizerName, Action configure) => AssignVariant(tokenizerName, configure); + public TokenizersDescriptor Icu(string tokenizerName, IcuTokenizer icuTokenizer) => AssignVariant(tokenizerName, icuTokenizer); + public TokenizersDescriptor Keyword(string tokenizerName) => AssignVariant(tokenizerName, null); + public TokenizersDescriptor Keyword(string tokenizerName, Action configure) => AssignVariant(tokenizerName, configure); + public TokenizersDescriptor Keyword(string tokenizerName, KeywordTokenizer keywordTokenizer) => AssignVariant(tokenizerName, keywordTokenizer); + public TokenizersDescriptor Kuromoji(string tokenizerName) => AssignVariant(tokenizerName, null); + public TokenizersDescriptor Kuromoji(string tokenizerName, Action configure) => AssignVariant(tokenizerName, configure); + public TokenizersDescriptor Kuromoji(string tokenizerName, KuromojiTokenizer kuromojiTokenizer) => AssignVariant(tokenizerName, kuromojiTokenizer); + public TokenizersDescriptor Letter(string tokenizerName) => AssignVariant(tokenizerName, null); + public TokenizersDescriptor Letter(string tokenizerName, Action configure) => AssignVariant(tokenizerName, configure); + public TokenizersDescriptor Letter(string tokenizerName, LetterTokenizer letterTokenizer) => AssignVariant(tokenizerName, letterTokenizer); + public TokenizersDescriptor Lowercase(string tokenizerName) => AssignVariant(tokenizerName, null); + public TokenizersDescriptor Lowercase(string tokenizerName, Action configure) => AssignVariant(tokenizerName, configure); + public TokenizersDescriptor Lowercase(string tokenizerName, LowercaseTokenizer lowercaseTokenizer) => AssignVariant(tokenizerName, lowercaseTokenizer); + public TokenizersDescriptor NGram(string tokenizerName) => AssignVariant(tokenizerName, null); + public TokenizersDescriptor NGram(string tokenizerName, Action configure) => AssignVariant(tokenizerName, configure); + public TokenizersDescriptor NGram(string tokenizerName, NGramTokenizer nGramTokenizer) => AssignVariant(tokenizerName, nGramTokenizer); + public TokenizersDescriptor Nori(string tokenizerName) => AssignVariant(tokenizerName, null); + public TokenizersDescriptor Nori(string tokenizerName, Action configure) => AssignVariant(tokenizerName, configure); + public TokenizersDescriptor Nori(string tokenizerName, NoriTokenizer noriTokenizer) => AssignVariant(tokenizerName, noriTokenizer); + public TokenizersDescriptor PathHierarchy(string tokenizerName) => AssignVariant(tokenizerName, null); + public TokenizersDescriptor PathHierarchy(string tokenizerName, Action configure) => AssignVariant(tokenizerName, configure); + public 
TokenizersDescriptor PathHierarchy(string tokenizerName, PathHierarchyTokenizer pathHierarchyTokenizer) => AssignVariant(tokenizerName, pathHierarchyTokenizer); + public TokenizersDescriptor Pattern(string tokenizerName) => AssignVariant(tokenizerName, null); + public TokenizersDescriptor Pattern(string tokenizerName, Action configure) => AssignVariant(tokenizerName, configure); + public TokenizersDescriptor Pattern(string tokenizerName, PatternTokenizer patternTokenizer) => AssignVariant(tokenizerName, patternTokenizer); + public TokenizersDescriptor Standard(string tokenizerName) => AssignVariant(tokenizerName, null); + public TokenizersDescriptor Standard(string tokenizerName, Action configure) => AssignVariant(tokenizerName, configure); + public TokenizersDescriptor Standard(string tokenizerName, StandardTokenizer standardTokenizer) => AssignVariant(tokenizerName, standardTokenizer); + public TokenizersDescriptor UaxEmailUrl(string tokenizerName) => AssignVariant(tokenizerName, null); + public TokenizersDescriptor UaxEmailUrl(string tokenizerName, Action configure) => AssignVariant(tokenizerName, configure); + public TokenizersDescriptor UaxEmailUrl(string tokenizerName, UaxEmailUrlTokenizer uaxEmailUrlTokenizer) => AssignVariant(tokenizerName, uaxEmailUrlTokenizer); + public TokenizersDescriptor Whitespace(string tokenizerName) => AssignVariant(tokenizerName, null); + public TokenizersDescriptor Whitespace(string tokenizerName, Action configure) => AssignVariant(tokenizerName, configure); + public TokenizersDescriptor Whitespace(string tokenizerName, WhitespaceTokenizer whitespaceTokenizer) => AssignVariant(tokenizerName, whitespaceTokenizer); +} + +internal sealed partial class TokenizerInterfaceConverter : JsonConverter +{ + public override ITokenizer Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + var copiedReader = reader; + string? 
type = null; + using var jsonDoc = JsonDocument.ParseValue(ref copiedReader); + if (jsonDoc is not null && jsonDoc.RootElement.TryGetProperty("type", out var readType) && readType.ValueKind == JsonValueKind.String) + { + type = readType.ToString(); + } + + switch (type) + { + case "icu_tokenizer": + return JsonSerializer.Deserialize<IcuTokenizer>(ref reader, options); + case "pattern": + return JsonSerializer.Deserialize<PatternTokenizer>(ref reader, options); + case "kuromoji_tokenizer": + return JsonSerializer.Deserialize<KuromojiTokenizer>(ref reader, options); + case "whitespace": + return JsonSerializer.Deserialize<WhitespaceTokenizer>(ref reader, options); + case "uax_url_email": + return JsonSerializer.Deserialize<UaxEmailUrlTokenizer>(ref reader, options); + case "standard": + return JsonSerializer.Deserialize<StandardTokenizer>(ref reader, options); + case "path_hierarchy": + return JsonSerializer.Deserialize<PathHierarchyTokenizer>(ref reader, options); + case "nori_tokenizer": + return JsonSerializer.Deserialize<NoriTokenizer>(ref reader, options); + case "ngram": + return JsonSerializer.Deserialize<NGramTokenizer>(ref reader, options); + case "lowercase": + return JsonSerializer.Deserialize<LowercaseTokenizer>(ref reader, options); + case "letter": + return JsonSerializer.Deserialize<LetterTokenizer>(ref reader, options); + case "keyword": + return JsonSerializer.Deserialize<KeywordTokenizer>(ref reader, options); + case "edge_ngram": + return JsonSerializer.Deserialize<EdgeNGramTokenizer>(ref reader, options); + case "char_group": + return JsonSerializer.Deserialize<CharGroupTokenizer>(ref reader, options); + default: + ThrowHelper.ThrowUnknownTaggedUnionVariantJsonException(type, typeof(ITokenizer)); + return null; + } + } + + public override void Write(Utf8JsonWriter writer, ITokenizer value, JsonSerializerOptions options) + { + if (value is null) + { + writer.WriteNullValue(); + return; + } + + switch (value.Type) + { + case "icu_tokenizer": + JsonSerializer.Serialize(writer, value, typeof(IcuTokenizer), options); + return; + case "pattern": + JsonSerializer.Serialize(writer, value, typeof(PatternTokenizer), options); + return; + case "kuromoji_tokenizer": + JsonSerializer.Serialize(writer, value, typeof(KuromojiTokenizer), options); + return; + case "whitespace": + JsonSerializer.Serialize(writer, value, typeof(WhitespaceTokenizer), options); + return; + case "uax_url_email": + JsonSerializer.Serialize(writer, value, typeof(UaxEmailUrlTokenizer), options); + return; + case "standard": + JsonSerializer.Serialize(writer, value, typeof(StandardTokenizer), options); + return; + case "path_hierarchy": + JsonSerializer.Serialize(writer, value, typeof(PathHierarchyTokenizer), options); + return; + case "nori_tokenizer": + JsonSerializer.Serialize(writer, value, typeof(NoriTokenizer), options); + return; + case "ngram": + JsonSerializer.Serialize(writer, value, typeof(NGramTokenizer), options); + return; + case "lowercase": + JsonSerializer.Serialize(writer, value, typeof(LowercaseTokenizer), options); + return; + case "letter": + JsonSerializer.Serialize(writer, value, typeof(LetterTokenizer), options); + return; + case "keyword": + JsonSerializer.Serialize(writer, value, typeof(KeywordTokenizer), options); + return; + case "edge_ngram": + JsonSerializer.Serialize(writer, value, typeof(EdgeNGramTokenizer), options); + return; + case "char_group": + JsonSerializer.Serialize(writer, value, typeof(CharGroupTokenizer), options); + return; + default: + var type = value.GetType(); + JsonSerializer.Serialize(writer, value, type, options); + return; + } + } +} + +[JsonConverter(typeof(TokenizerInterfaceConverter))] +public partial interface ITokenizer +{ + public string Type { get; } +} \ No newline at end of file diff --git 
a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/TrimTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/TrimTokenFilter.g.cs index 43b9894ebba..6c4f1f21c63 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/TrimTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/TrimTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class TrimTokenFilter : ITokenFilterDefinition +public sealed partial class TrimTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("type")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/TruncateTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/TruncateTokenFilter.g.cs index 8de68d95f82..e9b1180c004 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/TruncateTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/TruncateTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class TruncateTokenFilter : ITokenFilterDefinition +public sealed partial class TruncateTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("length")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/UaxEmailUrlTokenizer.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/UaxEmailUrlTokenizer.g.cs index a89937b85a6..dcb65c0cc0d 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/UaxEmailUrlTokenizer.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/UaxEmailUrlTokenizer.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class UaxEmailUrlTokenizer : ITokenizerDefinition +public sealed partial class UaxEmailUrlTokenizer : ITokenizer { [JsonInclude] [JsonPropertyName("max_token_length")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/UniqueTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/UniqueTokenFilter.g.cs index bdfc5cd99f4..5080db90aa9 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/UniqueTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/UniqueTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class UniqueTokenFilter : ITokenFilterDefinition +public sealed partial class UniqueTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("only_on_same_position")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/UppercaseTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/UppercaseTokenFilter.g.cs index dc58118af03..6bf7eaf6d6f 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/UppercaseTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/UppercaseTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class UppercaseTokenFilter : ITokenFilterDefinition +public sealed partial class UppercaseTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("type")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/WhitespaceTokenizer.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/WhitespaceTokenizer.g.cs index 
0f9c4c8d53e..e0a86587ebd 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/WhitespaceTokenizer.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/WhitespaceTokenizer.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class WhitespaceTokenizer : ITokenizerDefinition +public sealed partial class WhitespaceTokenizer : ITokenizer { [JsonInclude] [JsonPropertyName("max_token_length")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/WordDelimiterGraphTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/WordDelimiterGraphTokenFilter.g.cs index 024e52b97d2..f8ab59d9a21 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/WordDelimiterGraphTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/WordDelimiterGraphTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class WordDelimiterGraphTokenFilter : ITokenFilterDefinition +public sealed partial class WordDelimiterGraphTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("adjust_offsets")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/WordDelimiterTokenFilter.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/WordDelimiterTokenFilter.g.cs index 8ff2f3ff34d..73ae34e68b7 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/WordDelimiterTokenFilter.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/Analysis/WordDelimiterTokenFilter.g.cs @@ -25,7 +25,7 @@ #nullable restore namespace Elastic.Clients.Elasticsearch.Analysis; -public sealed partial class WordDelimiterTokenFilter : ITokenFilterDefinition +public sealed partial class WordDelimiterTokenFilter : ITokenFilter { [JsonInclude] [JsonPropertyName("catenate_all")] diff --git a/src/Elastic.Clients.Elasticsearch/_Generated/Types/IndexManagement/IndexSettingsAnalysis.g.cs b/src/Elastic.Clients.Elasticsearch/_Generated/Types/IndexManagement/IndexSettingsAnalysis.g.cs index e82528ae65e..7b666899f08 100644 --- a/src/Elastic.Clients.Elasticsearch/_Generated/Types/IndexManagement/IndexSettingsAnalysis.g.cs +++ b/src/Elastic.Clients.Elasticsearch/_Generated/Types/IndexManagement/IndexSettingsAnalysis.g.cs @@ -29,23 +29,23 @@ public sealed partial class IndexSettingsAnalysis { [JsonInclude] [JsonPropertyName("analyzer")] - public Elastic.Clients.Elasticsearch.Analysis.Analyzers? Analyzer { get; set; } + public Elastic.Clients.Elasticsearch.Analysis.Analyzers? Analyzers { get; set; } [JsonInclude] [JsonPropertyName("char_filter")] - public IDictionary? CharFilter { get; set; } + public Elastic.Clients.Elasticsearch.Analysis.CharFilters? CharFilters { get; set; } [JsonInclude] [JsonPropertyName("filter")] - public IDictionary? Filter { get; set; } + public Elastic.Clients.Elasticsearch.Analysis.TokenFilters? TokenFilters { get; set; } [JsonInclude] [JsonPropertyName("normalizer")] - public Elastic.Clients.Elasticsearch.Analysis.Normalizers? Normalizer { get; set; } + public Elastic.Clients.Elasticsearch.Analysis.Normalizers? Normalizers { get; set; } [JsonInclude] [JsonPropertyName("tokenizer")] - public IDictionary? Tokenizer { get; set; } + public Elastic.Clients.Elasticsearch.Analysis.Tokenizers? 
Tokenizers { get; set; } } public sealed partial class IndexSettingsAnalysisDescriptor : SerializableDescriptor @@ -55,105 +55,147 @@ public IndexSettingsAnalysisDescriptor() : base() { } - private Elastic.Clients.Elasticsearch.Analysis.Analyzers? AnalyzerValue { get; set; } + private Elastic.Clients.Elasticsearch.Analysis.Analyzers? AnalyzersValue { get; set; } - private IDictionary? CharFilterValue { get; set; } + private Elastic.Clients.Elasticsearch.Analysis.CharFilters? CharFiltersValue { get; set; } - private IDictionary? FilterValue { get; set; } + private Elastic.Clients.Elasticsearch.Analysis.TokenFilters? TokenFiltersValue { get; set; } - private Elastic.Clients.Elasticsearch.Analysis.Normalizers? NormalizerValue { get; set; } + private Elastic.Clients.Elasticsearch.Analysis.Normalizers? NormalizersValue { get; set; } - private IDictionary? TokenizerValue { get; set; } + private Elastic.Clients.Elasticsearch.Analysis.Tokenizers? TokenizersValue { get; set; } - public IndexSettingsAnalysisDescriptor Analyzer(Elastic.Clients.Elasticsearch.Analysis.Analyzers? analyzer) + public IndexSettingsAnalysisDescriptor Analyzers(Elastic.Clients.Elasticsearch.Analysis.Analyzers? analyzers) { - AnalyzerValue = analyzer; + AnalyzersValue = analyzers; return Self; } - public IndexSettingsAnalysisDescriptor Analyzer(Analysis.AnalyzersDescriptor descriptor) + public IndexSettingsAnalysisDescriptor Analyzers(Analysis.AnalyzersDescriptor descriptor) { - AnalyzerValue = descriptor.PromisedValue; + AnalyzersValue = descriptor.PromisedValue; return Self; } - public IndexSettingsAnalysisDescriptor Analyzer(Action configure) + public IndexSettingsAnalysisDescriptor Analyzers(Action configure) { var descriptor = new Analysis.AnalyzersDescriptor(); configure?.Invoke(descriptor); - AnalyzerValue = descriptor.PromisedValue; + AnalyzersValue = descriptor.PromisedValue; return Self; } - public IndexSettingsAnalysisDescriptor CharFilter(Func, FluentDictionary> selector) + public IndexSettingsAnalysisDescriptor CharFilters(Elastic.Clients.Elasticsearch.Analysis.CharFilters? charFilters) { - CharFilterValue = selector?.Invoke(new FluentDictionary()); + CharFiltersValue = charFilters; return Self; } - public IndexSettingsAnalysisDescriptor Filter(Func, FluentDictionary> selector) + public IndexSettingsAnalysisDescriptor CharFilters(Analysis.CharFiltersDescriptor descriptor) { - FilterValue = selector?.Invoke(new FluentDictionary()); + CharFiltersValue = descriptor.PromisedValue; return Self; } - public IndexSettingsAnalysisDescriptor Normalizer(Elastic.Clients.Elasticsearch.Analysis.Normalizers? normalizer) + public IndexSettingsAnalysisDescriptor CharFilters(Action configure) { - NormalizerValue = normalizer; + var descriptor = new Analysis.CharFiltersDescriptor(); + configure?.Invoke(descriptor); + CharFiltersValue = descriptor.PromisedValue; + return Self; + } + + public IndexSettingsAnalysisDescriptor TokenFilters(Elastic.Clients.Elasticsearch.Analysis.TokenFilters? 
tokenFilters) + { + TokenFiltersValue = tokenFilters; + return Self; + } + + public IndexSettingsAnalysisDescriptor TokenFilters(Analysis.TokenFiltersDescriptor descriptor) + { + TokenFiltersValue = descriptor.PromisedValue; + return Self; + } + + public IndexSettingsAnalysisDescriptor TokenFilters(Action configure) + { + var descriptor = new Analysis.TokenFiltersDescriptor(); + configure?.Invoke(descriptor); + TokenFiltersValue = descriptor.PromisedValue; return Self; } - public IndexSettingsAnalysisDescriptor Normalizer(Analysis.NormalizersDescriptor descriptor) + public IndexSettingsAnalysisDescriptor Normalizers(Elastic.Clients.Elasticsearch.Analysis.Normalizers? normalizers) { - NormalizerValue = descriptor.PromisedValue; + NormalizersValue = normalizers; return Self; } - public IndexSettingsAnalysisDescriptor Normalizer(Action configure) + public IndexSettingsAnalysisDescriptor Normalizers(Analysis.NormalizersDescriptor descriptor) + { + NormalizersValue = descriptor.PromisedValue; + return Self; + } + + public IndexSettingsAnalysisDescriptor Normalizers(Action configure) { var descriptor = new Analysis.NormalizersDescriptor(); configure?.Invoke(descriptor); - NormalizerValue = descriptor.PromisedValue; + NormalizersValue = descriptor.PromisedValue; return Self; } - public IndexSettingsAnalysisDescriptor Tokenizer(Func, FluentDictionary> selector) + public IndexSettingsAnalysisDescriptor Tokenizers(Elastic.Clients.Elasticsearch.Analysis.Tokenizers? tokenizers) { - TokenizerValue = selector?.Invoke(new FluentDictionary()); + TokenizersValue = tokenizers; + return Self; + } + + public IndexSettingsAnalysisDescriptor Tokenizers(Analysis.TokenizersDescriptor descriptor) + { + TokenizersValue = descriptor.PromisedValue; + return Self; + } + + public IndexSettingsAnalysisDescriptor Tokenizers(Action configure) + { + var descriptor = new Analysis.TokenizersDescriptor(); + configure?.Invoke(descriptor); + TokenizersValue = descriptor.PromisedValue; return Self; } protected override void Serialize(Utf8JsonWriter writer, JsonSerializerOptions options, IElasticsearchClientSettings settings) { writer.WriteStartObject(); - if (AnalyzerValue is not null) + if (AnalyzersValue is not null) { writer.WritePropertyName("analyzer"); - JsonSerializer.Serialize(writer, AnalyzerValue, options); + JsonSerializer.Serialize(writer, AnalyzersValue, options); } - if (CharFilterValue is not null) + if (CharFiltersValue is not null) { writer.WritePropertyName("char_filter"); - JsonSerializer.Serialize(writer, CharFilterValue, options); + JsonSerializer.Serialize(writer, CharFiltersValue, options); } - if (FilterValue is not null) + if (TokenFiltersValue is not null) { writer.WritePropertyName("filter"); - JsonSerializer.Serialize(writer, FilterValue, options); + JsonSerializer.Serialize(writer, TokenFiltersValue, options); } - if (NormalizerValue is not null) + if (NormalizersValue is not null) { writer.WritePropertyName("normalizer"); - JsonSerializer.Serialize(writer, NormalizerValue, options); + JsonSerializer.Serialize(writer, NormalizersValue, options); } - if (TokenizerValue is not null) + if (TokenizersValue is not null) { writer.WritePropertyName("tokenizer"); - JsonSerializer.Serialize(writer, TokenizerValue, options); + JsonSerializer.Serialize(writer, TokenizersValue, options); } writer.WriteEndObject(); diff --git a/tests/Tests/IndexManagement/CreateIndexSerializationTests.cs b/tests/Tests/IndexManagement/CreateIndexSerializationTests.cs index 407165f017a..5259036bec7 100644 --- 
a/tests/Tests/IndexManagement/CreateIndexSerializationTests.cs +++ b/tests/Tests/IndexManagement/CreateIndexSerializationTests.cs @@ -2,6 +2,7 @@ // Elasticsearch B.V licenses this file to you under the Apache 2.0 License. // See the LICENSE file in the project root for more information. +using Elastic.Clients.Elasticsearch.Analysis; using Elastic.Clients.Elasticsearch.IndexManagement; using Elastic.Clients.Elasticsearch.QueryDsl; using System.Collections.Generic; @@ -41,4 +42,45 @@ public async Task CreateIndexWithAliases_SerializesCorrectly() var objectJson = await SerializeAndGetJsonStringAsync(createRequest); objectJson.Should().Be(json); } + + [U] + public async Task CreateIndexWithAnalysisSettings_SerializesCorrectly() + { + var descriptor = new CreateIndexRequestDescriptor("test") + .Settings(s => s + .Analysis(a => a + .Analyzers(a => a + .Stop("stop-name", stop => stop.StopwordsPath("path.txt")) + .Pattern("pattern-name", pattern => pattern.Version("version")) + .Custom("my-custom-analyzer", c => c + .Filter(new[] { "stop", "synonym" }) + .Tokenizer("standard"))) + .TokenFilters(f => f + .Synonym("synonym", synonym => synonym + .SynonymsPath("analysis/synonym.txt"))))); + + var json = await SerializeAndGetJsonStringAsync(descriptor); + + await Verifier.VerifyJson(json); + + var createRequest = new CreateIndexRequest("test") + { + Settings = new IndexSettings + { + Analysis = new IndexSettingsAnalysis + { + Analyzers = new Analyzers + { + { "stop-name", new StopAnalyzer { StopwordsPath = "path.txt" }}, + { "pattern-name", new PatternAnalyzer { Version = "version" }}, + { "my-custom-analyzer", new CustomAnalyzer { Filter = new[] { "stop", "synonym" }, Tokenizer = "standard" }} + }, + TokenFilters = new TokenFilters {{ "synonym", new SynonymTokenFilter { SynonymsPath = "analysis/synonym.txt" }}} + } + } + }; + + var objectJson = await SerializeAndGetJsonStringAsync(createRequest); + objectJson.Should().Be(json); + } } diff --git a/tests/Tests/IndexManagement/IndexSettingsSerializationTests.cs b/tests/Tests/IndexManagement/IndexSettingsSerializationTests.cs index 94a390f85f2..147d2784e4c 100644 --- a/tests/Tests/IndexManagement/IndexSettingsSerializationTests.cs +++ b/tests/Tests/IndexManagement/IndexSettingsSerializationTests.cs @@ -19,7 +19,7 @@ public class IndexSettingsSerializationTests : SerializerTestBase private const string IndexSettingsJson = @"{""creation_date"":""1655895084631""}"; [U] - public async Task CanSerializerIndexSettingsWithCustomAnalyzer() + public async Task CanSerialize_IndexSettingsWithCustomAnalyzer() { // Test case for https://github.com/elastic/elasticsearch-net/issues/6739 // Resolved after improved code-generation of internally-tagged unions to include @@ -27,7 +27,7 @@ public async Task CanSerializerIndexSettingsWithCustomAnalyzer() var descriptor = new IndexSettingsDescriptor() .Analysis(a => a - .Analyzer(a => a + .Analyzers(a => a .Custom("whitespace_lowercase", wl => wl .Tokenizer("whitespace") .Filter(new[] { "lowercase" }) @@ -39,7 +39,7 @@ public async Task CanSerializerIndexSettingsWithCustomAnalyzer() await Verifier.VerifyJson(json); var indexSettings = DeserializeJsonString<IndexSettings>(json); - var analyzer = indexSettings.Analysis.Analyzer["whitespace_lowercase"]; + var analyzer = indexSettings.Analysis.Analyzers["whitespace_lowercase"]; var customAnalyzer = analyzer.Should().BeAssignableTo<CustomAnalyzer>().Subject; customAnalyzer.Tokenizer.Should().Be("whitespace"); customAnalyzer.Filter.Should().ContainSingle("lowercase"); diff --git 
a/tests/Tests/_VerifySnapshots/CreateIndexSerializationTests.CreateIndexWithAnalysisSettings_SerializesCorrectly.verified.txt b/tests/Tests/_VerifySnapshots/CreateIndexSerializationTests.CreateIndexWithAnalysisSettings_SerializesCorrectly.verified.txt new file mode 100644 index 00000000000..428aa8b28ef --- /dev/null +++ b/tests/Tests/_VerifySnapshots/CreateIndexSerializationTests.CreateIndexWithAnalysisSettings_SerializesCorrectly.verified.txt @@ -0,0 +1,30 @@ +{ + settings: { + analysis: { + analyzer: { + my-custom-analyzer: { + filter: [ + stop, + synonym + ], + tokenizer: standard, + type: custom + }, + pattern-name: { + type: pattern, + version: version + }, + stop-name: { + stopwords_path: path.txt, + type: stop + } + }, + filter: { + synonym: { + synonyms_path: analysis/synonym.txt, + type: synonym + } + } + } + } +} \ No newline at end of file diff --git a/tests/Tests/_VerifySnapshots/IndexSettingsSerializationTests.CanSerializerIndexSettingsWithCustomAnalyzer.verified.txt b/tests/Tests/_VerifySnapshots/IndexSettingsSerializationTests.CanSerialize_IndexSettingsWithCustomAnalyzer.verified.txt similarity index 100% rename from tests/Tests/_VerifySnapshots/IndexSettingsSerializationTests.CanSerializerIndexSettingsWithCustomAnalyzer.verified.txt rename to tests/Tests/_VerifySnapshots/IndexSettingsSerializationTests.CanSerialize_IndexSettingsWithCustomAnalyzer.verified.txt
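For reference, a minimal usage sketch of the reshaped analysis settings API exercised by the tests in this diff. It mirrors the CreateIndexWithAnalysisSettings_SerializesCorrectly test; the index name, the registered analyzer/filter names, and their values are illustrative placeholders rather than part of the change itself.

using Elastic.Clients.Elasticsearch.Analysis;
using Elastic.Clients.Elasticsearch.IndexManagement;

// IndexSettingsAnalysis now exposes strongly typed dictionaries (Analyzers, TokenFilters,
// Tokenizers, CharFilters, Normalizers) instead of the old Analyzer/Filter/Tokenizer members.
var request = new CreateIndexRequest("my-index")
{
	Settings = new IndexSettings
	{
		Analysis = new IndexSettingsAnalysis
		{
			Analyzers = new Analyzers
			{
				// Custom analyzer referencing a built-in tokenizer and token filters by name.
				{ "whitespace_lowercase", new CustomAnalyzer { Tokenizer = "whitespace", Filter = new[] { "lowercase", "synonym" } } }
			},
			TokenFilters = new TokenFilters
			{
				// File-based synonym filter; serialized under settings.analysis.filter.
				{ "synonym", new SynonymTokenFilter { SynonymsPath = "analysis/synonym.txt" } }
			}
		}
	}
};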