[Rename] ElasticsearchParseException class in server module (#169)

This commit renames the ElasticsearchParseException class in the server module to
OpenSearchParseException. All references and usages throughout the rest of the
codebase are refactored accordingly.

Signed-off-by: Nicholas Knize <nknize@amazon.com>
Nick Knize 2021-03-03 22:53:14 -06:00
parent 92d138309a
commit 2aa9906c42
155 changed files with 743 additions and 743 deletions
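For code that depends on the server module, the change is a pure rename: the package,
the constructor signatures, and the exception's behavior are unchanged, so migrating is
an import swap plus a class-name substitution. A minimal before/after sketch, reusing a
message that appears later in this diff:

// before
import org.elasticsearch.ElasticsearchParseException;
throw new ElasticsearchParseException("unknown key [{}] for create index", name);

// after: identical arguments and identical "{}" placeholder formatting
import org.elasticsearch.OpenSearchParseException;
throw new OpenSearchParseException("unknown key [{}] for create index", name);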

View File

@ -20,7 +20,7 @@
package org.elasticsearch.client.indices;
import org.elasticsearch.OpenSearchGenerationException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.client.TimedRequest;
@ -239,7 +239,7 @@ public class CreateIndexRequest extends TimedRequest implements Validatable, ToX
}
return this;
} catch(IOException e) {
throw new ElasticsearchParseException("Failed to parse aliases", e);
throw new OpenSearchParseException("Failed to parse aliases", e);
}
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.client.indices;
import org.elasticsearch.OpenSearchGenerationException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.admin.indices.alias.Alias;
@ -320,7 +320,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
} else if (name.equals("aliases")) {
aliases((Map<String, Object>) entry.getValue());
} else {
throw new ElasticsearchParseException("unknown key [{}] in the template ", name);
throw new OpenSearchParseException("unknown key [{}] in the template ", name);
}
}
return this;
@ -400,7 +400,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
}
return this;
} catch(IOException e) {
throw new ElasticsearchParseException("Failed to parse aliases", e);
throw new OpenSearchParseException("Failed to parse aliases", e);
}
}

View File

@ -172,7 +172,7 @@ public class TimeValueTests extends ESTestCase {
public void testFailOnUnknownUnits() {
try {
TimeValue.parseTimeValue("23tw", null, "test");
fail("Expected ElasticsearchParseException");
fail("Expected OpenSearchParseException");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("failed to parse"));
}
@ -181,7 +181,7 @@ public class TimeValueTests extends ESTestCase {
public void testFailOnMissingUnits() {
try {
TimeValue.parseTimeValue("42", null, "test");
fail("Expected ElasticsearchParseException");
fail("Expected OpenSearchParseException");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("failed to parse"));
}
@ -190,7 +190,7 @@ public class TimeValueTests extends ESTestCase {
public void testNoDotsAllowed() {
try {
TimeValue.parseTimeValue("42ms.", null, "test");
fail("Expected ElasticsearchParseException");
fail("Expected OpenSearchParseException");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("failed to parse"));
}

View File

@ -164,7 +164,7 @@ public class IngestRestartIT extends ESIntegTestCase {
.get());
assertThat(exception.getMessage(),
equalTo("pipeline with id [" + pipelineIdWithScript + "] could not be loaded, caused by " +
"[ElasticsearchParseException[Error updating pipeline with id [" + pipelineIdWithScript + "]]; " +
"[OpenSearchParseException[Error updating pipeline with id [" + pipelineIdWithScript + "]]; " +
"nested: OpenSearchException[java.lang.IllegalArgumentException: cannot execute [inline] scripts]; " +
"nested: IllegalArgumentException[cannot execute [inline] scripts];; " +
"OpenSearchException[java.lang.IllegalArgumentException: cannot execute [inline] scripts]; " +

View File

@ -19,7 +19,7 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.ingest.RandomDocumentPicks;
import org.elasticsearch.test.ESTestCase;
@ -96,7 +96,7 @@ public abstract class AbstractStringProcessorFactoryTestCase extends ESTestCase
try {
factory.create(null, null, null, config);
fail("factory create should have failed");
} catch(ElasticsearchParseException e) {
} catch(OpenSearchParseException e) {
assertThat(e.getMessage(), equalTo("[field] required property is missing"));
}
}

View File

@ -20,7 +20,7 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.OpenSearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.ingest.TestTemplateService;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
@ -64,7 +64,7 @@ public class AppendProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, null, null, config);
fail("factory create should have failed");
} catch(ElasticsearchParseException e) {
} catch(OpenSearchParseException e) {
assertThat(e.getMessage(), equalTo("[field] required property is missing"));
}
}
@ -75,7 +75,7 @@ public class AppendProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, null, null, config);
fail("factory create should have failed");
} catch(ElasticsearchParseException e) {
} catch(OpenSearchParseException e) {
assertThat(e.getMessage(), equalTo("[value] required property is missing"));
}
}
@ -87,7 +87,7 @@ public class AppendProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, null, null, config);
fail("factory create should have failed");
} catch(ElasticsearchParseException e) {
} catch(OpenSearchParseException e) {
assertThat(e.getMessage(), equalTo("[value] required property is missing"));
}
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matchers;
@ -56,7 +56,7 @@ public class ConvertProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, null, null, config);
fail("factory create should have failed");
} catch (ElasticsearchParseException e) {
} catch (OpenSearchParseException e) {
assertThat(e.getMessage(), Matchers.equalTo("[type] type [" + type + "] not supported, cannot convert field."));
assertThat(e.getMetadata("es.processor_type").get(0), equalTo(ConvertProcessor.TYPE));
assertThat(e.getMetadata("es.property_name").get(0), equalTo("type"));
@ -72,7 +72,7 @@ public class ConvertProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, null, null, config);
fail("factory create should have failed");
} catch (ElasticsearchParseException e) {
} catch (OpenSearchParseException e) {
assertThat(e.getMessage(), Matchers.equalTo("[field] required property is missing"));
}
}
@ -84,7 +84,7 @@ public class ConvertProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, null, null, config);
fail("factory create should have failed");
} catch (ElasticsearchParseException e) {
} catch (OpenSearchParseException e) {
assertThat(e.getMessage(), Matchers.equalTo("[type] required property is missing"));
}
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.ingest.TestTemplateService;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matchers;
@ -89,12 +89,12 @@ public class DateIndexNameFactoryTests extends ESTestCase {
DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory(TestTemplateService.instance());
Map<String, Object> config = new HashMap<>();
config.put("date_rounding", "y");
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config));
OpenSearchParseException e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, null, null, config));
assertThat(e.getMessage(), Matchers.equalTo("[field] required property is missing"));
config.clear();
config.put("field", "_field");
e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config));
e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, null, null, config));
assertThat(e.getMessage(), Matchers.equalTo("[date_rounding] required property is missing"));
}
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.ingest.TestTemplateService;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
@ -67,7 +67,7 @@ public class DateProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, null, null, config);
fail("processor creation should have failed");
} catch(ElasticsearchParseException e) {
} catch(OpenSearchParseException e) {
assertThat(e.getMessage(), containsString("[field] required property is missing"));
}
}
@ -82,7 +82,7 @@ public class DateProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, null, null, config);
fail("processor creation should have failed");
} catch(ElasticsearchParseException e) {
} catch(OpenSearchParseException e) {
assertThat(e.getMessage(), containsString("[formats] required property is missing"));
}
}
@ -130,7 +130,7 @@ public class DateProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, null, null, config);
fail("processor creation should have failed");
} catch(ElasticsearchParseException e) {
} catch(OpenSearchParseException e) {
assertThat(e.getMessage(), containsString("[formats] property isn't a list, but of type [java.lang.String]"));
}
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.dissect.DissectException;
import org.elasticsearch.ingest.RandomDocumentPicks;
import org.elasticsearch.test.ESTestCase;
@ -60,7 +60,7 @@ public class DissectProcessorFactoryTests extends ESTestCase {
DissectProcessor.Factory factory = new DissectProcessor.Factory();
Map<String, Object> config = new HashMap<>();
config.put("pattern", "%{a},%{b},%{c}");
Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, "_tag", null, config));
Exception e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, "_tag", null, config));
assertThat(e.getMessage(), Matchers.equalTo("[field] required property is missing"));
}
@ -68,7 +68,7 @@ public class DissectProcessorFactoryTests extends ESTestCase {
DissectProcessor.Factory factory = new DissectProcessor.Factory();
Map<String, Object> config = new HashMap<>();
config.put("field", randomAlphaOfLength(10));
Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, "_tag", null, config));
Exception e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, "_tag", null, config));
assertThat(e.getMessage(), Matchers.equalTo("[pattern] required property is missing"));
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.test.ESTestCase;
import java.util.HashMap;
@ -66,7 +66,7 @@ public class DotExpanderProcessorFactoryTests extends ESTestCase {
Map<String, Object> config = new HashMap<>();
config.put("path", "_path");
Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, "_tag", null, config));
Exception e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, "_tag", null, config));
assertThat(e.getMessage(), equalTo("[field] required property is missing"));
}
@ -76,7 +76,7 @@ public class DotExpanderProcessorFactoryTests extends ESTestCase {
for (String field : fields) {
Map<String, Object> config = new HashMap<>();
config.put("field", field);
Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, "_tag", null, config));
Exception e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, "_tag", null, config));
assertThat(e.getMessage(), equalTo("[field] field does not contain a dot"));
}
@ -84,7 +84,7 @@ public class DotExpanderProcessorFactoryTests extends ESTestCase {
for (String field : fields) {
Map<String, Object> config = new HashMap<>();
config.put("field", field);
Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, "_tag", null, config));
Exception e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, "_tag", null, config));
assertThat(e.getMessage(), equalTo("[field] Field can't start or end with a dot"));
}
@ -92,7 +92,7 @@ public class DotExpanderProcessorFactoryTests extends ESTestCase {
for (String field : fields) {
Map<String, Object> config = new HashMap<>();
config.put("field", field);
Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, "_tag", null, config));
Exception e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, "_tag", null, config));
assertThat(e.getMessage(), equalTo("[field] No space between dots"));
}
}

View File

@ -20,7 +20,7 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.OpenSearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.ingest.TestTemplateService;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
@ -54,7 +54,7 @@ public class FailProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, null, null, config);
fail("factory create should have failed");
} catch(ElasticsearchParseException e) {
} catch(OpenSearchParseException e) {
assertThat(e.getMessage(), equalTo("[message] required property is missing"));
}
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.ingest.Processor;
import org.elasticsearch.ingest.TestProcessor;
import org.elasticsearch.script.ScriptService;
@ -85,7 +85,7 @@ public class ForEachProcessorFactoryTests extends ESTestCase {
processorTypes.put("_first", Collections.emptyMap());
processorTypes.put("_second", Collections.emptyMap());
config.put("processor", processorTypes);
Exception exception = expectThrows(ElasticsearchParseException.class, () -> forEachFactory.create(registry, null, null, config));
Exception exception = expectThrows(OpenSearchParseException.class, () -> forEachFactory.create(registry, null, null, config));
assertThat(exception.getMessage(), equalTo("[processor] Must specify exactly one processor type"));
}
@ -94,7 +94,7 @@ public class ForEachProcessorFactoryTests extends ESTestCase {
Map<String, Object> config = new HashMap<>();
config.put("field", "_field");
config.put("processor", Collections.singletonMap("_name", Collections.emptyMap()));
Exception expectedException = expectThrows(ElasticsearchParseException.class,
Exception expectedException = expectThrows(OpenSearchParseException.class,
() -> forEachFactory.create(Collections.emptyMap(), null, null, config));
assertThat(expectedException.getMessage(), equalTo("No processor type exists with name [_name]"));
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.grok.MatcherWatchdog;
import org.elasticsearch.test.ESTestCase;
@ -66,7 +66,7 @@ public class GrokProcessorFactoryTests extends ESTestCase {
GrokProcessor.Factory factory = new GrokProcessor.Factory(Collections.emptyMap(), MatcherWatchdog.noop());
Map<String, Object> config = new HashMap<>();
config.put("patterns", Collections.singletonList("(?<foo>\\w+)"));
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config));
OpenSearchParseException e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, null, null, config));
assertThat(e.getMessage(), equalTo("[field] required property is missing"));
}
@ -74,7 +74,7 @@ public class GrokProcessorFactoryTests extends ESTestCase {
GrokProcessor.Factory factory = new GrokProcessor.Factory(Collections.emptyMap(), MatcherWatchdog.noop());
Map<String, Object> config = new HashMap<>();
config.put("field", "foo");
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config));
OpenSearchParseException e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, null, null, config));
assertThat(e.getMessage(), equalTo("[patterns] required property is missing"));
}
@ -83,7 +83,7 @@ public class GrokProcessorFactoryTests extends ESTestCase {
Map<String, Object> config = new HashMap<>();
config.put("field", "foo");
config.put("patterns", Collections.emptyList());
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config));
OpenSearchParseException e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, null, null, config));
assertThat(e.getMessage(), equalTo("[patterns] List of patterns must not be empty"));
}
@ -105,7 +105,7 @@ public class GrokProcessorFactoryTests extends ESTestCase {
Map<String, Object> config = new HashMap<>();
config.put("field", "_field");
config.put("patterns", Collections.singletonList("["));
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config));
OpenSearchParseException e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, null, null, config));
assertThat(e.getMessage(), equalTo("[patterns] Invalid regex pattern found in: [[]. premature end of char-class"));
}
@ -115,7 +115,7 @@ public class GrokProcessorFactoryTests extends ESTestCase {
config.put("field", "_field");
config.put("patterns", Collections.singletonList("%{MY_PATTERN:name}!"));
config.put("pattern_definitions", Collections.singletonMap("MY_PATTERN", "["));
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config));
OpenSearchParseException e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, null, null, config));
assertThat(e.getMessage(),
equalTo("[patterns] Invalid regex pattern found in: [%{MY_PATTERN:name}!]. premature end of char-class"));
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import java.util.HashMap;
import java.util.Map;
@ -56,7 +56,7 @@ public class GsubProcessorFactoryTests extends AbstractStringProcessorFactoryTes
try {
factory.create(null, null, null, config);
fail("factory create should have failed");
} catch(ElasticsearchParseException e) {
} catch(OpenSearchParseException e) {
assertThat(e.getMessage(), equalTo("[pattern] required property is missing"));
}
}
@ -69,7 +69,7 @@ public class GsubProcessorFactoryTests extends AbstractStringProcessorFactoryTes
try {
factory.create(null, null, null, config);
fail("factory create should have failed");
} catch(ElasticsearchParseException e) {
} catch(OpenSearchParseException e) {
assertThat(e.getMessage(), equalTo("[replacement] required property is missing"));
}
}
@ -83,7 +83,7 @@ public class GsubProcessorFactoryTests extends AbstractStringProcessorFactoryTes
try {
factory.create(null, null, null, config);
fail("factory create should have failed");
} catch(ElasticsearchParseException e) {
} catch(OpenSearchParseException e) {
assertThat(e.getMessage(), containsString("[pattern] Invalid regex pattern. Unclosed character class"));
}
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.test.ESTestCase;
import java.util.HashMap;
@ -49,7 +49,7 @@ public class JoinProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, null, null, config);
fail("factory create should have failed");
} catch (ElasticsearchParseException e) {
} catch (OpenSearchParseException e) {
assertThat(e.getMessage(), equalTo("[field] required property is missing"));
}
}
@ -61,7 +61,7 @@ public class JoinProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, null, null, config);
fail("factory create should have failed");
} catch (ElasticsearchParseException e) {
} catch (OpenSearchParseException e) {
assertThat(e.getMessage(), equalTo("[separator] required property is missing"));
}
}

View File

@ -20,7 +20,7 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.OpenSearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.test.ESTestCase;
import java.util.HashMap;
@ -72,7 +72,7 @@ public class JsonProcessorFactoryTests extends ESTestCase {
public void testCreateWithMissingField() throws Exception {
Map<String, Object> config = new HashMap<>();
String processorTag = randomAlphaOfLength(10);
OpenSearchException exception = expectThrows(ElasticsearchParseException.class,
OpenSearchException exception = expectThrows(OpenSearchParseException.class,
() -> FACTORY.create(null, processorTag, null, config));
assertThat(exception.getMessage(), equalTo("[field] required property is missing"));
}
@ -84,7 +84,7 @@ public class JsonProcessorFactoryTests extends ESTestCase {
config.put("field", randomField);
config.put("target_field", randomTargetField);
config.put("add_to_root", true);
OpenSearchException exception = expectThrows(ElasticsearchParseException.class,
OpenSearchException exception = expectThrows(OpenSearchParseException.class,
() -> FACTORY.create(null, randomAlphaOfLength(10), null, config));
assertThat(exception.getMessage(), equalTo("[target_field] Cannot set a target field while also setting `add_to_root` to true"));
}

View File

@ -20,7 +20,7 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.OpenSearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.test.ESTestCase;
@ -78,7 +78,7 @@ public class KeyValueProcessorFactoryTests extends ESTestCase {
KeyValueProcessor.Factory factory = new KeyValueProcessor.Factory();
Map<String, Object> config = new HashMap<>();
String processorTag = randomAlphaOfLength(10);
OpenSearchException exception = expectThrows(ElasticsearchParseException.class,
OpenSearchException exception = expectThrows(OpenSearchParseException.class,
() -> factory.create(null, processorTag, null, config));
assertThat(exception.getMessage(), equalTo("[field] required property is missing"));
}
@ -88,7 +88,7 @@ public class KeyValueProcessorFactoryTests extends ESTestCase {
Map<String, Object> config = new HashMap<>();
config.put("field", "field1");
String processorTag = randomAlphaOfLength(10);
OpenSearchException exception = expectThrows(ElasticsearchParseException.class,
OpenSearchException exception = expectThrows(OpenSearchParseException.class,
() -> factory.create(null, processorTag, null, config));
assertThat(exception.getMessage(), equalTo("[field_split] required property is missing"));
}
@ -99,7 +99,7 @@ public class KeyValueProcessorFactoryTests extends ESTestCase {
config.put("field", "field1");
config.put("field_split", "&");
String processorTag = randomAlphaOfLength(10);
OpenSearchException exception = expectThrows(ElasticsearchParseException.class,
OpenSearchException exception = expectThrows(OpenSearchParseException.class,
() -> factory.create(null, processorTag, null, config));
assertThat(exception.getMessage(), equalTo("[value_split] required property is missing"));
}

View File

@ -20,7 +20,7 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.OpenSearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.ingest.TestTemplateService;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
@ -67,7 +67,7 @@ public class RemoveProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, null, null, config);
fail("factory create should have failed");
} catch(ElasticsearchParseException e) {
} catch(OpenSearchParseException e) {
assertThat(e.getMessage(), equalTo("[field] required property is missing"));
}
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.ingest.TestTemplateService;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
@ -70,7 +70,7 @@ public class RenameProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, null, null, config);
fail("factory create should have failed");
} catch(ElasticsearchParseException e) {
} catch(OpenSearchParseException e) {
assertThat(e.getMessage(), equalTo("[field] required property is missing"));
}
}
@ -81,7 +81,7 @@ public class RenameProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, null, null, config);
fail("factory create should have failed");
} catch(ElasticsearchParseException e) {
} catch(OpenSearchParseException e) {
assertThat(e.getMessage(), equalTo("[target_field] required property is missing"));
}
}

View File

@ -20,7 +20,7 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.OpenSearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.ingest.TestTemplateService;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
@ -72,7 +72,7 @@ public class SetProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, null, null, config);
fail("factory create should have failed");
} catch(ElasticsearchParseException e) {
} catch(OpenSearchParseException e) {
assertThat(e.getMessage(), equalTo("[field] required property is missing"));
}
}
@ -83,7 +83,7 @@ public class SetProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, null, null, config);
fail("factory create should have failed");
} catch(ElasticsearchParseException e) {
} catch(OpenSearchParseException e) {
assertThat(e.getMessage(), equalTo("[value] required property is missing"));
}
}
@ -95,7 +95,7 @@ public class SetProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, null, null, config);
fail("factory create should have failed");
} catch(ElasticsearchParseException e) {
} catch(OpenSearchParseException e) {
assertThat(e.getMessage(), equalTo("[value] required property is missing"));
}
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.ingest.RandomDocumentPicks;
import org.elasticsearch.test.ESTestCase;
@ -90,7 +90,7 @@ public class SortProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, processorTag, null, config);
fail("factory create should have failed");
} catch (ElasticsearchParseException e) {
} catch (OpenSearchParseException e) {
assertThat(e.getMessage(), equalTo("[order] Sort direction [invalid] not recognized. Valid values are: [asc, desc]"));
}
}
@ -101,7 +101,7 @@ public class SortProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, null, null, config);
fail("factory create should have failed");
} catch(ElasticsearchParseException e) {
} catch(OpenSearchParseException e) {
assertThat(e.getMessage(), equalTo("[field] required property is missing"));
}
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.test.ESTestCase;
import java.util.HashMap;
@ -50,7 +50,7 @@ public class SplitProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, null, null, config);
fail("factory create should have failed");
} catch(ElasticsearchParseException e) {
} catch(OpenSearchParseException e) {
assertThat(e.getMessage(), equalTo("[field] required property is missing"));
}
}
@ -62,7 +62,7 @@ public class SplitProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, null, null, config);
fail("factory create should have failed");
} catch(ElasticsearchParseException e) {
} catch(OpenSearchParseException e) {
assertThat(e.getMessage(), equalTo("[separator] required property is missing"));
}
}

View File

@ -29,7 +29,7 @@ import com.maxmind.geoip2.record.Continent;
import com.maxmind.geoip2.record.Country;
import com.maxmind.geoip2.record.Location;
import com.maxmind.geoip2.record.Subdivision;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.network.NetworkAddress;
@ -173,7 +173,7 @@ public final class GeoIpProcessor extends AbstractProcessor {
geoData = Collections.emptyMap();
}
} else {
throw new ElasticsearchParseException("Unsupported database type [" + lazyLoader.getDatabaseType()
throw new OpenSearchParseException("Unsupported database type [" + lazyLoader.getDatabaseType()
+ "]", new IllegalStateException());
}
return geoData;

View File

@ -20,7 +20,7 @@
package org.elasticsearch.ingest.geoip;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.ingest.IngestDocument;
@ -173,7 +173,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
asnOnlyProperties.remove(GeoIpProcessor.Property.IP);
String asnProperty = RandomPicks.randomFrom(Randomness.get(), asnOnlyProperties).toString();
config.put("properties", Collections.singletonList(asnProperty));
Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config));
Exception e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, null, null, config));
assertThat(e.getMessage(), equalTo("[properties] illegal property value [" + asnProperty +
"]. valid values are [IP, COUNTRY_ISO_CODE, COUNTRY_NAME, CONTINENT_NAME]"));
}
@ -187,7 +187,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
cityOnlyProperties.remove(GeoIpProcessor.Property.IP);
String cityProperty = RandomPicks.randomFrom(Randomness.get(), cityOnlyProperties).toString();
config.put("properties", Collections.singletonList(cityProperty));
Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config));
Exception e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, null, null, config));
assertThat(e.getMessage(), equalTo("[properties] illegal property value [" + cityProperty +
"]. valid values are [IP, ASN, ORGANIZATION_NAME, NETWORK]"));
}
@ -198,7 +198,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
Map<String, Object> config = new HashMap<>();
config.put("field", "_field");
config.put("database_file", "does-not-exist.mmdb");
Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config));
Exception e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, null, null, config));
assertThat(e.getMessage(), equalTo("[database_file] database file [does-not-exist.mmdb] doesn't exist"));
}
@ -232,14 +232,14 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
Map<String, Object> config1 = new HashMap<>();
config1.put("field", "_field");
config1.put("properties", Collections.singletonList("invalid"));
Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config1));
Exception e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, null, null, config1));
assertThat(e.getMessage(), equalTo("[properties] illegal property value [invalid]. valid values are [IP, COUNTRY_ISO_CODE, " +
"COUNTRY_NAME, CONTINENT_NAME, REGION_ISO_CODE, REGION_NAME, CITY_NAME, TIMEZONE, LOCATION]"));
Map<String, Object> config2 = new HashMap<>();
config2.put("field", "_field");
config2.put("properties", "invalid");
e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config2));
e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, null, null, config2));
assertThat(e.getMessage(), equalTo("[properties] property isn't a list, but of type [java.lang.String]"));
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.ingest.useragent;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -49,7 +49,7 @@ final class UserAgentParser {
try {
init(regexStream);
} catch (IOException e) {
throw new ElasticsearchParseException("error parsing regular expression file", e);
throw new OpenSearchParseException("error parsing regular expression file", e);
}
}
@ -94,7 +94,7 @@ final class UserAgentParser {
}
if (uaPatterns.isEmpty() && osPatterns.isEmpty() && devicePatterns.isEmpty()) {
throw new ElasticsearchParseException("not a valid regular expression file");
throw new OpenSearchParseException("not a valid regular expression file");
}
}
@ -112,19 +112,19 @@ final class UserAgentParser {
XContentParser.Token token = yamlParser.nextToken();
if (token != XContentParser.Token.START_ARRAY) {
throw new ElasticsearchParseException("malformed regular expression file, should continue with 'array' after 'object'");
throw new OpenSearchParseException("malformed regular expression file, should continue with 'array' after 'object'");
}
token = yamlParser.nextToken();
if (token != XContentParser.Token.START_OBJECT) {
throw new ElasticsearchParseException("malformed regular expression file, expecting 'object'");
throw new OpenSearchParseException("malformed regular expression file, expecting 'object'");
}
while (token == XContentParser.Token.START_OBJECT) {
token = yamlParser.nextToken();
if (token != XContentParser.Token.FIELD_NAME) {
throw new ElasticsearchParseException("malformed regular expression file, should continue with 'field_name' after 'array'");
throw new OpenSearchParseException("malformed regular expression file, should continue with 'field_name' after 'array'");
}
Map<String, String> regexMap = new HashMap<>();

View File

@ -19,7 +19,7 @@
package org.elasticsearch.ingest.useragent;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.test.ESTestCase;
import org.junit.BeforeClass;
@ -153,7 +153,7 @@ public class UserAgentProcessorFactoryTests extends ESTestCase {
config.put("field", "_field");
config.put("regex_file", "does-not-exist.yml");
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config));
OpenSearchParseException e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, null, null, config));
assertThat(e.getMessage(), equalTo("[regex_file] regex file [does-not-exist.yml] doesn't exist (has to exist at node startup)"));
}
@ -198,7 +198,7 @@ public class UserAgentProcessorFactoryTests extends ESTestCase {
config.put("field", "_field");
config.put("properties", Collections.singletonList("invalid"));
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config));
OpenSearchParseException e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, null, null, config));
assertThat(e.getMessage(), equalTo("[properties] illegal property value [invalid]. valid values are [NAME, MAJOR, MINOR, "
+ "PATCH, OS, OS_NAME, OS_MAJOR, OS_MINOR, DEVICE, BUILD, ORIGINAL, VERSION]"));
}
@ -210,7 +210,7 @@ public class UserAgentProcessorFactoryTests extends ESTestCase {
config.put("field", "_field");
config.put("properties", "invalid");
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config));
OpenSearchParseException e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, null, null, config));
assertThat(e.getMessage(), equalTo("[properties] property isn't a list, but of type [java.lang.String]"));
}
}

View File

@ -23,7 +23,7 @@ import org.apache.tika.exception.ZeroByteFileException;
import org.apache.tika.language.LanguageIdentifier;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.metadata.TikaCoreProperties;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.ingest.AbstractProcessor;
import org.elasticsearch.ingest.IngestDocument;
@ -103,7 +103,7 @@ public final class AttachmentProcessor extends AbstractProcessor {
// tika 1.17 throws an exception when the InputStream has 0 bytes.
// previously, it did not mind. This is here to preserve that behavior.
} catch (Exception e) {
throw new ElasticsearchParseException("Error parsing document in field [{}]", e, field);
throw new OpenSearchParseException("Error parsing document in field [{}]", e, field);
}
if (properties.contains(Property.CONTENT) && Strings.hasLength(parsedContent)) {

View File

@ -19,7 +19,7 @@
package org.elasticsearch.ingest.attachment;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.test.ESTestCase;
import java.util.ArrayList;
@ -102,7 +102,7 @@ public class AttachmentProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, null, null, config);
fail("exception expected");
} catch (ElasticsearchParseException e) {
} catch (OpenSearchParseException e) {
assertThat(e.getMessage(), containsString("[properties] illegal field option [invalid]"));
// ensure allowed fields are mentioned
for (AttachmentProcessor.Property property : AttachmentProcessor.Property.values()) {
@ -116,7 +116,7 @@ public class AttachmentProcessorFactoryTests extends ESTestCase {
try {
factory.create(null, null, null, config);
fail("exception expected");
} catch (ElasticsearchParseException e) {
} catch (OpenSearchParseException e) {
assertThat(e.getMessage(), equalTo("[properties] property isn't a list, but of type [java.lang.String]"));
}
}

View File

@ -20,7 +20,7 @@
package org.elasticsearch.ingest.attachment;
import org.apache.commons.io.IOUtils;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.Processor;
import org.elasticsearch.ingest.RandomDocumentPicks;
@ -173,7 +173,7 @@ public class AttachmentProcessorTests extends ESTestCase {
}
public void testEncryptedPdf() throws Exception {
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> parseDocument("encrypted.pdf", processor));
OpenSearchParseException e = expectThrows(OpenSearchParseException.class, () -> parseDocument("encrypted.pdf", processor));
assertThat(e.getDetailedMessage(), containsString("document is encrypted"));
}

View File

@ -31,7 +31,7 @@ import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.NamedAnalyzer;
@ -193,7 +193,7 @@ public class AnnotatedTextFieldMapper extends ParametrizedFieldMapper {
String[] kv = pair.split("=");
try {
if (kv.length == 2) {
throw new ElasticsearchParseException("key=value pairs are not supported in annotations");
throw new OpenSearchParseException("key=value pairs are not supported in annotations");
}
if (kv.length == 1) {
//Check "=" sign wasn't in the pair string
@ -206,7 +206,7 @@ public class AnnotatedTextFieldMapper extends ParametrizedFieldMapper {
annotations.add(new AnnotationToken(startOffset, endOffset, value));
}
} catch (UnsupportedEncodingException e) {
throw new ElasticsearchParseException("Unsupported encoding parsing annotated text", e);
throw new OpenSearchParseException("Unsupported encoding parsing annotated text", e);
}
}
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.index.mapper.annotatedtext;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedText;
import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedText.AnnotationToken;
import org.elasticsearch.test.ESTestCase;
@ -29,7 +29,7 @@ import java.util.List;
import static org.hamcrest.Matchers.equalTo;
public class AnnotatedTextParsingTests extends ESTestCase {
private void checkParsing(String markup, String expectedPlainText, AnnotationToken... expectedTokens) {
AnnotatedText at = AnnotatedText.parse(markup);
assertEquals(expectedPlainText, at.textMinusMarkup);
@ -38,36 +38,36 @@ public class AnnotatedTextParsingTests extends ESTestCase {
for (int i = 0; i < expectedTokens.length; i++) {
assertEquals(expectedTokens[i], actualAnnotations.get(i));
}
}
public void testSingleValueMarkup() {
checkParsing("foo [bar](Y)", "foo bar", new AnnotationToken(4,7,"Y"));
}
public void testMultiValueMarkup() {
checkParsing("foo [bar](Y&B)", "foo bar", new AnnotationToken(4,7,"Y"),
new AnnotationToken(4,7,"B"));
}
public void testBlankTextAnnotation() {
checkParsing("It sounded like this:[](theSoundOfOneHandClapping)", "It sounded like this:",
new AnnotationToken(21,21,"theSoundOfOneHandClapping"));
}
public void testMissingBracket() {
checkParsing("[foo](MissingEndBracket bar",
"[foo](MissingEndBracket bar", new AnnotationToken[0]);
}
public void testAnnotationWithType() {
Exception expectedException = expectThrows(ElasticsearchParseException.class,
Exception expectedException = expectThrows(OpenSearchParseException.class,
() -> checkParsing("foo [bar](type=foo) baz", "foo bar baz", new AnnotationToken(4,7, "noType")));
assertThat(expectedException.getMessage(), equalTo("key=value pairs are not supported in annotations"));
}
public void testMissingValue() {
checkParsing("[foo]() bar", "foo bar", new AnnotationToken[0]);
}
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.cluster.coordination;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionRequest;
@ -251,7 +251,7 @@ public class RareClusterStateIT extends ESIntegTestCase {
Object properties;
try {
properties = typeMappings.getSourceAsMap().get("properties");
} catch (ElasticsearchParseException e) {
} catch (OpenSearchParseException e) {
throw new AssertionError(e);
}
assertNotNull(properties);

View File

@ -20,7 +20,7 @@
package org.elasticsearch.ingest;
import org.elasticsearch.OpenSearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.bulk.BulkItemResponse;
@ -269,7 +269,7 @@ public class IngestClientIT extends ESIntegTestCase {
.endArray()
.endObject());
PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id2", source, XContentType.JSON);
Exception e = expectThrows(ElasticsearchParseException.class,
Exception e = expectThrows(OpenSearchParseException.class,
() -> client().admin().cluster().putPipeline(putPipelineRequest).actionGet());
assertThat(e.getMessage(), equalTo("processor [test] doesn't support one or more provided configuration parameters [unused]"));

View File

@ -19,7 +19,7 @@
package org.elasticsearch.ingest;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentType;
@ -71,7 +71,7 @@ public class IngestProcessorNotInstalledOnAllNodesIT extends ESIntegTestCase {
try {
client().admin().cluster().preparePutPipeline("_id", pipelineSource, XContentType.JSON).get();
fail("exception expected");
} catch (ElasticsearchParseException e) {
} catch (OpenSearchParseException e) {
assertThat(e.getMessage(), containsString("Processor type [test] is not installed on node"));
}
}
@ -84,7 +84,7 @@ public class IngestProcessorNotInstalledOnAllNodesIT extends ESIntegTestCase {
try {
client().admin().cluster().preparePutPipeline("_id", pipelineSource, XContentType.JSON).get();
fail("exception expected");
} catch (ElasticsearchParseException e) {
} catch (OpenSearchParseException e) {
assertThat(e.getMessage(), equalTo("No processor type exists with name [test]"));
}
}

View File

@ -799,8 +799,8 @@ public class OpenSearchException extends RuntimeException implements ToXContentF
org.elasticsearch.indices.IndexPrimaryShardNotAllocatedException::new, 33, UNKNOWN_VERSION_ADDED),
TRANSPORT_EXCEPTION(org.elasticsearch.transport.TransportException.class,
org.elasticsearch.transport.TransportException::new, 34, UNKNOWN_VERSION_ADDED),
ELASTICSEARCH_PARSE_EXCEPTION(org.elasticsearch.ElasticsearchParseException.class,
org.elasticsearch.ElasticsearchParseException::new, 35, UNKNOWN_VERSION_ADDED),
ELASTICSEARCH_PARSE_EXCEPTION(org.elasticsearch.OpenSearchParseException.class,
org.elasticsearch.OpenSearchParseException::new, 35, UNKNOWN_VERSION_ADDED),
SEARCH_EXCEPTION(org.elasticsearch.search.SearchException.class,
org.elasticsearch.search.SearchException::new, 36, UNKNOWN_VERSION_ADDED),
MAPPER_EXCEPTION(org.elasticsearch.index.mapper.MapperException.class,

View File

@ -27,17 +27,17 @@ import java.io.IOException;
/**
* Unchecked exception that is translated into a {@code 400 BAD REQUEST} error when it bubbles out over HTTP.
*/
public class ElasticsearchParseException extends OpenSearchException {
public class OpenSearchParseException extends OpenSearchException {
public ElasticsearchParseException(String msg, Object... args) {
public OpenSearchParseException(String msg, Object... args) {
super(msg, args);
}
public ElasticsearchParseException(String msg, Throwable cause, Object... args) {
public OpenSearchParseException(String msg, Throwable cause, Object... args) {
super(msg, cause, args);
}
public ElasticsearchParseException(StreamInput in) throws IOException {
public OpenSearchParseException(StreamInput in) throws IOException {
super(in);
}
@Override
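The hunk above is the rename itself: the class still extends OpenSearchException, and all
three constructors (format string, format string plus cause, and StreamInput deserialization)
are kept verbatim, so the 400 BAD REQUEST translation described in the Javadoc is untouched.
A hedged usage sketch (fieldName is an illustrative variable, not taken from this diff):

// "{}" placeholders are resolved by the inherited OpenSearchException formatting, as before the rename
throw new OpenSearchParseException("failed to parse field [{}]", fieldName);
// wrapping a cause is unchanged as well
throw new OpenSearchParseException("Failed to parse aliases", e);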

View File

@ -19,7 +19,7 @@
package org.elasticsearch.action.admin.cluster.snapshots.status;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.action.support.broadcast.BroadcastShardResponse;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.ParseField;
@ -177,7 +177,7 @@ public class SnapshotIndexShardStatus extends BroadcastShardResponse implements
try {
stage = SnapshotIndexShardStage.valueOf(rawStage);
} catch (IllegalArgumentException iae) {
throw new ElasticsearchParseException(
throw new OpenSearchParseException(
"failed to parse snapshot index shard status [{}][{}], unknown stage [{}]",
shard.getIndex().getName(), shard.getId(), rawStage);
}
@ -195,7 +195,7 @@ public class SnapshotIndexShardStatus extends BroadcastShardResponse implements
try {
shard = Integer.parseInt(shardName);
} catch (NumberFormatException nfe) {
throw new ElasticsearchParseException(
throw new OpenSearchParseException(
"failed to parse snapshot index shard status [{}], expected numeric shard id but got [{}]", indexId, shardName);
}
ShardId shardId = new ShardId(new Index(indexId, IndexMetadata.INDEX_UUID_NA_VALUE), shard);

View File

@ -20,7 +20,7 @@
package org.elasticsearch.action.admin.indices.create;
import org.elasticsearch.OpenSearchGenerationException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.IndicesRequest;
@ -339,7 +339,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
}
return this;
} catch(IOException e) {
throw new ElasticsearchParseException("Failed to parse aliases", e);
throw new OpenSearchParseException("Failed to parse aliases", e);
}
}
@ -397,7 +397,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
String name = entry.getKey();
if (SETTINGS.match(name, deprecationHandler)) {
if (entry.getValue() instanceof Map == false) {
throw new ElasticsearchParseException("key [settings] must be an object");
throw new OpenSearchParseException("key [settings] must be an object");
}
settings((Map<String, Object>) entry.getValue());
} else if (MAPPINGS.match(name, deprecationHandler)) {
@ -408,7 +408,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
} else if (ALIASES.match(name, deprecationHandler)) {
aliases((Map<String, Object>) entry.getValue());
} else {
throw new ElasticsearchParseException("unknown key [{}] for create index", name);
throw new OpenSearchParseException("unknown key [{}] for create index", name);
}
}
return this;

View File

@ -19,7 +19,7 @@
package org.elasticsearch.action.admin.indices.template.put;
import org.elasticsearch.OpenSearchGenerationException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.IndicesRequest;
@ -374,7 +374,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
} else if (name.equals("aliases")) {
aliases((Map<String, Object>) entry.getValue());
} else {
throw new ElasticsearchParseException("unknown key [{}] in the template ", name);
throw new OpenSearchParseException("unknown key [{}] in the template ", name);
}
}
return this;
@ -453,7 +453,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
}
return this;
} catch(IOException e) {
throw new ElasticsearchParseException("Failed to parse aliases", e);
throw new OpenSearchParseException("Failed to parse aliases", e);
}
}

View File

@ -23,7 +23,7 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.SparseFixedBitSet;
import org.elasticsearch.Assertions;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.ResourceAlreadyExistsException;
import org.elasticsearch.Version;
@ -505,7 +505,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
break;
default: throw new AssertionError("request type not supported: [" + docWriteRequest.opType() + "]");
}
} catch (ElasticsearchParseException | IllegalArgumentException | RoutingMissingException e) {
} catch (OpenSearchParseException | IllegalArgumentException | RoutingMissingException e) {
BulkItemResponse.Failure failure = new BulkItemResponse.Failure(concreteIndex.getName(), docWriteRequest.type(),
docWriteRequest.id(), e);
BulkItemResponse bulkItemResponse = new BulkItemResponse(i, docWriteRequest.opType(), failure);

View File

@ -19,7 +19,7 @@
package org.elasticsearch.action.get;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
@ -145,7 +145,7 @@ public class GetResponse extends ActionResponse implements Iterable<DocumentFiel
/**
* The source of the document (As a map).
*/
public Map<String, Object> getSourceAsMap() throws ElasticsearchParseException {
public Map<String, Object> getSourceAsMap() throws OpenSearchParseException {
return getResult.sourceAsMap();
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.action.get;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
@ -453,10 +453,10 @@ public class MultiGetRequest extends ActionRequest
fetchSourceContext = new FetchSourceContext(fetchSourceContext.fetchSource(),
new String[]{parser.text()}, fetchSourceContext.excludes());
} else {
throw new ElasticsearchParseException("illegal type for _source: [{}]", token);
throw new OpenSearchParseException("illegal type for _source: [{}]", token);
}
} else {
throw new ElasticsearchParseException("failed to parse multi get request. unknown field [{}]", currentFieldName);
throw new OpenSearchParseException("failed to parse multi get request. unknown field [{}]", currentFieldName);
}
} else if (token == Token.START_ARRAY) {
if (FIELDS.match(currentFieldName, parser.getDeprecationHandler())) {
@ -488,7 +488,7 @@ public class MultiGetRequest extends ActionRequest
} else if ("excludes".equals(currentFieldName) || "exclude".equals(currentFieldName)) {
currentList = excludes != null ? excludes : (excludes = new ArrayList<>(2));
} else {
throw new ElasticsearchParseException("source definition may not contain [{}]", parser.text());
throw new OpenSearchParseException("source definition may not contain [{}]", parser.text());
}
} else if (token == Token.START_ARRAY) {
while ((token = parser.nextToken()) != Token.END_ARRAY) {
@ -497,7 +497,7 @@ public class MultiGetRequest extends ActionRequest
} else if (token.isValue()) {
currentList.add(parser.text());
} else {
throw new ElasticsearchParseException("unexpected token while parsing source settings");
throw new OpenSearchParseException("unexpected token while parsing source settings");
}
}

View File

@ -18,8 +18,8 @@
*/
package org.elasticsearch.action.support;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -395,7 +395,7 @@ public class IndicesOptions implements ToXContentFragment {
Token token = parser.currentToken() == Token.START_OBJECT ? parser.currentToken() : parser.nextToken();
String currentFieldName = null;
if (token != Token.START_OBJECT) {
throw new ElasticsearchParseException("expected START_OBJECT as the token but was " + token);
throw new OpenSearchParseException("expected START_OBJECT as the token but was " + token);
}
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
@ -408,15 +408,15 @@ public class IndicesOptions implements ToXContentFragment {
if (token.isValue()) {
WildcardStates.updateSetForValue(wildcardStates, parser.text());
} else {
throw new ElasticsearchParseException("expected values within array for " +
throw new OpenSearchParseException("expected values within array for " +
EXPAND_WILDCARDS_FIELD.getPreferredName());
}
}
} else {
throw new ElasticsearchParseException("already parsed expand_wildcards");
throw new OpenSearchParseException("already parsed expand_wildcards");
}
} else {
throw new ElasticsearchParseException(EXPAND_WILDCARDS_FIELD.getPreferredName() +
throw new OpenSearchParseException(EXPAND_WILDCARDS_FIELD.getPreferredName() +
" is the only field that is an array in IndicesOptions");
}
} else if (token.isValue()) {
@ -425,7 +425,7 @@ public class IndicesOptions implements ToXContentFragment {
wildcardStates = EnumSet.noneOf(WildcardStates.class);
WildcardStates.updateSetForValue(wildcardStates, parser.text());
} else {
throw new ElasticsearchParseException("already parsed expand_wildcards");
throw new OpenSearchParseException("already parsed expand_wildcards");
}
} else if (IGNORE_UNAVAILABLE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
ignoreUnavailable = parser.booleanValue();
@ -434,24 +434,24 @@ public class IndicesOptions implements ToXContentFragment {
} else if (IGNORE_THROTTLED_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
ignoreThrottled = parser.booleanValue();
} else {
throw new ElasticsearchParseException("could not read indices options. unexpected index option [" +
throw new OpenSearchParseException("could not read indices options. unexpected index option [" +
currentFieldName + "]");
}
} else {
throw new ElasticsearchParseException("could not read indices options. unexpected object field [" +
throw new OpenSearchParseException("could not read indices options. unexpected object field [" +
currentFieldName + "]");
}
}
if (wildcardStates == null) {
throw new ElasticsearchParseException("indices options xcontent did not contain " + EXPAND_WILDCARDS_FIELD.getPreferredName());
throw new OpenSearchParseException("indices options xcontent did not contain " + EXPAND_WILDCARDS_FIELD.getPreferredName());
}
if (ignoreUnavailable == null) {
throw new ElasticsearchParseException("indices options xcontent did not contain " +
throw new OpenSearchParseException("indices options xcontent did not contain " +
IGNORE_UNAVAILABLE_FIELD.getPreferredName());
}
if (allowNoIndices == null) {
throw new ElasticsearchParseException("indices options xcontent did not contain " +
throw new OpenSearchParseException("indices options xcontent did not contain " +
ALLOW_NO_INDICES_FIELD.getPreferredName());
}
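
Taken together, these checks mean a serialized IndicesOptions object must spell out expand_wildcards, ignore_unavailable and allow_no_indices. A round-trip sketch, assuming IndicesOptions.fromXContent(XContentParser) is the public entry point at this commit, with ignore_throttled included for completeness:

import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;

class IndicesOptionsParseDemo {
    public static void main(String[] args) throws Exception {
        String json = "{\"expand_wildcards\": [\"open\", \"closed\"], \"ignore_unavailable\": true,"
            + " \"allow_no_indices\": false, \"ignore_throttled\": false}";
        try (XContentParser parser = JsonXContent.jsonXContent.createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
            IndicesOptions options = IndicesOptions.fromXContent(parser);
            System.out.println(options.expandWildcardsOpen()); // true
        }
    }
}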


@ -19,7 +19,7 @@
package org.elasticsearch.action.termvectors;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.CompositeIndicesRequest;
@ -128,16 +128,16 @@ public class MultiTermVectorsRequest extends ActionRequest
ids.add(parser.text());
}
} else {
throw new ElasticsearchParseException("no parameter named [{}] and type ARRAY", currentFieldName);
throw new OpenSearchParseException("no parameter named [{}] and type ARRAY", currentFieldName);
}
} else if (token == XContentParser.Token.START_OBJECT && currentFieldName != null) {
if ("parameters".equals(currentFieldName)) {
TermVectorsRequest.parseRequest(template, parser);
} else {
throw new ElasticsearchParseException("no parameter named [{}] and type OBJECT", currentFieldName);
throw new OpenSearchParseException("no parameter named [{}] and type OBJECT", currentFieldName);
}
} else if (currentFieldName != null) {
throw new ElasticsearchParseException("_mtermvectors: Parameter [{}] not supported", currentFieldName);
throw new OpenSearchParseException("_mtermvectors: Parameter [{}] not supported", currentFieldName);
}
}
}


@ -19,7 +19,7 @@
package org.elasticsearch.action.termvectors;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.RealtimeRequest;
@ -590,7 +590,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
fields.add(parser.text());
}
} else {
throw new ElasticsearchParseException("failed to parse term vectors request. field [fields] must be an array");
throw new OpenSearchParseException("failed to parse term vectors request. field [fields] must be an array");
}
} else if (OFFSETS.match(currentFieldName, parser.getDeprecationHandler())) {
termVectorsRequest.offsets(parser.booleanValue());
@ -617,13 +617,13 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
RestTermVectorsAction.TYPES_DEPRECATION_MESSAGE);
} else if (ID.match(currentFieldName, parser.getDeprecationHandler())) {
if (termVectorsRequest.doc != null) {
throw new ElasticsearchParseException("failed to parse term vectors request. " +
throw new OpenSearchParseException("failed to parse term vectors request. " +
"either [id] or [doc] can be specified, but not both!");
}
termVectorsRequest.id = parser.text();
} else if (DOC.match(currentFieldName, parser.getDeprecationHandler())) {
if (termVectorsRequest.id != null) {
throw new ElasticsearchParseException("failed to parse term vectors request. " +
throw new OpenSearchParseException("failed to parse term vectors request. " +
"either [id] or [doc] can be specified, but not both!");
}
termVectorsRequest.doc(jsonBuilder().copyCurrentStructure(parser));
@ -634,7 +634,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
} else if (VERSION_TYPE.match(currentFieldName, parser.getDeprecationHandler())) {
termVectorsRequest.versionType = VersionType.fromString(parser.text());
} else {
throw new ElasticsearchParseException("failed to parse term vectors request. unknown field [{}]", currentFieldName);
throw new OpenSearchParseException("failed to parse term vectors request. unknown field [{}]", currentFieldName);
}
}
}
@ -650,7 +650,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
if (e.getValue() instanceof String) {
mapStrStr.put(e.getKey(), (String) e.getValue());
} else {
throw new ElasticsearchParseException("expecting the analyzer at [{}] to be a String, but found [{}] instead",
throw new OpenSearchParseException("expecting the analyzer at [{}] to be a String, but found [{}] instead",
e.getKey(), e.getValue().getClass());
}
}
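
The per_field_analyzer map is validated entry by entry: each value must be an analyzer name, i.e. a String. Extracted into a standalone method, the check above amounts to this (a restatement of the loop, not new behaviour):

import org.elasticsearch.OpenSearchParseException;

import java.util.HashMap;
import java.util.Map;

class PerFieldAnalyzerCheck {
    static Map<String, String> toAnalyzerNames(Map<String, Object> perFieldAnalyzer) {
        Map<String, String> mapStrStr = new HashMap<>(perFieldAnalyzer.size());
        for (Map.Entry<String, Object> e : perFieldAnalyzer.entrySet()) {
            if (e.getValue() instanceof String) {
                mapStrStr.put(e.getKey(), (String) e.getValue());
            } else {
                // e.g. {"title": 42} fails here; {"title": "whitespace"} passes
                throw new OpenSearchParseException(
                    "expecting the analyzer at [{}] to be a String, but found [{}] instead",
                    e.getKey(), e.getValue().getClass());
            }
        }
        return mapStrStr;
    }
}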
@ -680,7 +680,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
} else if (currentFieldName.equals("max_word_length")) {
settings.maxWordLength = parser.intValue();
} else {
throw new ElasticsearchParseException("failed to parse term vectors request. " +
throw new OpenSearchParseException("failed to parse term vectors request. " +
"the field [{}] is not valid for filter parameter for term vector request", currentFieldName);
}
}


@ -19,7 +19,7 @@
package org.elasticsearch.cluster.metadata;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.support.IndicesOptions;
@ -1123,7 +1123,7 @@ public class IndexNameExpressionResolver {
inDateFormat = true;
inPlaceHolderSb.append(c);
} else {
throw new ElasticsearchParseException("invalid dynamic name expression [{}]." +
throw new OpenSearchParseException("invalid dynamic name expression [{}]." +
" invalid character in placeholder at position [{}]", new String(text, from, length), i);
}
break;
@ -1147,11 +1147,11 @@ public class IndexNameExpressionResolver {
timeZone = ZoneOffset.UTC;
} else {
if (inPlaceHolderString.lastIndexOf(RIGHT_BOUND) != inPlaceHolderString.length() - 1) {
throw new ElasticsearchParseException("invalid dynamic name expression [{}]. missing closing `}`" +
throw new OpenSearchParseException("invalid dynamic name expression [{}]. missing closing `}`" +
" for date math format", inPlaceHolderString);
}
if (dateTimeFormatLeftBoundIndex == inPlaceHolderString.length() - 2) {
throw new ElasticsearchParseException("invalid dynamic name expression [{}]. missing date format",
throw new OpenSearchParseException("invalid dynamic name expression [{}]. missing date format",
inPlaceHolderString);
}
mathExpression = inPlaceHolderString.substring(0, dateTimeFormatLeftBoundIndex);
@ -1194,7 +1194,7 @@ public class IndexNameExpressionResolver {
case RIGHT_BOUND:
if (!escapedChar) {
throw new ElasticsearchParseException("invalid dynamic name expression [{}]." +
throw new OpenSearchParseException("invalid dynamic name expression [{}]." +
" invalid character at position [{}]. `{` and `}` are reserved characters and" +
" should be escaped when used as part of the index name using `\\` (e.g. `\\{text\\}`)",
new String(text, from, length), i);
@ -1206,11 +1206,11 @@ public class IndexNameExpressionResolver {
}
if (inPlaceHolder) {
throw new ElasticsearchParseException("invalid dynamic name expression [{}]. date math placeholder is open ended",
throw new OpenSearchParseException("invalid dynamic name expression [{}]. date math placeholder is open ended",
new String(text, from, length));
}
if (beforePlaceHolderSb.length() == 0) {
throw new ElasticsearchParseException("nothing captured");
throw new OpenSearchParseException("nothing captured");
}
return beforePlaceHolderSb.toString();
}
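
This scanner implements date-math index names: a static prefix plus a {now/...} placeholder, with { and } escaped when they appear literally. A few hypothetical expressions illustrating the grammar (shown only as strings; resolution happens server-side when a request names an index):

class DateMathNameExamples {
    public static void main(String[] args) {
        String daily   = "<logstash-{now/d}>";               // rounds to the current day
        String monthly = "<logstash-{now/M{yyyy.MM}}>";      // custom date format in the inner braces
        String escaped = "<logstash-\\{literal\\}-{now/d}>"; // literal braces must be escaped with \
        // Rejected inputs raise OpenSearchParseException with the messages above,
        // e.g. "<logstash-{now/d{}}>" (missing date format) or an unescaped brace.
        System.out.println(daily + "\n" + monthly + "\n" + escaped);
    }
}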


@ -20,7 +20,7 @@ package org.elasticsearch.cluster.metadata;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.cluster.Diff;
@ -498,7 +498,7 @@ public class IndexTemplateMetadata extends AbstractDiffable<IndexTemplateMetadat
builder.putAlias(AliasMetadata.Builder.fromXContent(parser));
}
} else {
throw new ElasticsearchParseException("unknown key [{}] for index template", currentFieldName);
throw new OpenSearchParseException("unknown key [{}] for index template", currentFieldName);
}
} else if (token == XContentParser.Token.START_ARRAY) {
if ("mappings".equals(currentFieldName)) {


@ -19,7 +19,7 @@
package org.elasticsearch.cluster.metadata;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.cluster.Diff;
@ -144,7 +144,7 @@ public class MappingMetadata extends AbstractDiffable<MappingMetadata> {
/**
* Converts the serialized compressed form of the mappings into a parsed map.
*/
public Map<String, Object> sourceAsMap() throws ElasticsearchParseException {
public Map<String, Object> sourceAsMap() throws OpenSearchParseException {
Map<String, Object> mapping = XContentHelper.convertToMap(source.compressedReference(), true).v2();
if (mapping.size() == 1 && mapping.containsKey(type())) {
// the type name is the root value, reduce it
@ -156,7 +156,7 @@ public class MappingMetadata extends AbstractDiffable<MappingMetadata> {
/**
* Converts the serialized compressed form of the mappings into a parsed map.
*/
public Map<String, Object> getSourceAsMap() throws ElasticsearchParseException {
public Map<String, Object> getSourceAsMap() throws OpenSearchParseException {
return sourceAsMap();
}


@ -19,7 +19,7 @@
package org.elasticsearch.cluster.metadata;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.AbstractNamedDiffable;
import org.elasticsearch.cluster.NamedDiff;
@ -186,7 +186,7 @@ public class RepositoriesMetadata extends AbstractNamedDiffable<Custom> implemen
if (token == XContentParser.Token.FIELD_NAME) {
String name = parser.currentName();
if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
throw new ElasticsearchParseException("failed to parse repository [{}], expected object", name);
throw new OpenSearchParseException("failed to parse repository [{}], expected object", name);
}
String type = null;
Settings settings = Settings.EMPTY;
@ -197,38 +197,38 @@ public class RepositoriesMetadata extends AbstractNamedDiffable<Custom> implemen
String currentFieldName = parser.currentName();
if ("type".equals(currentFieldName)) {
if (parser.nextToken() != XContentParser.Token.VALUE_STRING) {
throw new ElasticsearchParseException("failed to parse repository [{}], unknown type", name);
throw new OpenSearchParseException("failed to parse repository [{}], unknown type", name);
}
type = parser.text();
} else if ("settings".equals(currentFieldName)) {
if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
throw new ElasticsearchParseException("failed to parse repository [{}], incompatible params", name);
throw new OpenSearchParseException("failed to parse repository [{}], incompatible params", name);
}
settings = Settings.fromXContent(parser);
} else if ("generation".equals(currentFieldName)) {
if (parser.nextToken() != XContentParser.Token.VALUE_NUMBER) {
throw new ElasticsearchParseException("failed to parse repository [{}], unknown type", name);
throw new OpenSearchParseException("failed to parse repository [{}], unknown type", name);
}
generation = parser.longValue();
} else if ("pending_generation".equals(currentFieldName)) {
if (parser.nextToken() != XContentParser.Token.VALUE_NUMBER) {
throw new ElasticsearchParseException("failed to parse repository [{}], unknown type", name);
throw new OpenSearchParseException("failed to parse repository [{}], unknown type", name);
}
pendingGeneration = parser.longValue();
} else {
throw new ElasticsearchParseException("failed to parse repository [{}], unknown field [{}]",
throw new OpenSearchParseException("failed to parse repository [{}], unknown field [{}]",
name, currentFieldName);
}
} else {
throw new ElasticsearchParseException("failed to parse repository [{}]", name);
throw new OpenSearchParseException("failed to parse repository [{}]", name);
}
}
if (type == null) {
throw new ElasticsearchParseException("failed to parse repository [{}], missing repository type", name);
throw new OpenSearchParseException("failed to parse repository [{}], missing repository type", name);
}
repository.add(new RepositoryMetadata(name, type, settings, generation, pendingGeneration));
} else {
throw new ElasticsearchParseException("failed to parse repositories");
throw new OpenSearchParseException("failed to parse repositories");
}
}
return new RepositoriesMetadata(repository);
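
Each parsed entry carries a type, its settings, and the two repository generation counters. Building the equivalent metadata programmatically (a sketch using the constructors visible above; "backups" and the fs location are made-up values):

import org.elasticsearch.cluster.metadata.RepositoriesMetadata;
import org.elasticsearch.cluster.metadata.RepositoryMetadata;
import org.elasticsearch.common.settings.Settings;

import java.util.Collections;

class RepositoriesMetadataDemo {
    public static void main(String[] args) {
        // Equivalent to parsing:
        // {"backups": {"type": "fs", "settings": {"location": "/mnt/backups"},
        //              "generation": 1, "pending_generation": 1}}
        Settings settings = Settings.builder().put("location", "/mnt/backups").build();
        RepositoryMetadata repo = new RepositoryMetadata("backups", "fs", settings, 1, 1);
        RepositoriesMetadata metadata = new RepositoriesMetadata(Collections.singletonList(repo));
        System.out.println(metadata.repositories().size()); // 1
    }
}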


@ -19,7 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
@ -183,12 +183,12 @@ public class DiskThresholdSettings {
try {
doValidateAsPercentage(low, high, flood);
return; // early return so that we do not try to parse as bytes
} catch (final ElasticsearchParseException e) {
} catch (final OpenSearchParseException e) {
// swallow as we are now going to try to parse as bytes
}
try {
doValidateAsBytes(low, high, flood);
} catch (final ElasticsearchParseException e) {
} catch (final OpenSearchParseException e) {
final String message = String.format(
Locale.ROOT,
"unable to consistently parse [%s=%s], [%s=%s], and [%s=%s] as percentage or bytes",
@ -350,7 +350,7 @@ public class DiskThresholdSettings {
/**
* Attempts to parse the watermark into a percentage, returning 100.0% if it can not be parsed and the specified lenient parameter is
* true, otherwise throwing an {@link ElasticsearchParseException}.
* true, otherwise throwing an {@link OpenSearchParseException}.
*
* @param watermark the watermark to parse as a percentage
* @param lenient true if lenient parsing should be applied
@ -359,7 +359,7 @@ public class DiskThresholdSettings {
private static double thresholdPercentageFromWatermark(String watermark, boolean lenient) {
try {
return RatioValue.parseRatioValue(watermark).getAsPercent();
} catch (ElasticsearchParseException ex) {
} catch (OpenSearchParseException ex) {
// NOTE: this is not end-user leniency, since up above we check that it's a valid byte or percentage, and then store the two
// cases separately
if (lenient) {
@ -379,7 +379,7 @@ public class DiskThresholdSettings {
/**
* Attempts to parse the watermark into a {@link ByteSizeValue}, returning zero bytes if it can not be parsed and the specified lenient
* parameter is true, otherwise throwing an {@link ElasticsearchParseException}.
* parameter is true, otherwise throwing an {@link OpenSearchParseException}.
*
* @param watermark the watermark to parse as a byte size
* @param settingName the name of the setting
@ -389,7 +389,7 @@ public class DiskThresholdSettings {
private static ByteSizeValue thresholdBytesFromWatermark(String watermark, String settingName, boolean lenient) {
try {
return ByteSizeValue.parseBytesSizeValue(watermark, settingName);
} catch (ElasticsearchParseException ex) {
} catch (OpenSearchParseException ex) {
// NOTE: this is not end-user leniency, since up above we check that it's a valid byte or percentage, and then store the two
// cases separately
if (lenient) {
@ -406,10 +406,10 @@ public class DiskThresholdSettings {
private static String validWatermarkSetting(String watermark, String settingName) {
try {
RatioValue.parseRatioValue(watermark);
} catch (ElasticsearchParseException e) {
} catch (OpenSearchParseException e) {
try {
ByteSizeValue.parseBytesSizeValue(watermark, settingName);
} catch (ElasticsearchParseException ex) {
} catch (OpenSearchParseException ex) {
ex.addSuppressed(e);
throw ex;
}
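
The watermark grammar is therefore "ratio or percentage first, byte size as the fallback", and OpenSearchParseException is the signal that moves validation from one branch to the other. A minimal sketch of the two parsers involved, assuming the server jar at this commit:

import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.RatioValue;

class WatermarkParseDemo {
    public static void main(String[] args) {
        System.out.println(RatioValue.parseRatioValue("85%").getAsPercent()); // 85.0
        try {
            RatioValue.parseRatioValue("10gb"); // not a ratio or percentage
        } catch (OpenSearchParseException e) {
            // fall back to byte-size parsing, as validWatermarkSetting does
            System.out.println(ByteSizeValue.parseBytesSizeValue("10gb", "watermark")); // 10gb
        }
    }
}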


@ -19,8 +19,8 @@
package org.elasticsearch.cluster.routing.allocation.command;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.routing.allocation.RoutingExplanations;
import org.elasticsearch.common.Strings;
@ -135,20 +135,20 @@ public class AllocationCommands implements ToXContentFragment {
XContentParser.Token token = parser.currentToken();
if (token == null) {
throw new ElasticsearchParseException("No commands");
throw new OpenSearchParseException("No commands");
}
if (token == XContentParser.Token.FIELD_NAME) {
if (!parser.currentName().equals("commands")) {
throw new ElasticsearchParseException("expected field name to be named [commands], got [{}] instead", parser.currentName());
throw new OpenSearchParseException("expected field name to be named [commands], got [{}] instead", parser.currentName());
}
token = parser.nextToken();
if (token != XContentParser.Token.START_ARRAY) {
throw new ElasticsearchParseException("commands should follow with an array element");
throw new OpenSearchParseException("commands should follow with an array element");
}
} else if (token == XContentParser.Token.START_ARRAY) {
// ok...
} else {
throw new ElasticsearchParseException("expected either field name [commands], or start array, got [{}] instead", token);
throw new OpenSearchParseException("expected either field name [commands], or start array, got [{}] instead", token);
}
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.START_OBJECT) {
@ -159,11 +159,11 @@ public class AllocationCommands implements ToXContentFragment {
commands.add(parser.namedObject(AllocationCommand.class, commandName, null));
// move to the end object one
if (parser.nextToken() != XContentParser.Token.END_OBJECT) {
throw new ElasticsearchParseException("allocation command is malformed, done parsing a command," +
throw new OpenSearchParseException("allocation command is malformed, done parsing a command," +
" but didn't get END_OBJECT, got [{}] instead", token);
}
} else {
throw new ElasticsearchParseException("allocation command is malformed, got [{}] instead", token);
throw new OpenSearchParseException("allocation command is malformed, got [{}] instead", token);
}
}
return commands;


@ -20,7 +20,7 @@
package org.elasticsearch.cluster.routing.allocation.command;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.RoutingNode;
@ -193,20 +193,20 @@ public class CancelAllocationCommand implements AllocationCommand {
} else if ("allow_primary".equals(currentFieldName) || "allowPrimary".equals(currentFieldName)) {
allowPrimary = parser.booleanValue();
} else {
throw new ElasticsearchParseException("[{}] command does not support field [{}]", NAME, currentFieldName);
throw new OpenSearchParseException("[{}] command does not support field [{}]", NAME, currentFieldName);
}
} else {
throw new ElasticsearchParseException("[{}] command does not support complex json tokens [{}]", NAME, token);
throw new OpenSearchParseException("[{}] command does not support complex json tokens [{}]", NAME, token);
}
}
if (index == null) {
throw new ElasticsearchParseException("[{}] command missing the index parameter", NAME);
throw new OpenSearchParseException("[{}] command missing the index parameter", NAME);
}
if (shardId == -1) {
throw new ElasticsearchParseException("[{}] command missing the shard parameter", NAME);
throw new OpenSearchParseException("[{}] command missing the shard parameter", NAME);
}
if (nodeId == null) {
throw new ElasticsearchParseException("[{}] command missing the node parameter", NAME);
throw new OpenSearchParseException("[{}] command missing the node parameter", NAME);
}
return new CancelAllocationCommand(index, shardId, nodeId, allowPrimary);
}


@ -19,7 +19,7 @@
package org.elasticsearch.cluster.routing.allocation.command;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.ShardRouting;
@ -190,23 +190,23 @@ public class MoveAllocationCommand implements AllocationCommand {
} else if ("to_node".equals(currentFieldName) || "toNode".equals(currentFieldName)) {
toNode = parser.text();
} else {
throw new ElasticsearchParseException("[{}] command does not support field [{}]", NAME, currentFieldName);
throw new OpenSearchParseException("[{}] command does not support field [{}]", NAME, currentFieldName);
}
} else {
throw new ElasticsearchParseException("[{}] command does not support complex json tokens [{}]", NAME, token);
throw new OpenSearchParseException("[{}] command does not support complex json tokens [{}]", NAME, token);
}
}
if (index == null) {
throw new ElasticsearchParseException("[{}] command missing the index parameter", NAME);
throw new OpenSearchParseException("[{}] command missing the index parameter", NAME);
}
if (shardId == -1) {
throw new ElasticsearchParseException("[{}] command missing the shard parameter", NAME);
throw new OpenSearchParseException("[{}] command missing the shard parameter", NAME);
}
if (fromNode == null) {
throw new ElasticsearchParseException("[{}] command missing the from_node parameter", NAME);
throw new OpenSearchParseException("[{}] command missing the from_node parameter", NAME);
}
if (toNode == null) {
throw new ElasticsearchParseException("[{}] command missing the to_node parameter", NAME);
throw new OpenSearchParseException("[{}] command missing the to_node parameter", NAME);
}
return new MoveAllocationCommand(index, shardId, fromNode, toNode);
}
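
Putting the parsers together, a reroute body is a "commands" field (or a bare array) of single-key command objects whose required parameters are exactly the ones checked above. An illustrative body (index and node names are made up):

class RerouteCommandsExample {
    public static void main(String[] args) {
        String body = "{\"commands\": ["
            + "{\"move\": {\"index\": \"test\", \"shard\": 0,"
            + " \"from_node\": \"node1\", \"to_node\": \"node2\"}},"
            + "{\"cancel\": {\"index\": \"test\", \"shard\": 1,"
            + " \"node\": \"node3\", \"allow_primary\": false}}"
            + "]}";
        System.out.println(body);
    }
}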


@ -18,7 +18,7 @@
*/
package org.elasticsearch.common.geo;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -173,10 +173,10 @@ public class GeoBoundingBox implements ToXContentObject, Writeable {
/**
* Parses the bounding box and returns bottom, top, left, right coordinates
*/
public static GeoBoundingBox parseBoundingBox(XContentParser parser) throws IOException, ElasticsearchParseException {
public static GeoBoundingBox parseBoundingBox(XContentParser parser) throws IOException, OpenSearchParseException {
XContentParser.Token token = parser.currentToken();
if (token != XContentParser.Token.START_OBJECT) {
throw new ElasticsearchParseException("failed to parse bounding box. Expected start object but found [{}]", token);
throw new OpenSearchParseException("failed to parse bounding box. Expected start object but found [{}]", token);
}
double top = Double.NaN;
@ -196,12 +196,12 @@ public class GeoBoundingBox implements ToXContentObject, Writeable {
try {
Geometry geometry = WKT_PARSER.fromWKT(parser.text());
if (ShapeType.ENVELOPE.equals(geometry.type()) == false) {
throw new ElasticsearchParseException("failed to parse WKT bounding box. ["
throw new OpenSearchParseException("failed to parse WKT bounding box. ["
+ geometry.type() + "] found. expected [" + ShapeType.ENVELOPE + "]");
}
envelope = (Rectangle) geometry;
} catch (ParseException|IllegalArgumentException e) {
throw new ElasticsearchParseException("failed to parse WKT bounding box", e);
throw new OpenSearchParseException("failed to parse WKT bounding box", e);
}
} else if (TOP_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
top = parser.doubleValue();
@ -229,17 +229,17 @@ public class GeoBoundingBox implements ToXContentObject, Writeable {
bottom = sparse.getLat();
left = sparse.getLon();
} else {
throw new ElasticsearchParseException("failed to parse bounding box. unexpected field [{}]", currentFieldName);
throw new OpenSearchParseException("failed to parse bounding box. unexpected field [{}]", currentFieldName);
}
}
} else {
throw new ElasticsearchParseException("failed to parse bounding box. field name expected but [{}] found", token);
throw new OpenSearchParseException("failed to parse bounding box. field name expected but [{}] found", token);
}
}
if (envelope != null) {
if (Double.isNaN(top) == false || Double.isNaN(bottom) == false || Double.isNaN(left) == false ||
Double.isNaN(right) == false) {
throw new ElasticsearchParseException("failed to parse bounding box. Conflicting definition found "
throw new OpenSearchParseException("failed to parse bounding box. Conflicting definition found "
+ "using well-known text and explicit corners.");
}
GeoPoint topLeft = new GeoPoint(envelope.getMaxLat(), envelope.getMinLon());
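
So a bounding box may be given either as explicit corners or as a WKT envelope, and mixing the two forms in one object is rejected. Two equivalent illustrative inputs (coordinates are made up; WKT BBOX order is minLon, maxLon, maxLat, minLat):

class BoundingBoxForms {
    public static void main(String[] args) {
        String corners = "{\"top_left\": {\"lat\": 40.73, \"lon\": -74.1},"
            + " \"bottom_right\": {\"lat\": 40.01, \"lon\": -71.12}}";
        String wkt = "{\"wkt\": \"BBOX (-74.1, -71.12, 40.73, 40.01)\"}";
        System.out.println(corners + "\n" + wkt);
    }
}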


@ -20,7 +20,7 @@
package org.elasticsearch.common.geo;
import org.elasticsearch.OpenSearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.geo.parsers.ShapeParser;
import org.elasticsearch.common.unit.DistanceUnit;
@ -375,10 +375,10 @@ public final class GeoJson {
}
if (shapeType == ShapeType.GEOMETRYCOLLECTION) {
if (geometries == null) {
throw new ElasticsearchParseException("geometries not included");
throw new OpenSearchParseException("geometries not included");
}
if (coordinates != null) {
throw new ElasticsearchParseException("parameter coordinates is not supported for type " + type);
throw new OpenSearchParseException("parameter coordinates is not supported for type " + type);
}
verifyNulls(type, null, orientation, radius);
return new GeometryCollection<>(geometries);
@ -386,13 +386,13 @@ public final class GeoJson {
// We expect to have coordinates for all the rest
if (coordinates == null) {
throw new ElasticsearchParseException("coordinates not included");
throw new OpenSearchParseException("coordinates not included");
}
switch (shapeType) {
case CIRCLE:
if (radius == null) {
throw new ElasticsearchParseException("radius is not specified");
throw new OpenSearchParseException("radius is not specified");
}
verifyNulls(type, geometries, orientation, null);
Point point = coordinates.asPoint();
@ -421,7 +421,7 @@ public final class GeoJson {
verifyNulls(type, geometries, orientation, radius);
return coordinates.asRectangle();
default:
throw new ElasticsearchParseException("unsupported shape type " + type);
throw new OpenSearchParseException("unsupported shape type " + type);
}
}
@ -430,13 +430,13 @@ public final class GeoJson {
*/
private static void verifyNulls(String type, List<Geometry> geometries, Boolean orientation, DistanceUnit.Distance radius) {
if (geometries != null) {
throw new ElasticsearchParseException("parameter geometries is not supported for type " + type);
throw new OpenSearchParseException("parameter geometries is not supported for type " + type);
}
if (orientation != null) {
throw new ElasticsearchParseException("parameter orientation is not supported for type " + type);
throw new OpenSearchParseException("parameter orientation is not supported for type " + type);
}
if (radius != null) {
throw new ElasticsearchParseException("parameter radius is not supported for type " + type);
throw new OpenSearchParseException("parameter radius is not supported for type " + type);
}
}
@ -459,7 +459,7 @@ public final class GeoJson {
while (token != XContentParser.Token.END_ARRAY) {
CoordinateNode node = parseCoordinates(parser);
if (nodes.isEmpty() == false && nodes.get(0).numDimensions() != node.numDimensions()) {
throw new ElasticsearchParseException("Exception parsing coordinates: number of dimensions do not match");
throw new OpenSearchParseException("Exception parsing coordinates: number of dimensions do not match");
}
nodes.add(node);
token = parser.nextToken();
@ -474,11 +474,11 @@ public final class GeoJson {
private static Point parseCoordinate(XContentParser parser) throws IOException {
// Add support for coerce here
if (parser.currentToken() != XContentParser.Token.VALUE_NUMBER) {
throw new ElasticsearchParseException("geo coordinates must be numbers");
throw new OpenSearchParseException("geo coordinates must be numbers");
}
double lon = parser.doubleValue();
if (parser.nextToken() != XContentParser.Token.VALUE_NUMBER) {
throw new ElasticsearchParseException("geo coordinates must be numbers");
throw new OpenSearchParseException("geo coordinates must be numbers");
}
double lat = parser.doubleValue();
XContentParser.Token token = parser.nextToken();
@ -490,7 +490,7 @@ public final class GeoJson {
}
// do not support > 3 dimensions
if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) {
throw new ElasticsearchParseException("geo coordinates greater than 3 dimensions are not supported");
throw new OpenSearchParseException("geo coordinates greater than 3 dimensions are not supported");
}
return new Point(lon, lat, alt);
}
@ -724,7 +724,7 @@ public final class GeoJson {
public Rectangle asRectangle() {
if (children.size() != 2) {
throw new ElasticsearchParseException(
throw new OpenSearchParseException(
"invalid number of points [{}] provided for geo_shape [{}] when expecting an array of 2 coordinates",
children.size(), ShapeType.ENVELOPE);
}
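
In short: geometrycollection takes geometries, every other type takes coordinates, circle additionally requires a radius, and points carry at most three dimensions. Two illustrative documents matching those rules (values are made up):

class GeoJsonShapeExamples {
    public static void main(String[] args) {
        String circle = "{\"type\": \"circle\", \"coordinates\": [13.0, 52.0], \"radius\": \"100m\"}";
        String collection = "{\"type\": \"geometrycollection\", \"geometries\": "
            + "[{\"type\": \"point\", \"coordinates\": [13.0, 52.0]}]}";
        System.out.println(circle + "\n" + collection);
    }
}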


@ -25,10 +25,10 @@ import org.apache.lucene.geo.GeoEncodingUtils;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.util.BitUtil;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.geo.GeoUtils.EffectivePoint;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.geometry.Geometry;
import org.elasticsearch.geometry.Point;
import org.elasticsearch.geometry.Rectangle;
@ -103,7 +103,7 @@ public class GeoPoint implements ToXContentFragment {
public GeoPoint resetFromCoordinates(String value, final boolean ignoreZValue) {
String[] vals = value.split(",");
if (vals.length > 3) {
throw new ElasticsearchParseException("failed to parse [{}], expected 2 or 3 coordinates "
throw new OpenSearchParseException("failed to parse [{}], expected 2 or 3 coordinates "
+ "but found: [{}]", vals.length);
}
final double lat;
@ -111,12 +111,12 @@ public class GeoPoint implements ToXContentFragment {
try {
lat = Double.parseDouble(vals[0].trim());
} catch (NumberFormatException ex) {
throw new ElasticsearchParseException("latitude must be a number");
throw new OpenSearchParseException("latitude must be a number");
}
try {
lon = Double.parseDouble(vals[1].trim());
} catch (NumberFormatException ex) {
throw new ElasticsearchParseException("longitude must be a number");
throw new OpenSearchParseException("longitude must be a number");
}
if (vals.length > 2) {
GeoPoint.assertZValue(ignoreZValue, Double.parseDouble(vals[2].trim()));
@ -130,10 +130,10 @@ public class GeoPoint implements ToXContentFragment {
geometry = new WellKnownText(false, new GeographyValidator(ignoreZValue))
.fromWKT(value);
} catch (Exception e) {
throw new ElasticsearchParseException("Invalid WKT format", e);
throw new OpenSearchParseException("Invalid WKT format", e);
}
if (geometry.type() != ShapeType.POINT) {
throw new ElasticsearchParseException("[geo_point] supports only POINT among WKT primitives, " +
throw new OpenSearchParseException("[geo_point] supports only POINT among WKT primitives, " +
"but found " + geometry.type());
}
Point point = (Point) geometry;
@ -187,7 +187,7 @@ public class GeoPoint implements ToXContentFragment {
try {
hash = Geohash.mortonEncode(geohash);
} catch (IllegalArgumentException ex) {
throw new ElasticsearchParseException(ex.getMessage(), ex);
throw new OpenSearchParseException(ex.getMessage(), ex);
}
return this.reset(Geohash.decodeLatitude(hash), Geohash.decodeLongitude(hash));
}
@ -265,7 +265,7 @@ public class GeoPoint implements ToXContentFragment {
public static double assertZValue(final boolean ignoreZValue, double zValue) {
if (ignoreZValue == false) {
throw new ElasticsearchParseException("Exception parsing coordinates: found Z value [{}] but [{}] "
throw new OpenSearchParseException("Exception parsing coordinates: found Z value [{}] but [{}] "
+ "parameter is [{}]", zValue, IGNORE_Z_VALUE, ignoreZValue);
}
return zValue;
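
GeoPoint therefore accepts a "lat,lon" coordinate string (with an optional third value gated by ignore_z_value), a geohash, or a WKT POINT. A usage sketch built on the methods shown in this file (coordinates are made up; note that WKT uses lon-lat order):

import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;

class GeoPointFormats {
    public static void main(String[] args) {
        GeoPoint point = new GeoPoint();
        point.resetFromCoordinates("41.12,-71.34", false);
        System.out.println(point.getLat() + " " + point.getLon()); // 41.12 -71.34
        point.resetFromString("POINT (-71.34 41.12)", false, GeoUtils.EffectivePoint.BOTTOM_LEFT);
        System.out.println(point); // same location, parsed from WKT
    }
}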


@ -18,7 +18,7 @@
*/
package org.elasticsearch.common.geo;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.geo.builders.CircleBuilder;
import org.elasticsearch.common.geo.builders.CoordinatesBuilder;
import org.elasticsearch.common.geo.builders.EnvelopeBuilder;
@ -56,10 +56,10 @@ public enum GeoShapeType {
@Override
CoordinateNode validate(CoordinateNode coordinates, boolean coerce) {
if (coordinates.isEmpty()) {
throw new ElasticsearchParseException(
throw new OpenSearchParseException(
"invalid number of points (0) provided when expecting a single coordinate ([lat, lng])");
} else if (coordinates.children != null) {
throw new ElasticsearchParseException("multipoint data provided when single point data expected.");
throw new OpenSearchParseException("multipoint data provided when single point data expected.");
}
return coordinates;
}
@ -80,10 +80,10 @@ public enum GeoShapeType {
CoordinateNode validate(CoordinateNode coordinates, boolean coerce) {
if (coordinates.children == null || coordinates.children.isEmpty()) {
if (coordinates.coordinate != null) {
throw new ElasticsearchParseException("single coordinate found when expecting an array of " +
throw new OpenSearchParseException("single coordinate found when expecting an array of " +
"coordinates. change type to point or change data to an array of >0 coordinates");
}
throw new ElasticsearchParseException("no data provided for multipoint object when expecting " +
throw new OpenSearchParseException("no data provided for multipoint object when expecting " +
">0 points (e.g., [[lat, lng]] or [[lat, lng], ...])");
} else {
for (CoordinateNode point : coordinates.children) {
@ -109,7 +109,7 @@ public enum GeoShapeType {
@Override
CoordinateNode validate(CoordinateNode coordinates, boolean coerce) {
if (coordinates.children.size() < 2) {
throw new ElasticsearchParseException("invalid number of points in LineString (found [{}] - must be >= 2)",
throw new OpenSearchParseException("invalid number of points in LineString (found [{}] - must be >= 2)",
coordinates.children.size());
}
return coordinates;
@ -130,7 +130,7 @@ public enum GeoShapeType {
@Override
CoordinateNode validate(CoordinateNode coordinates, boolean coerce) {
if (coordinates.children.size() < 1) {
throw new ElasticsearchParseException("invalid number of lines in MultiLineString (found [{}] - must be >= 1)",
throw new OpenSearchParseException("invalid number of lines in MultiLineString (found [{}] - must be >= 1)",
coordinates.children.size());
}
return coordinates;
@ -159,12 +159,12 @@ public enum GeoShapeType {
String error = "Invalid LinearRing found.";
error += (coordinates.coordinate == null) ?
" No coordinate array provided" : " Found a single coordinate when expecting a coordinate array";
throw new ElasticsearchParseException(error);
throw new OpenSearchParseException(error);
}
int numValidPts = coerce ? 3 : 4;
if (coordinates.children.size() < numValidPts) {
throw new ElasticsearchParseException("invalid number of points in LinearRing (found [{}] - must be >= [{}])",
throw new OpenSearchParseException("invalid number of points in LinearRing (found [{}] - must be >= [{}])",
coordinates.children.size(), numValidPts);
}
// close linear ring iff coerce is set and ring is open, otherwise throw parse exception
@ -173,7 +173,7 @@ public enum GeoShapeType {
if (coerce) {
coordinates.children.add(coordinates.children.get(0));
} else {
throw new ElasticsearchParseException("invalid LinearRing found (coordinates are not closed)");
throw new OpenSearchParseException("invalid LinearRing found (coordinates are not closed)");
}
}
}
@ -187,7 +187,7 @@ public enum GeoShapeType {
* represented as a GeoJSON geometry type, it is referred to in the Polygon geometry type definition.
*/
if (coordinates.children == null || coordinates.children.isEmpty()) {
throw new ElasticsearchParseException(
throw new OpenSearchParseException(
"invalid LinearRing provided for type polygon. Linear ring must be an array of coordinates");
}
for (CoordinateNode ring : coordinates.children) {
@ -230,7 +230,7 @@ public enum GeoShapeType {
CoordinateNode validate(CoordinateNode coordinates, boolean coerce) {
// validate the coordinate array for envelope type
if (coordinates.children.size() != 2) {
throw new ElasticsearchParseException(
throw new OpenSearchParseException(
"invalid number of points [{}] provided for geo_shape [{}] when expecting an array of 2 coordinates",
coordinates.children.size(), GeoShapeType.ENVELOPE.shapename);
}


@ -22,7 +22,7 @@ package org.elasticsearch.common.geo;
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
import org.apache.lucene.util.SloppyMath;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@ -354,12 +354,12 @@ public class GeoUtils {
* @param parser {@link XContentParser} to parse the value from
* @return new {@link GeoPoint} parsed from the parser
*/
public static GeoPoint parseGeoPoint(XContentParser parser) throws IOException, ElasticsearchParseException {
public static GeoPoint parseGeoPoint(XContentParser parser) throws IOException, OpenSearchParseException {
return parseGeoPoint(parser, new GeoPoint());
}
public static GeoPoint parseGeoPoint(XContentParser parser, GeoPoint point) throws IOException, ElasticsearchParseException {
public static GeoPoint parseGeoPoint(XContentParser parser, GeoPoint point) throws IOException, OpenSearchParseException {
return parseGeoPoint(parser, point, false);
}
@ -372,7 +372,7 @@ public class GeoUtils {
* <p>
* Array: two or more elements, the first element is longitude, the second is latitude, the rest is ignored if ignoreZValue is true
*/
public static GeoPoint parseGeoPoint(Object value, final boolean ignoreZValue) throws ElasticsearchParseException {
public static GeoPoint parseGeoPoint(Object value, final boolean ignoreZValue) throws OpenSearchParseException {
return parseGeoPoint(value, new GeoPoint(), ignoreZValue);
}
@ -385,7 +385,7 @@ public class GeoUtils {
* <p>
* Array: two or more elements, the first element is longitude, the second is latitude, the rest is ignored if ignoreZValue is true
*/
public static GeoPoint parseGeoPoint(Object value, GeoPoint point, final boolean ignoreZValue) throws ElasticsearchParseException {
public static GeoPoint parseGeoPoint(Object value, GeoPoint point, final boolean ignoreZValue) throws OpenSearchParseException {
try (XContentParser parser = new MapXContentParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE,
Collections.singletonMap("null_value", value), null)) {
parser.nextToken(); // start object
@ -393,7 +393,7 @@ public class GeoUtils {
parser.nextToken(); // field value
return parseGeoPoint(parser, point, ignoreZValue);
} catch (IOException ex) {
throw new ElasticsearchParseException("error parsing geopoint", ex);
throw new OpenSearchParseException("error parsing geopoint", ex);
}
}
@ -412,7 +412,7 @@ public class GeoUtils {
* the left bottom corner of the geohash cell is used as the geopoint coordinates.
*/
public static GeoPoint parseGeoPoint(XContentParser parser, GeoPoint point, final boolean ignoreZValue)
throws IOException, ElasticsearchParseException {
throws IOException, OpenSearchParseException {
return parseGeoPoint(parser, point, ignoreZValue, EffectivePoint.BOTTOM_LEFT);
}
@ -431,7 +431,7 @@ public class GeoUtils {
* @return new {@link GeoPoint} parsed from the parser
*/
public static GeoPoint parseGeoPoint(XContentParser parser, GeoPoint point, final boolean ignoreZValue, EffectivePoint effectivePoint)
throws IOException, ElasticsearchParseException {
throws IOException, OpenSearchParseException {
double lat = Double.NaN;
double lon = Double.NaN;
String geohash = null;
@ -454,7 +454,7 @@ public class GeoUtils {
}
break;
default:
throw new ElasticsearchParseException("latitude must be a number");
throw new OpenSearchParseException("latitude must be a number");
}
} else if (LONGITUDE.equals(field)) {
subParser.nextToken();
@ -468,35 +468,35 @@ public class GeoUtils {
}
break;
default:
throw new ElasticsearchParseException("longitude must be a number");
throw new OpenSearchParseException("longitude must be a number");
}
} else if (GEOHASH.equals(field)) {
if (subParser.nextToken() == Token.VALUE_STRING) {
geohash = subParser.text();
} else {
throw new ElasticsearchParseException("geohash must be a string");
throw new OpenSearchParseException("geohash must be a string");
}
} else {
throw new ElasticsearchParseException("field must be either [{}], [{}] or [{}]", LATITUDE, LONGITUDE, GEOHASH);
throw new OpenSearchParseException("field must be either [{}], [{}] or [{}]", LATITUDE, LONGITUDE, GEOHASH);
}
} else {
throw new ElasticsearchParseException("token [{}] not allowed", subParser.currentToken());
throw new OpenSearchParseException("token [{}] not allowed", subParser.currentToken());
}
}
}
if (geohash != null) {
if(!Double.isNaN(lat) || !Double.isNaN(lon)) {
throw new ElasticsearchParseException("field must be either lat/lon or geohash");
throw new OpenSearchParseException("field must be either lat/lon or geohash");
} else {
return point.parseGeoHash(geohash, effectivePoint);
}
} else if (numberFormatException != null) {
throw new ElasticsearchParseException("[{}] and [{}] must be valid double values", numberFormatException, LATITUDE,
throw new OpenSearchParseException("[{}] and [{}] must be valid double values", numberFormatException, LATITUDE,
LONGITUDE);
} else if (Double.isNaN(lat)) {
throw new ElasticsearchParseException("field [{}] missing", LATITUDE);
throw new OpenSearchParseException("field [{}] missing", LATITUDE);
} else if (Double.isNaN(lon)) {
throw new ElasticsearchParseException("field [{}] missing", LONGITUDE);
throw new OpenSearchParseException("field [{}] missing", LONGITUDE);
} else {
return point.reset(lat, lon);
}
@ -514,10 +514,10 @@ public class GeoUtils {
} else if (element == 3) {
GeoPoint.assertZValue(ignoreZValue, subParser.doubleValue());
} else {
throw new ElasticsearchParseException("[geo_point] field type does not accept > 3 dimensions");
throw new OpenSearchParseException("[geo_point] field type does not accept > 3 dimensions");
}
} else {
throw new ElasticsearchParseException("numeric value expected");
throw new OpenSearchParseException("numeric value expected");
}
}
}
@ -526,7 +526,7 @@ public class GeoUtils {
String val = parser.text();
return point.resetFromString(val, ignoreZValue, effectivePoint);
} else {
throw new ElasticsearchParseException("geo_point expected");
throw new OpenSearchParseException("geo_point expected");
}
}
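
The Object overload accepts the same shapes described in the javadoc above: a map with lat/lon, a [lon, lat] array, and a string holding coordinates or a geohash. A usage sketch (coordinates and the geohash are arbitrary sample values):

import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

class ParseGeoPointDemo {
    public static void main(String[] args) {
        Map<String, Object> obj = new HashMap<>();
        obj.put("lat", 41.12);
        obj.put("lon", -71.34);
        GeoPoint p1 = GeoUtils.parseGeoPoint(obj, false);
        GeoPoint p2 = GeoUtils.parseGeoPoint(Arrays.asList(-71.34, 41.12), false); // lon first
        GeoPoint p3 = GeoUtils.parseGeoPoint("drm3btev3e86", false);               // geohash string
        System.out.println(p1 + " | " + p2 + " | " + p3);
    }
}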
@ -555,7 +555,7 @@ public class GeoUtils {
* @param parser {@link XContentParser} to parse the value from
* @return int representing precision
*/
public static int parsePrecision(XContentParser parser) throws IOException, ElasticsearchParseException {
public static int parsePrecision(XContentParser parser) throws IOException, OpenSearchParseException {
XContentParser.Token token = parser.currentToken();
if (token.equals(XContentParser.Token.VALUE_NUMBER)) {
return XContentMapValues.nodeIntegerValue(parser.intValue());

View File

@ -19,7 +19,7 @@
package org.elasticsearch.common.geo;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
@ -87,7 +87,7 @@ public final class GeometryParser {
// We don't know the format of the original geometry - so going with default
return new GeoJsonGeometryFormat(geoJsonParser);
} else {
throw new ElasticsearchParseException("shape must be an object consisting of type and coordinates");
throw new OpenSearchParseException("shape must be an object consisting of type and coordinates");
}
}
@ -102,7 +102,7 @@ public final class GeometryParser {
* <p>
* Json structure: valid geojson definition
*/
public Geometry parseGeometry(Object value) throws ElasticsearchParseException {
public Geometry parseGeometry(Object value) throws OpenSearchParseException {
if (value instanceof List) {
List<?> values = (List<?>) value;
if (values.size() == 2 && values.get(0) instanceof Number) {
@ -129,7 +129,7 @@ public final class GeometryParser {
}
} catch (IOException | ParseException ex) {
throw new ElasticsearchParseException("error parsing geometry ", ex);
throw new OpenSearchParseException("error parsing geometry ", ex);
}
}
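
parseGeometry therefore tolerates the shorthand forms as well as full GeoJSON. A sketch, assuming the three-argument GeometryParser constructor (orientation, coerce, ignoreZValue) available at this commit:

import org.elasticsearch.common.geo.GeometryParser;
import org.elasticsearch.geometry.Geometry;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

class GeometryParserDemo {
    public static void main(String[] args) {
        GeometryParser parser = new GeometryParser(true, true, true);
        // Shorthand: a bare [lon, lat] pair.
        Geometry p1 = parser.parseGeometry(Arrays.asList(13.0, 52.0));
        // Full GeoJSON as a map.
        Map<String, Object> geoJson = new HashMap<>();
        geoJson.put("type", "Point");
        geoJson.put("coordinates", Arrays.asList(13.0, 52.0));
        Geometry p2 = parser.parseGeometry(geoJson);
        System.out.println(p1 + " | " + p2);
    }
}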

View File

@ -18,7 +18,7 @@
*/
package org.elasticsearch.common.geo.parsers;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoShapeType;
@ -80,7 +80,7 @@ abstract class GeoJsonParser {
subParser.nextToken();
CoordinateNode tempNode = parseCoordinates(subParser, ignoreZValue.value());
if (coordinateNode != null && tempNode.numDimensions() != coordinateNode.numDimensions()) {
throw new ElasticsearchParseException("Exception parsing coordinates: " +
throw new OpenSearchParseException("Exception parsing coordinates: " +
"number of dimensions do not match");
}
coordinateNode = tempNode;
@ -118,15 +118,15 @@ abstract class GeoJsonParser {
}
if (malformedException != null) {
throw new ElasticsearchParseException(malformedException);
throw new OpenSearchParseException(malformedException);
} else if (shapeType == null) {
throw new ElasticsearchParseException("shape type not included");
throw new OpenSearchParseException("shape type not included");
} else if (coordinateNode == null && GeoShapeType.GEOMETRYCOLLECTION != shapeType) {
throw new ElasticsearchParseException("coordinates not included");
throw new OpenSearchParseException("coordinates not included");
} else if (geometryCollections == null && GeoShapeType.GEOMETRYCOLLECTION == shapeType) {
throw new ElasticsearchParseException("geometries not included");
throw new OpenSearchParseException("geometries not included");
} else if (radius != null && GeoShapeType.CIRCLE != shapeType) {
throw new ElasticsearchParseException("field [{}] is supported for [{}] only", CircleBuilder.FIELD_RADIUS,
throw new OpenSearchParseException("field [{}] is supported for [{}] only", CircleBuilder.FIELD_RADIUS,
CircleBuilder.TYPE);
}
@ -152,7 +152,7 @@ abstract class GeoJsonParser {
if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
parser.skipChildren();
parser.nextToken();
throw new ElasticsearchParseException("coordinates cannot be specified as objects");
throw new OpenSearchParseException("coordinates cannot be specified as objects");
}
XContentParser.Token token = parser.nextToken();
@ -169,7 +169,7 @@ abstract class GeoJsonParser {
while (token != XContentParser.Token.END_ARRAY) {
CoordinateNode node = parseCoordinates(parser, ignoreZValue);
if (nodes.isEmpty() == false && nodes.get(0).numDimensions() != node.numDimensions()) {
throw new ElasticsearchParseException("Exception parsing coordinates: number of dimensions do not match");
throw new OpenSearchParseException("Exception parsing coordinates: number of dimensions do not match");
}
nodes.add(node);
token = parser.nextToken();
@ -180,11 +180,11 @@ abstract class GeoJsonParser {
private static Coordinate parseCoordinate(XContentParser parser, boolean ignoreZValue) throws IOException {
if (parser.currentToken() != XContentParser.Token.VALUE_NUMBER) {
throw new ElasticsearchParseException("geo coordinates must be numbers");
throw new OpenSearchParseException("geo coordinates must be numbers");
}
double lon = parser.doubleValue();
if (parser.nextToken() != XContentParser.Token.VALUE_NUMBER) {
throw new ElasticsearchParseException("geo coordinates must be numbers");
throw new OpenSearchParseException("geo coordinates must be numbers");
}
double lat = parser.doubleValue();
XContentParser.Token token = parser.nextToken();
@ -196,7 +196,7 @@ abstract class GeoJsonParser {
}
// do not support > 3 dimensions
if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) {
throw new ElasticsearchParseException("geo coordinates greater than 3 dimensions are not supported");
throw new OpenSearchParseException("geo coordinates greater than 3 dimensions are not supported");
}
return new Coordinate(lon, lat, alt);
}
@ -211,7 +211,7 @@ abstract class GeoJsonParser {
static GeometryCollectionBuilder parseGeometries(XContentParser parser, AbstractShapeGeometryFieldMapper mapper) throws
IOException {
if (parser.currentToken() != XContentParser.Token.START_ARRAY) {
throw new ElasticsearchParseException("geometries must be an array of geojson objects");
throw new OpenSearchParseException("geometries must be an array of geojson objects");
}
XContentParser.Token token = parser.nextToken();


@ -18,7 +18,7 @@
*/
package org.elasticsearch.common.geo.parsers;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoShapeType;
@ -64,19 +64,19 @@ public class GeoWKTParser {
private GeoWKTParser() {}
public static ShapeBuilder parse(XContentParser parser, final AbstractShapeGeometryFieldMapper shapeMapper)
throws IOException, ElasticsearchParseException {
throws IOException, OpenSearchParseException {
return parseExpectedType(parser, null, shapeMapper);
}
public static ShapeBuilder parseExpectedType(XContentParser parser, final GeoShapeType shapeType)
throws IOException, ElasticsearchParseException {
throws IOException, OpenSearchParseException {
return parseExpectedType(parser, shapeType, null);
}
/** throws an exception if the parsed geometry type does not match the expected shape type */
public static ShapeBuilder parseExpectedType(XContentParser parser, final GeoShapeType shapeType,
final AbstractShapeGeometryFieldMapper shapeMapper)
throws IOException, ElasticsearchParseException {
throws IOException, OpenSearchParseException {
try (StringReader reader = new StringReader(parser.text())) {
Explicit<Boolean> ignoreZValue = (shapeMapper == null) ? AbstractShapeGeometryFieldMapper.Defaults.IGNORE_Z_VALUE :
shapeMapper.ignoreZValue();
@ -102,11 +102,11 @@ public class GeoWKTParser {
/** parse geometry from the stream tokenizer */
private static ShapeBuilder parseGeometry(StreamTokenizer stream, GeoShapeType shapeType, final boolean ignoreZValue,
final boolean coerce)
throws IOException, ElasticsearchParseException {
throws IOException, OpenSearchParseException {
final GeoShapeType type = GeoShapeType.forName(nextWord(stream));
if (shapeType != null && shapeType != GeoShapeType.GEOMETRYCOLLECTION) {
if (type.wktName().equals(shapeType.wktName()) == false) {
throw new ElasticsearchParseException("Expected geometry type [{}] but found [{}]", shapeType, type);
throw new OpenSearchParseException("Expected geometry type [{}] but found [{}]", shapeType, type);
}
}
switch (type) {
@ -131,7 +131,7 @@ public class GeoWKTParser {
}
}
private static EnvelopeBuilder parseBBox(StreamTokenizer stream) throws IOException, ElasticsearchParseException {
private static EnvelopeBuilder parseBBox(StreamTokenizer stream) throws IOException, OpenSearchParseException {
if (nextEmptyOrOpen(stream).equals(EMPTY)) {
return null;
}
@ -147,7 +147,7 @@ public class GeoWKTParser {
}
private static PointBuilder parsePoint(StreamTokenizer stream, final boolean ignoreZValue, final boolean coerce)
throws IOException, ElasticsearchParseException {
throws IOException, OpenSearchParseException {
if (nextEmptyOrOpen(stream).equals(EMPTY)) {
return null;
}
@ -160,7 +160,7 @@ public class GeoWKTParser {
}
private static List<Coordinate> parseCoordinateList(StreamTokenizer stream, final boolean ignoreZValue, final boolean coerce)
throws IOException, ElasticsearchParseException {
throws IOException, OpenSearchParseException {
CoordinatesBuilder coordinates = new CoordinatesBuilder();
boolean isOpenParen = false;
if (isNumberNext(stream) || (isOpenParen = nextWord(stream).equals(LPAREN))) {
@ -168,7 +168,7 @@ public class GeoWKTParser {
}
if (isOpenParen && nextCloser(stream).equals(RPAREN) == false) {
throw new ElasticsearchParseException("expected: [{}]" + RPAREN + " but found: [{}]" + tokenString(stream), stream.lineno());
throw new OpenSearchParseException("expected: [{}]" + RPAREN + " but found: [{}]" + tokenString(stream), stream.lineno());
}
while (nextCloserOrComma(stream).equals(COMMA)) {
@ -177,14 +177,14 @@ public class GeoWKTParser {
coordinates.coordinate(parseCoordinate(stream, ignoreZValue, coerce));
}
if (isOpenParen && nextCloser(stream).equals(RPAREN) == false) {
throw new ElasticsearchParseException("expected: " + RPAREN + " but found: " + tokenString(stream), stream.lineno());
throw new OpenSearchParseException("expected: " + RPAREN + " but found: " + tokenString(stream), stream.lineno());
}
}
return coordinates.build();
}
private static Coordinate parseCoordinate(StreamTokenizer stream, final boolean ignoreZValue, final boolean coerce)
throws IOException, ElasticsearchParseException {
throws IOException, OpenSearchParseException {
final double lon = nextNumber(stream);
final double lat = nextNumber(stream);
Double z = null;
@ -195,7 +195,7 @@ public class GeoWKTParser {
}
private static MultiPointBuilder parseMultiPoint(StreamTokenizer stream, final boolean ignoreZValue, final boolean coerce)
throws IOException, ElasticsearchParseException {
throws IOException, OpenSearchParseException {
String token = nextEmptyOrOpen(stream);
if (token.equals(EMPTY)) {
return new MultiPointBuilder();
@ -204,7 +204,7 @@ public class GeoWKTParser {
}
private static LineStringBuilder parseLine(StreamTokenizer stream, final boolean ignoreZValue, final boolean coerce)
throws IOException, ElasticsearchParseException {
throws IOException, OpenSearchParseException {
String token = nextEmptyOrOpen(stream);
if (token.equals(EMPTY)) {
return null;
@ -215,7 +215,7 @@ public class GeoWKTParser {
// A LinearRing is a closed LineString with 4 or more positions. The first and last positions
// are equivalent (they represent equivalent points).
private static LineStringBuilder parseLinearRing(StreamTokenizer stream, final boolean ignoreZValue, final boolean coerce)
throws IOException, ElasticsearchParseException {
throws IOException, OpenSearchParseException {
String token = nextEmptyOrOpen(stream);
if (token.equals(EMPTY)) {
return null;
@ -227,19 +227,19 @@ public class GeoWKTParser {
if (coerce) {
coordinates.add(coordinates.get(0));
} else {
throw new ElasticsearchParseException("invalid LinearRing found (coordinates are not closed)");
throw new OpenSearchParseException("invalid LinearRing found (coordinates are not closed)");
}
}
}
if (coordinates.size() < 4) {
throw new ElasticsearchParseException("invalid number of points in LinearRing (found [{}] - must be >= 4)",
throw new OpenSearchParseException("invalid number of points in LinearRing (found [{}] - must be >= 4)",
coordinates.size());
}
return new LineStringBuilder(coordinates);
}
private static MultiLineStringBuilder parseMultiLine(StreamTokenizer stream, final boolean ignoreZValue, final boolean coerce)
throws IOException, ElasticsearchParseException {
throws IOException, OpenSearchParseException {
String token = nextEmptyOrOpen(stream);
if (token.equals(EMPTY)) {
return new MultiLineStringBuilder();
@ -253,7 +253,7 @@ public class GeoWKTParser {
}
private static PolygonBuilder parsePolygon(StreamTokenizer stream, final boolean ignoreZValue, final boolean coerce)
throws IOException, ElasticsearchParseException {
throws IOException, OpenSearchParseException {
if (nextEmptyOrOpen(stream).equals(EMPTY)) {
return null;
}
@ -266,7 +266,7 @@ public class GeoWKTParser {
}
private static MultiPolygonBuilder parseMultiPolygon(StreamTokenizer stream, final boolean ignoreZValue, final boolean coerce)
throws IOException, ElasticsearchParseException {
throws IOException, OpenSearchParseException {
if (nextEmptyOrOpen(stream).equals(EMPTY)) {
return null;
}
@ -279,7 +279,7 @@ public class GeoWKTParser {
private static GeometryCollectionBuilder parseGeometryCollection(StreamTokenizer stream, final boolean ignoreZValue,
final boolean coerce)
throws IOException, ElasticsearchParseException {
throws IOException, OpenSearchParseException {
if (nextEmptyOrOpen(stream).equals(EMPTY)) {
return null;
}
@ -292,7 +292,7 @@ public class GeoWKTParser {
}
/** next word in the stream */
private static String nextWord(StreamTokenizer stream) throws ElasticsearchParseException, IOException {
private static String nextWord(StreamTokenizer stream) throws OpenSearchParseException, IOException {
switch (stream.nextToken()) {
case StreamTokenizer.TT_WORD:
final String word = stream.sval;
@ -301,10 +301,10 @@ public class GeoWKTParser {
case ')': return RPAREN;
case ',': return COMMA;
}
throw new ElasticsearchParseException("expected word but found: " + tokenString(stream), stream.lineno());
throw new OpenSearchParseException("expected word but found: " + tokenString(stream), stream.lineno());
}
private static double nextNumber(StreamTokenizer stream) throws IOException, ElasticsearchParseException {
private static double nextNumber(StreamTokenizer stream) throws IOException, OpenSearchParseException {
if (stream.nextToken() == StreamTokenizer.TT_WORD) {
if (stream.sval.equalsIgnoreCase(NAN)) {
return Double.NaN;
@ -312,11 +312,11 @@ public class GeoWKTParser {
try {
return Double.parseDouble(stream.sval);
} catch (NumberFormatException e) {
throw new ElasticsearchParseException("invalid number found: " + stream.sval, stream.lineno());
throw new OpenSearchParseException("invalid number found: " + stream.sval, stream.lineno());
}
}
}
throw new ElasticsearchParseException("expected number but found: " + tokenString(stream), stream.lineno());
throw new OpenSearchParseException("expected number but found: " + tokenString(stream), stream.lineno());
}
private static String tokenString(StreamTokenizer stream) {
@ -335,42 +335,42 @@ public class GeoWKTParser {
return type == StreamTokenizer.TT_WORD;
}
private static String nextEmptyOrOpen(StreamTokenizer stream) throws IOException, ElasticsearchParseException {
private static String nextEmptyOrOpen(StreamTokenizer stream) throws IOException, OpenSearchParseException {
final String next = nextWord(stream);
if (next.equals(EMPTY) || next.equals(LPAREN)) {
return next;
}
throw new ElasticsearchParseException("expected " + EMPTY + " or " + LPAREN
throw new OpenSearchParseException("expected " + EMPTY + " or " + LPAREN
+ " but found: " + tokenString(stream), stream.lineno());
}
private static String nextCloser(StreamTokenizer stream) throws IOException, ElasticsearchParseException {
private static String nextCloser(StreamTokenizer stream) throws IOException, OpenSearchParseException {
if (nextWord(stream).equals(RPAREN)) {
return RPAREN;
}
throw new ElasticsearchParseException("expected " + RPAREN + " but found: " + tokenString(stream), stream.lineno());
throw new OpenSearchParseException("expected " + RPAREN + " but found: " + tokenString(stream), stream.lineno());
}
private static String nextComma(StreamTokenizer stream) throws IOException, ElasticsearchParseException {
private static String nextComma(StreamTokenizer stream) throws IOException, OpenSearchParseException {
if (nextWord(stream).equals(COMMA)) {
return COMMA;
}
throw new ElasticsearchParseException("expected " + COMMA + " but found: " + tokenString(stream), stream.lineno());
throw new OpenSearchParseException("expected " + COMMA + " but found: " + tokenString(stream), stream.lineno());
}
private static String nextCloserOrComma(StreamTokenizer stream) throws IOException, ElasticsearchParseException {
private static String nextCloserOrComma(StreamTokenizer stream) throws IOException, OpenSearchParseException {
String token = nextWord(stream);
if (token.equals(COMMA) || token.equals(RPAREN)) {
return token;
}
throw new ElasticsearchParseException("expected " + COMMA + " or " + RPAREN
throw new OpenSearchParseException("expected " + COMMA + " or " + RPAREN
+ " but found: " + tokenString(stream), stream.lineno());
}
/** verifies that no tokens remain in the stream after the parsed geometry */
private static void checkEOF(StreamTokenizer stream) throws ElasticsearchParseException, IOException {
private static void checkEOF(StreamTokenizer stream) throws OpenSearchParseException, IOException {
if (stream.nextToken() != StreamTokenizer.TT_EOF) {
throw new ElasticsearchParseException("expected end of WKT string but found additional text: "
throw new OpenSearchParseException("expected end of WKT string but found additional text: "
+ tokenString(stream), stream.lineno());
}
}
View File
@ -18,7 +18,7 @@
*/
package org.elasticsearch.common.geo.parsers;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
@ -66,7 +66,7 @@ public interface ShapeParser {
} else if (parser.currentToken() == XContentParser.Token.VALUE_STRING) {
return GeoWKTParser.parse(parser, shapeMapper);
}
throw new ElasticsearchParseException("shape must be an object consisting of type and coordinates");
throw new OpenSearchParseException("shape must be an object consisting of type and coordinates");
}
/**
View File
@ -19,7 +19,7 @@
package org.elasticsearch.common.joda;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.time.DateUtils;
import org.joda.time.DateTimeZone;
@ -59,7 +59,7 @@ public class JodaDateMathParser implements DateMathParser {
try {
time = now.getAsLong();
} catch (Exception e) {
throw new ElasticsearchParseException("could not read the current timestamp", e);
throw new OpenSearchParseException("could not read the current timestamp", e);
}
mathString = text.substring("now".length());
} else {
@ -74,7 +74,7 @@ public class JodaDateMathParser implements DateMathParser {
return Instant.ofEpochMilli(parseMath(mathString, time, roundUp, timeZone));
}
private long parseMath(String mathString, long time, boolean roundUp, DateTimeZone timeZone) throws ElasticsearchParseException {
private long parseMath(String mathString, long time, boolean roundUp, DateTimeZone timeZone) throws OpenSearchParseException {
if (timeZone == null) {
timeZone = DateTimeZone.UTC;
}
@ -93,12 +93,12 @@ public class JodaDateMathParser implements DateMathParser {
} else if (c == '-') {
sign = -1;
} else {
throw new ElasticsearchParseException("operator not supported for date math [{}]", mathString);
throw new OpenSearchParseException("operator not supported for date math [{}]", mathString);
}
}
if (i >= mathString.length()) {
throw new ElasticsearchParseException("truncated date math [{}]", mathString);
throw new OpenSearchParseException("truncated date math [{}]", mathString);
}
final int num;
@ -110,13 +110,13 @@ public class JodaDateMathParser implements DateMathParser {
i++;
}
if (i >= mathString.length()) {
throw new ElasticsearchParseException("truncated date math [{}]", mathString);
throw new OpenSearchParseException("truncated date math [{}]", mathString);
}
num = Integer.parseInt(mathString.substring(numFrom, i));
}
if (round) {
if (num != 1) {
throw new ElasticsearchParseException("rounding `/` can only be used on single unit types [{}]", mathString);
throw new OpenSearchParseException("rounding `/` can only be used on single unit types [{}]", mathString);
}
}
char unit = mathString.charAt(i++);
@ -173,7 +173,7 @@ public class JodaDateMathParser implements DateMathParser {
}
break;
default:
throw new ElasticsearchParseException("unit [{}] not supported for date math [{}]", unit, mathString);
throw new OpenSearchParseException("unit [{}] not supported for date math [{}]", unit, mathString);
}
if (propertyToRound != null) {
if (roundUp) {
@ -212,7 +212,7 @@ public class JodaDateMathParser implements DateMathParser {
}
return date.getMillis();
} catch (IllegalArgumentException e) {
throw new ElasticsearchParseException("failed to parse date field [{}] with format [{}]", e, value,
throw new OpenSearchParseException("failed to parse date field [{}] with format [{}]", e, value,
dateTimeFormatter.pattern());
}
}
View File
@ -21,7 +21,7 @@ package org.elasticsearch.common.settings;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.OpenSearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Nullable;
@ -465,7 +465,7 @@ public class Setting<T> implements ToXContentObject {
validator.validate(parsed, map, exists(settings));
}
return parsed;
} catch (ElasticsearchParseException ex) {
} catch (OpenSearchParseException ex) {
throw new IllegalArgumentException(ex.getMessage(), ex);
} catch (NumberFormatException ex) {
String err = "Failed to parse value" + (isFiltered() ? "" : " [" + value + "]") + " for setting [" + getKey() + "]";
View File
@ -22,7 +22,7 @@ package org.elasticsearch.common.settings;
import org.apache.logging.log4j.Level;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.OpenSearchGenerationException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Strings;
@ -624,13 +624,13 @@ public final class Settings implements ToXContentFragment {
try {
while (!parser.isClosed() && (lastToken = parser.nextToken()) == null) ;
} catch (Exception e) {
throw new ElasticsearchParseException(
throw new OpenSearchParseException(
"malformed, expected end of settings but encountered additional content starting at line number: [{}], "
+ "column number: [{}]",
e, parser.getTokenLocation().lineNumber, parser.getTokenLocation().columnNumber);
}
if (lastToken != null) {
throw new ElasticsearchParseException(
throw new OpenSearchParseException(
"malformed, expected end of settings but encountered additional content starting at line number: [{}], "
+ "column number: [{}]",
parser.getTokenLocation().lineNumber, parser.getTokenLocation().columnNumber);
@ -687,7 +687,7 @@ public final class Settings implements ToXContentFragment {
private static void validateValue(String key, Object currentValue, XContentParser parser, boolean allowNullValues) {
if (currentValue == null && allowNullValues == false) {
throw new ElasticsearchParseException(
throw new OpenSearchParseException(
"null-valued setting found for key [{}] found at line number [{}], column number [{}]",
key,
parser.getTokenLocation().lineNumber,
@ -1107,7 +1107,7 @@ public final class Settings implements ToXContentFragment {
}
}
put(fromXContent(parser, acceptNullValues, true));
} catch (ElasticsearchParseException e) {
} catch (OpenSearchParseException e) {
throw e;
} catch (Exception e) {
throw new SettingsException("Failed to load settings from [" + resourceName + "]", e);
View File
@ -19,7 +19,7 @@
package org.elasticsearch.common.time;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.Strings;
import java.time.DayOfWeek;
@ -65,7 +65,7 @@ public class JavaDateMathParser implements DateMathParser {
// TODO only millisecond granularity here!
time = Instant.ofEpochMilli(now.getAsLong());
} catch (Exception e) {
throw new ElasticsearchParseException("could not read the current timestamp", e);
throw new OpenSearchParseException("could not read the current timestamp", e);
}
mathString = text.substring("now".length());
} else {
@ -81,7 +81,7 @@ public class JavaDateMathParser implements DateMathParser {
}
private Instant parseMath(final String mathString, final Instant time, final boolean roundUpProperty,
ZoneId timeZone) throws ElasticsearchParseException {
ZoneId timeZone) throws OpenSearchParseException {
if (timeZone == null) {
timeZone = ZoneOffset.UTC;
}
@ -100,12 +100,12 @@ public class JavaDateMathParser implements DateMathParser {
} else if (c == '-') {
sign = -1;
} else {
throw new ElasticsearchParseException("operator not supported for date math [{}]", mathString);
throw new OpenSearchParseException("operator not supported for date math [{}]", mathString);
}
}
if (i >= mathString.length()) {
throw new ElasticsearchParseException("truncated date math [{}]", mathString);
throw new OpenSearchParseException("truncated date math [{}]", mathString);
}
final int num;
@ -117,13 +117,13 @@ public class JavaDateMathParser implements DateMathParser {
i++;
}
if (i >= mathString.length()) {
throw new ElasticsearchParseException("truncated date math [{}]", mathString);
throw new OpenSearchParseException("truncated date math [{}]", mathString);
}
num = Integer.parseInt(mathString.substring(numFrom, i));
}
if (round) {
if (num != 1) {
throw new ElasticsearchParseException("rounding `/` can only be used on single unit types [{}]", mathString);
throw new OpenSearchParseException("rounding `/` can only be used on single unit types [{}]", mathString);
}
}
char unit = mathString.charAt(i++);
@ -200,7 +200,7 @@ public class JavaDateMathParser implements DateMathParser {
}
break;
default:
throw new ElasticsearchParseException("unit [{}] not supported for date math [{}]", unit, mathString);
throw new OpenSearchParseException("unit [{}] not supported for date math [{}]", unit, mathString);
}
if (round && roundUpProperty) {
// subtract 1 millisecond to get the largest inclusive value
@ -212,7 +212,7 @@ public class JavaDateMathParser implements DateMathParser {
private Instant parseDateTime(String value, ZoneId timeZone, boolean roundUpIfNoTime) {
if (Strings.isNullOrEmpty(value)) {
throw new ElasticsearchParseException("cannot parse empty date");
throw new OpenSearchParseException("cannot parse empty date");
}
DateFormatter formatter = roundUpIfNoTime ? this.roundupParser : this.formatter;
@ -229,7 +229,7 @@ public class JavaDateMathParser implements DateMathParser {
return DateFormatters.from(accessor).withZoneSameLocal(timeZone).toInstant();
}
} catch (IllegalArgumentException | DateTimeParseException e) {
throw new ElasticsearchParseException("failed to parse date field [{}] with format [{}]: [{}]",
throw new OpenSearchParseException("failed to parse date field [{}] with format [{}]: [{}]",
e, value, format, e.getMessage());
}
}
View File
@ -19,7 +19,7 @@
package org.elasticsearch.common.unit;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
@ -188,12 +188,12 @@ public class ByteSizeValue implements Writeable, Comparable<ByteSizeValue>, ToXC
return Strings.format1Decimals(value, suffix);
}
public static ByteSizeValue parseBytesSizeValue(String sValue, String settingName) throws ElasticsearchParseException {
public static ByteSizeValue parseBytesSizeValue(String sValue, String settingName) throws OpenSearchParseException {
return parseBytesSizeValue(sValue, null, settingName);
}
public static ByteSizeValue parseBytesSizeValue(String sValue, ByteSizeValue defaultValue, String settingName)
throws ElasticsearchParseException {
throws OpenSearchParseException {
settingName = Objects.requireNonNull(settingName);
if (sValue == null) {
return defaultValue;
@ -229,7 +229,7 @@ public class ByteSizeValue implements Writeable, Comparable<ByteSizeValue>, ToXC
return new ByteSizeValue(0, ByteSizeUnit.BYTES);
} else {
// Missing units:
throw new ElasticsearchParseException(
throw new OpenSearchParseException(
"failed to parse setting [{}] with value [{}] as a size in bytes: unit is missing or unrecognized", settingName,
sValue);
}
@ -250,11 +250,11 @@ public class ByteSizeValue implements Writeable, Comparable<ByteSizeValue>, ToXC
initialInput, settingName);
return new ByteSizeValue((long) (doubleValue * unit.toBytes(1)));
} catch (final NumberFormatException ignored) {
throw new ElasticsearchParseException("failed to parse [{}]", e, initialInput);
throw new OpenSearchParseException("failed to parse [{}]", e, initialInput);
}
}
} catch (IllegalArgumentException e) {
throw new ElasticsearchParseException("failed to parse setting [{}] with value [{}] as a size in bytes", e, settingName,
throw new OpenSearchParseException("failed to parse setting [{}] with value [{}] as a size in bytes", e, settingName,
initialInput);
}
}
View File
@ -18,7 +18,7 @@
*/
package org.elasticsearch.common.unit;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
@ -135,11 +135,11 @@ public final class Fuzziness implements ToXContentFragment, Writeable {
int highLimit = Integer.parseInt(fuzzinessLimit[1]);
return new Fuzziness("AUTO", lowerLimit, highLimit);
} catch (NumberFormatException e) {
throw new ElasticsearchParseException("failed to parse [{}] as a \"auto:int,int\"", e,
throw new OpenSearchParseException("failed to parse [{}] as a \"auto:int,int\"", e,
string);
}
} else {
throw new ElasticsearchParseException("failed to find low and high distance values");
throw new OpenSearchParseException("failed to find low and high distance values");
}
}
View File
@ -19,7 +19,7 @@
package org.elasticsearch.common.unit;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.monitor.jvm.JvmInfo;
import java.util.Objects;
@ -40,11 +40,11 @@ public enum MemorySizeValue {
try {
final double percent = Double.parseDouble(percentAsString);
if (percent < 0 || percent > 100) {
throw new ElasticsearchParseException("percentage should be in [0-100], got [{}]", percentAsString);
throw new OpenSearchParseException("percentage should be in [0-100], got [{}]", percentAsString);
}
return new ByteSizeValue((long) ((percent / 100) * JvmInfo.jvmInfo().getMem().getHeapMax().getBytes()), ByteSizeUnit.BYTES);
} catch (NumberFormatException e) {
throw new ElasticsearchParseException("failed to parse [{}] as a double", e, percentAsString);
throw new OpenSearchParseException("failed to parse [{}] as a double", e, percentAsString);
}
} else {
return parseBytesSizeValue(sValue, settingName);
View File
@ -19,7 +19,7 @@
package org.elasticsearch.common.unit;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
/**
* Utility class to represent ratio and percentage values between 0 and 100
@ -55,21 +55,21 @@ public class RatioValue {
try {
final double percent = Double.parseDouble(percentAsString);
if (percent < 0 || percent > 100) {
throw new ElasticsearchParseException("Percentage should be in [0-100], got [{}]", percentAsString);
throw new OpenSearchParseException("Percentage should be in [0-100], got [{}]", percentAsString);
}
return new RatioValue(Math.abs(percent));
} catch (NumberFormatException e) {
throw new ElasticsearchParseException("Failed to parse [{}] as a double", e, percentAsString);
throw new OpenSearchParseException("Failed to parse [{}] as a double", e, percentAsString);
}
} else {
try {
double ratio = Double.parseDouble(sValue);
if (ratio < 0 || ratio > 1.0) {
throw new ElasticsearchParseException("Ratio should be in [0-1.0], got [{}]", ratio);
throw new OpenSearchParseException("Ratio should be in [0-1.0], got [{}]", ratio);
}
return new RatioValue(100.0 * Math.abs(ratio));
} catch (NumberFormatException e) {
throw new ElasticsearchParseException("Invalid ratio or percentage [{}]", sValue);
throw new OpenSearchParseException("Invalid ratio or percentage [{}]", sValue);
}
}
View File
@ -19,7 +19,7 @@
package org.elasticsearch.common.unit;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -167,11 +167,11 @@ public class SizeValue implements Writeable, Comparable<SizeValue> {
return Strings.format1Decimals(value, suffix);
}
public static SizeValue parseSizeValue(String sValue) throws ElasticsearchParseException {
public static SizeValue parseSizeValue(String sValue) throws OpenSearchParseException {
return parseSizeValue(sValue, null);
}
public static SizeValue parseSizeValue(String sValue, SizeValue defaultValue) throws ElasticsearchParseException {
public static SizeValue parseSizeValue(String sValue, SizeValue defaultValue) throws OpenSearchParseException {
if (sValue == null) {
return defaultValue;
}
@ -191,7 +191,7 @@ public class SizeValue implements Writeable, Comparable<SizeValue> {
singles = Long.parseLong(sValue);
}
} catch (NumberFormatException e) {
throw new ElasticsearchParseException("failed to parse [{}]", e, sValue);
throw new OpenSearchParseException("failed to parse [{}]", e, sValue);
}
return new SizeValue(singles, SizeUnit.SINGLE);
}
View File
@ -19,7 +19,7 @@
package org.elasticsearch.common.xcontent;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
@ -92,7 +92,7 @@ public class XContentHelper {
*/
@Deprecated
public static Tuple<XContentType, Map<String, Object>> convertToMap(BytesReference bytes, boolean ordered)
throws ElasticsearchParseException {
throws OpenSearchParseException {
return convertToMap(bytes, ordered, null);
}
@ -100,7 +100,7 @@ public class XContentHelper {
* Converts the given bytes into a map that is optionally ordered. The provided {@link XContentType} must be non-null.
*/
public static Tuple<XContentType, Map<String, Object>> convertToMap(BytesReference bytes, boolean ordered, XContentType xContentType)
throws ElasticsearchParseException {
throws OpenSearchParseException {
try {
final XContentType contentType;
InputStream input;
@ -129,51 +129,51 @@ public class XContentHelper {
convertToMap(XContentFactory.xContent(contentType), stream, ordered));
}
} catch (IOException e) {
throw new ElasticsearchParseException("Failed to parse content to map", e);
throw new OpenSearchParseException("Failed to parse content to map", e);
}
}
/**
* Convert a string in some {@link XContent} format to a {@link Map}. Throws an {@link ElasticsearchParseException} if there is any
* Convert a string in some {@link XContent} format to a {@link Map}. Throws an {@link OpenSearchParseException} if there is any
* error.
*/
public static Map<String, Object> convertToMap(XContent xContent, String string, boolean ordered) throws ElasticsearchParseException {
public static Map<String, Object> convertToMap(XContent xContent, String string, boolean ordered) throws OpenSearchParseException {
// It is safe to use EMPTY here because this never uses namedObject
try (XContentParser parser = xContent.createParser(NamedXContentRegistry.EMPTY,
DeprecationHandler.THROW_UNSUPPORTED_OPERATION, string)) {
return ordered ? parser.mapOrdered() : parser.map();
} catch (IOException e) {
throw new ElasticsearchParseException("Failed to parse content to map", e);
throw new OpenSearchParseException("Failed to parse content to map", e);
}
}
/**
* Convert a string in some {@link XContent} format to a {@link Map}. Throws an {@link ElasticsearchParseException} if there is any
* Convert a string in some {@link XContent} format to a {@link Map}. Throws an {@link OpenSearchParseException} if there is any
* error. Note that unlike {@link #convertToMap(BytesReference, boolean)}, this doesn't automatically uncompress the input.
*/
public static Map<String, Object> convertToMap(XContent xContent, InputStream input, boolean ordered)
throws ElasticsearchParseException {
throws OpenSearchParseException {
// It is safe to use EMPTY here because this never uses namedObject
try (XContentParser parser = xContent.createParser(NamedXContentRegistry.EMPTY,
DeprecationHandler.THROW_UNSUPPORTED_OPERATION, input)) {
return ordered ? parser.mapOrdered() : parser.map();
} catch (IOException e) {
throw new ElasticsearchParseException("Failed to parse content to map", e);
throw new OpenSearchParseException("Failed to parse content to map", e);
}
}
/**
* Convert a byte array in some {@link XContent} format to a {@link Map}. Throws an {@link ElasticsearchParseException} if there is any
* Convert a byte array in some {@link XContent} format to a {@link Map}. Throws an {@link OpenSearchParseException} if there is any
* error. Note that unlike {@link #convertToMap(BytesReference, boolean)}, this doesn't automatically uncompress the input.
*/
public static Map<String, Object> convertToMap(XContent xContent, byte[] bytes, int offset, int length, boolean ordered)
throws ElasticsearchParseException {
throws OpenSearchParseException {
// It is safe to use EMPTY here because this never uses namedObject
try (XContentParser parser = xContent.createParser(NamedXContentRegistry.EMPTY,
DeprecationHandler.THROW_UNSUPPORTED_OPERATION, bytes, offset, length)) {
return ordered ? parser.mapOrdered() : parser.map();
} catch (IOException e) {
throw new ElasticsearchParseException("Failed to parse content to map", e);
throw new OpenSearchParseException("Failed to parse content to map", e);
}
}
View File
@ -23,7 +23,7 @@ import org.apache.lucene.util.automaton.Automata;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.apache.lucene.util.automaton.Operations;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Numbers;
import org.elasticsearch.common.Strings;
@ -505,7 +505,7 @@ public class XContentMapValues {
if (node instanceof Map) {
return (Map<String, Object>) node;
} else {
throw new ElasticsearchParseException(desc + " should be a hash but was of type: " + node.getClass());
throw new OpenSearchParseException(desc + " should be a hash but was of type: " + node.getClass());
}
}
View File
@ -19,7 +19,7 @@
package org.elasticsearch.index.get;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
@ -200,7 +200,7 @@ public class GetResult implements Writeable, Iterable<DocumentField>, ToXContent
this.source = CompressorFactory.uncompressIfNeeded(this.source);
return this.source;
} catch (IOException e) {
throw new ElasticsearchParseException("failed to decompress source", e);
throw new OpenSearchParseException("failed to decompress source", e);
}
}
@ -229,14 +229,14 @@ public class GetResult implements Writeable, Iterable<DocumentField>, ToXContent
try {
return XContentHelper.convertToJson(source, false);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to convert source to a json string");
throw new OpenSearchParseException("failed to convert source to a json string");
}
}
/**
* The source of the document (As a map).
*/
public Map<String, Object> sourceAsMap() throws ElasticsearchParseException {
public Map<String, Object> sourceAsMap() throws OpenSearchParseException {
if (source == null) {
return null;
}
View File
@ -19,7 +19,7 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.document.FieldType;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.CheckedBiFunction;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.ParseField;
@ -220,7 +220,7 @@ public abstract class AbstractPointGeometryFieldMapper<Parsed, Processed> extend
if (token == XContentParser.Token.VALUE_NUMBER) {
GeoPoint.assertZValue(ignoreZValue, parser.doubleValue());
} else if (token != XContentParser.Token.END_ARRAY) {
throw new ElasticsearchParseException("field type does not accept > 3 dimensions");
throw new OpenSearchParseException("field type does not accept > 3 dimensions");
}
point.resetCoords(x, y);
View File
@ -28,7 +28,7 @@ import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.IndexOrDocValuesQuery;
import org.apache.lucene.search.IndexSortSortedNumericDocValuesRangeQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.geo.ShapeRelation;
@ -611,7 +611,7 @@ public final class DateFieldMapper extends ParametrizedFieldMapper {
} else {
try {
timestamp = fieldType().parse(dateAsString);
} catch (IllegalArgumentException | ElasticsearchParseException | DateTimeException | ArithmeticException e) {
} catch (IllegalArgumentException | OpenSearchParseException | DateTimeException | ArithmeticException e) {
if (ignoreMalformed) {
context.addIgnoredField(mappedFieldType.name());
return;
View File
@ -21,7 +21,7 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
@ -685,7 +685,7 @@ final class DocumentParser {
for (DateFormatter dateTimeFormatter : context.root().dynamicDateTimeFormatters()) {
try {
dateTimeFormatter.parse(text);
} catch (ElasticsearchParseException | DateTimeParseException | IllegalArgumentException e) {
} catch (OpenSearchParseException | DateTimeParseException | IllegalArgumentException e) {
// failure to parse this, continue
continue;
}
View File
@ -25,7 +25,7 @@ import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
@ -224,7 +224,7 @@ public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper<List<P
if (isNormalizable(lat()) && isNormalizable(lon())) {
GeoUtils.normalizePoint(this);
} else {
throw new ElasticsearchParseException("cannot normalize the point - not a number");
throw new OpenSearchParseException("cannot normalize the point - not a number");
}
}
View File
@ -28,7 +28,7 @@ import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.prefix.tree.PackedQuadPrefixTree;
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.ParseField;
@ -137,7 +137,7 @@ public class LegacyGeoShapeFieldMapper extends AbstractShapeGeometryFieldMapper<
public void setPointsOnly(boolean pointsOnly) {
if (this.strategy == SpatialStrategy.TERM && pointsOnly == false) {
throw new ElasticsearchParseException("points_only cannot be set to false for term strategy");
throw new OpenSearchParseException("points_only cannot be set to false for term strategy");
}
this.pointsOnly = pointsOnly;
}
@ -174,7 +174,7 @@ public class LegacyGeoShapeFieldMapper extends AbstractShapeGeometryFieldMapper<
private static void checkPrefixTreeSupport(String fieldName) {
if (ShapesAvailability.JTS_AVAILABLE == false || ShapesAvailability.SPATIAL4J_AVAILABLE == false) {
throw new ElasticsearchParseException("Field parameter [{}] is not supported for [{}] field type",
throw new OpenSearchParseException("Field parameter [{}] is not supported for [{}] field type",
fieldName, CONTENT_TYPE);
}
DEPRECATION_LOGGER.deprecate("geo_mapper_field_parameter",
View File
@ -38,7 +38,7 @@ import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.time.DateMathParser;
@ -192,7 +192,7 @@ public abstract class MappedFieldType {
* boosted by {@link #boost()}.
* @throws IllegalArgumentException if {@code value} cannot be converted to the expected data type or if the field is not searchable
* due to the way it is configured (eg. not indexed)
* @throws ElasticsearchParseException if {@code value} cannot be converted to the expected data type
* @throws OpenSearchParseException if {@code value} cannot be converted to the expected data type
* @throws UnsupportedOperationException if the field is not searchable regardless of options
* @throws QueryShardException if the field is not searchable regardless of options
*/
View File
@ -23,7 +23,7 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.CopyOnWriteHashMap;
@ -231,7 +231,7 @@ public class ObjectMapper extends Mapper implements Cloneable {
if (fieldNode instanceof Collection && ((Collection) fieldNode).isEmpty()) {
// nothing to do here, empty (to support "properties: []" case)
} else if (!(fieldNode instanceof Map)) {
throw new ElasticsearchParseException("properties must be a map type");
throw new OpenSearchParseException("properties must be a map type");
} else {
parseProperties(builder, (Map<String, Object>) fieldNode, parserContext);
}
View File
@ -20,7 +20,7 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
@ -224,7 +224,7 @@ public class TypeParsers {
} else if (INDEX_OPTIONS_DOCS.equalsIgnoreCase(value)) {
return IndexOptions.DOCS;
} else {
throw new ElasticsearchParseException("failed to parse index option [{}]", value);
throw new OpenSearchParseException("failed to parse index option [{}]", value);
}
}
View File
@ -24,7 +24,7 @@ import org.apache.lucene.document.LatLonPoint;
import org.apache.lucene.search.IndexOrDocValuesQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.Numbers;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
@ -387,7 +387,7 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBounding
bbox = GeoBoundingBox.parseBoundingBox(parser);
fieldName = currentFieldName;
} catch (Exception e) {
throw new ElasticsearchParseException("failed to parse [{}] query. [{}]", NAME, e.getMessage());
throw new OpenSearchParseException("failed to parse [{}] query. [{}]", NAME, e.getMessage());
}
} else if (token.isValue()) {
if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
@ -408,7 +408,7 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBounding
}
if (bbox == null) {
throw new ElasticsearchParseException("failed to parse [{}] query. bounding box not provided", NAME);
throw new OpenSearchParseException("failed to parse [{}] query. bounding box not provided", NAME);
}
GeoBoundingBoxQueryBuilder builder = new GeoBoundingBoxQueryBuilder(fieldName);
View File
@ -24,7 +24,7 @@ import org.apache.lucene.index.Fields;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.RoutingMissingException;
import org.elasticsearch.action.termvectors.MultiTermVectorsItemResponse;
@ -426,7 +426,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
}
item.fields(fields.toArray(new String[fields.size()]));
} else {
throw new ElasticsearchParseException(
throw new OpenSearchParseException(
"failed to parse More Like This item. field [fields] must be an array");
}
} else if (PER_FIELD_ANALYZER.match(currentFieldName, parser.getDeprecationHandler())) {
@ -438,17 +438,17 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
} else if (VERSION_TYPE.match(currentFieldName, parser.getDeprecationHandler())) {
item.versionType = VersionType.fromString(parser.text());
} else {
throw new ElasticsearchParseException(
throw new OpenSearchParseException(
"failed to parse More Like This item. unknown field [{}]", currentFieldName);
}
}
}
if (item.id != null && item.doc != null) {
throw new ElasticsearchParseException(
throw new OpenSearchParseException(
"failed to parse More Like This item. either [id] or [doc] can be specified, but not both!");
}
if (item.id == null && item.doc == null) {
throw new ElasticsearchParseException(
throw new OpenSearchParseException(
"failed to parse More Like This item. neither [id] nor [doc] is specified!");
}
return item;
View File
@ -21,7 +21,7 @@ package org.elasticsearch.index.query;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
@ -172,7 +172,7 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
}
}
if (type == null) {
throw new ElasticsearchParseException("failed to parse [{}] query type [{}]. unknown type.", NAME, value);
throw new OpenSearchParseException("failed to parse [{}] query type [{}]. unknown type.", NAME, value);
}
return type;
}
View File
@ -21,7 +21,7 @@ package org.elasticsearch.index.query.functionscore;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.geo.GeoDistance;
@ -247,11 +247,11 @@ public abstract class DecayFunctionBuilder<DFB extends DecayFunctionBuilder<DFB>
} else if (DecayFunctionBuilder.OFFSET.equals(parameterName)) {
offset = parser.doubleValue();
} else {
throw new ElasticsearchParseException("parameter [{}] not supported!", parameterName);
throw new OpenSearchParseException("parameter [{}] not supported!", parameterName);
}
}
if (!scaleFound || !refFound) {
throw new ElasticsearchParseException("both [{}] and [{}] must be set for numeric fields.", DecayFunctionBuilder.SCALE,
throw new OpenSearchParseException("both [{}] and [{}] must be set for numeric fields.", DecayFunctionBuilder.SCALE,
DecayFunctionBuilder.ORIGIN);
}
IndexNumericFieldData numericFieldData = context.getForField(fieldType);
@ -278,11 +278,11 @@ public abstract class DecayFunctionBuilder<DFB extends DecayFunctionBuilder<DFB>
} else if (DecayFunctionBuilder.OFFSET.equals(parameterName)) {
offsetString = parser.text();
} else {
throw new ElasticsearchParseException("parameter [{}] not supported!", parameterName);
throw new OpenSearchParseException("parameter [{}] not supported!", parameterName);
}
}
if (origin == null || scaleString == null) {
throw new ElasticsearchParseException("[{}] and [{}] must be set for geo fields.", DecayFunctionBuilder.ORIGIN,
throw new OpenSearchParseException("[{}] and [{}] must be set for geo fields.", DecayFunctionBuilder.ORIGIN,
DecayFunctionBuilder.SCALE);
}
double scale = DistanceUnit.DEFAULT.parse(scaleString, DistanceUnit.DEFAULT);
@ -312,7 +312,7 @@ public abstract class DecayFunctionBuilder<DFB extends DecayFunctionBuilder<DFB>
} else if (DecayFunctionBuilder.OFFSET.equals(parameterName)) {
offsetString = parser.text();
} else {
throw new ElasticsearchParseException("parameter [{}] not supported!", parameterName);
throw new OpenSearchParseException("parameter [{}] not supported!", parameterName);
}
}
long origin;
@ -323,7 +323,7 @@ public abstract class DecayFunctionBuilder<DFB extends DecayFunctionBuilder<DFB>
}
if (scaleString == null) {
throw new ElasticsearchParseException("[{}] must be set for date fields.", DecayFunctionBuilder.SCALE);
throw new OpenSearchParseException("[{}] must be set for date fields.", DecayFunctionBuilder.SCALE);
}
TimeValue val = TimeValue.parseTimeValue(scaleString, TimeValue.timeValueHours(24),
DecayFunctionParser.class.getSimpleName() + ".scale");
View File
@ -20,7 +20,7 @@
package org.elasticsearch.index.reindex;
import org.elasticsearch.OpenSearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.bulk.BulkItemResponse.Failure;
import org.elasticsearch.common.xcontent.ObjectParser;
@ -291,7 +291,7 @@ public class BulkByScrollResponse extends ActionResponse implements ToXContentFr
return new SearchFailure(searchExc, index, shardId, nodeId, RestStatus.fromCode(status));
}
} else {
throw new ElasticsearchParseException("failed to parse failures array. At least one of {reason,cause} must be present");
throw new OpenSearchParseException("failed to parse failures array. At least one of {reason,cause} must be present");
}
}
View File
@ -19,7 +19,7 @@
package org.elasticsearch.index.search;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.index.mapper.MappedFieldType;
@ -141,7 +141,7 @@ public final class QueryParserHelper {
} catch (QueryShardException | UnsupportedOperationException e) {
// field type is never searchable with term queries (eg. geo point): ignore
continue;
} catch (IllegalArgumentException | ElasticsearchParseException e) {
} catch (IllegalArgumentException | OpenSearchParseException e) {
// other exceptions are parsing errors or not indexed fields: keep
}
}
View File
@ -21,7 +21,7 @@ package org.elasticsearch.index.snapshots.blobstore;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Version;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.Lucene;
@ -308,28 +308,28 @@ public class BlobStoreIndexShardSnapshot implements ToXContentFragment {
metaHash.offset = 0;
metaHash.length = metaHash.bytes.length;
} else {
throw new ElasticsearchParseException("unknown parameter [{}]", currentFieldName);
throw new OpenSearchParseException("unknown parameter [{}]", currentFieldName);
}
} else {
throw new ElasticsearchParseException("unexpected token [{}]", token);
throw new OpenSearchParseException("unexpected token [{}]", token);
}
} else {
throw new ElasticsearchParseException("unexpected token [{}]",token);
throw new OpenSearchParseException("unexpected token [{}]",token);
}
}
}
// Verify that file information is complete
if (name == null || Strings.validFileName(name) == false) {
throw new ElasticsearchParseException("missing or invalid file name [" + name + "]");
throw new OpenSearchParseException("missing or invalid file name [" + name + "]");
} else if (physicalName == null || Strings.validFileName(physicalName) == false) {
throw new ElasticsearchParseException("missing or invalid physical file name [" + physicalName + "]");
throw new OpenSearchParseException("missing or invalid physical file name [" + physicalName + "]");
} else if (length < 0) {
throw new ElasticsearchParseException("missing or invalid file length");
throw new OpenSearchParseException("missing or invalid file length");
} else if (writtenBy == null) {
throw new ElasticsearchParseException("missing or invalid written_by [" + writtenByStr + "]");
throw new OpenSearchParseException("missing or invalid written_by [" + writtenByStr + "]");
} else if (checksum == null) {
throw new ElasticsearchParseException("missing checksum for name [" + name + "]");
throw new OpenSearchParseException("missing checksum for name [" + name + "]");
}
return new FileInfo(name, new StoreFileMetadata(physicalName, length, checksum, writtenBy, metaHash), partSize);
}
@ -466,7 +466,7 @@ public class BlobStoreIndexShardSnapshot implements ToXContentFragment {
private static final String TIME = "time";
private static final String FILES = "files";
// for the sake of BWC keep the actual property names as in 6.x
// + there is a constraint in #fromXContent() that leads to ElasticsearchParseException("unknown parameter [incremental_file_count]");
// + there is a constraint in #fromXContent() that leads to OpenSearchParseException("unknown parameter [incremental_file_count]");
private static final String INCREMENTAL_FILE_COUNT = "number_of_files";
private static final String INCREMENTAL_SIZE = "total_size";
@ -540,7 +540,7 @@ public class BlobStoreIndexShardSnapshot implements ToXContentFragment {
} else if (PARSE_INCREMENTAL_SIZE.match(currentFieldName, parser.getDeprecationHandler())) {
incrementalSize = parser.longValue();
} else {
throw new ElasticsearchParseException("unknown parameter [{}]", currentFieldName);
throw new OpenSearchParseException("unknown parameter [{}]", currentFieldName);
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (PARSE_FILES.match(currentFieldName, parser.getDeprecationHandler())) {
@ -548,10 +548,10 @@ public class BlobStoreIndexShardSnapshot implements ToXContentFragment {
indexFiles.add(FileInfo.fromXContent(parser));
}
} else {
throw new ElasticsearchParseException("unknown parameter [{}]", currentFieldName);
throw new OpenSearchParseException("unknown parameter [{}]", currentFieldName);
}
} else {
throw new ElasticsearchParseException("unexpected token [{}]", token);
throw new OpenSearchParseException("unexpected token [{}]", token);
}
}
}
View File
@ -19,7 +19,7 @@
package org.elasticsearch.index.snapshots.blobstore;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -267,7 +267,7 @@ public class BlobStoreIndexShardSnapshots implements Iterable<SnapshotFiles>, To
token = parser.nextToken();
if (token == XContentParser.Token.START_ARRAY) {
if (ParseFields.FILES.match(currentFieldName, parser.getDeprecationHandler()) == false) {
throw new ElasticsearchParseException("unknown array [{}]", currentFieldName);
throw new OpenSearchParseException("unknown array [{}]", currentFieldName);
}
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
FileInfo fileInfo = FileInfo.fromXContent(parser);
@ -275,7 +275,7 @@ public class BlobStoreIndexShardSnapshots implements Iterable<SnapshotFiles>, To
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (ParseFields.SNAPSHOTS.match(currentFieldName, parser.getDeprecationHandler()) == false) {
throw new ElasticsearchParseException("unknown object [{}]", currentFieldName);
throw new OpenSearchParseException("unknown object [{}]", currentFieldName);
}
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
@ -299,7 +299,7 @@ public class BlobStoreIndexShardSnapshots implements Iterable<SnapshotFiles>, To
}
}
} else {
throw new ElasticsearchParseException("unexpected token [{}]", token);
throw new OpenSearchParseException("unexpected token [{}]", token);
}
}
}
View File
@ -21,9 +21,9 @@ package org.elasticsearch.ingest;
import java.io.IOException;
import java.io.InputStream;
import org.elasticsearch.OpenSearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.OpenSearchException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@ -56,7 +56,7 @@ public final class ConfigurationUtils {
/**
* Returns and removes the specified optional property from the specified configuration map.
*
* If the property value isn't of type string a {@link ElasticsearchParseException} is thrown.
* If the property value isn't of type string an {@link OpenSearchParseException} is thrown.
*/
public static String readOptionalStringProperty(String processorType, String processorTag,
Map<String, Object> configuration, String propertyName) {
@ -67,8 +67,8 @@ public final class ConfigurationUtils {
/**
* Returns and removes the specified property from the specified configuration map.
*
* If the property value isn't of type string an {@link ElasticsearchParseException} is thrown.
* If the property is missing an {@link ElasticsearchParseException} is thrown
* If the property value isn't of type string an {@link OpenSearchParseException} is thrown.
* If the property is missing an {@link OpenSearchParseException} is thrown
*/
public static String readStringProperty(String processorType, String processorTag, Map<String, Object> configuration,
String propertyName) {
@ -78,8 +78,8 @@ public final class ConfigurationUtils {
/**
* Returns and removes the specified property from the specified configuration map.
*
* If the property value isn't of type string a {@link ElasticsearchParseException} is thrown.
* If the property is missing and no default value has been specified a {@link ElasticsearchParseException} is thrown
* If the property value isn't of type string an {@link OpenSearchParseException} is thrown.
* If the property is missing and no default value has been specified an {@link OpenSearchParseException} is thrown
*/
public static String readStringProperty(String processorType, String processorTag, Map<String, Object> configuration,
String propertyName, String defaultValue) {
@ -106,8 +106,8 @@ public final class ConfigurationUtils {
/**
* Returns and removes the specified property from the specified configuration map.
*
* If the property value isn't of type string or int a {@link ElasticsearchParseException} is thrown.
* If the property is missing and no default value has been specified a {@link ElasticsearchParseException} is thrown
* If the property value isn't of type string or int an {@link OpenSearchParseException} is thrown.
* If the property is missing and no default value has been specified an {@link OpenSearchParseException} is thrown
*/
public static String readStringOrIntProperty(String processorType, String processorTag,
Map<String, Object> configuration, String propertyName, String defaultValue) {
@ -138,7 +138,7 @@ public final class ConfigurationUtils {
/**
* Returns and removes the specified property from the specified configuration map.
*
* If the property value isn't of type string or int a {@link ElasticsearchParseException} is thrown.
* If the property value isn't of type string or int an {@link OpenSearchParseException} is thrown.
*/
public static String readOptionalStringOrIntProperty(String processorType, String processorTag,
Map<String, Object> configuration, String propertyName) {
@ -173,8 +173,8 @@ public final class ConfigurationUtils {
/**
* Returns and removes the specified property from the specified configuration map.
*
* If the property value isn't of type int a {@link ElasticsearchParseException} is thrown.
* If the property is missing an {@link ElasticsearchParseException} is thrown
* If the property value isn't of type int an {@link OpenSearchParseException} is thrown.
* If the property is missing an {@link OpenSearchParseException} is thrown
*/
public static Integer readIntProperty(String processorType, String processorTag, Map<String, Object> configuration,
String propertyName, Integer defaultValue) {
@ -193,8 +193,8 @@ public final class ConfigurationUtils {
/**
* Returns and removes the specified property from the specified configuration map.
*
* If the property value isn't of type int a {@link ElasticsearchParseException} is thrown.
* If the property is missing an {@link ElasticsearchParseException} is thrown
* If the property value isn't of type double an {@link OpenSearchParseException} is thrown.
* If the property is missing an {@link OpenSearchParseException} is thrown
*/
public static Double readDoubleProperty(String processorType, String processorTag, Map<String, Object> configuration,
String propertyName) {
@ -213,7 +213,7 @@ public final class ConfigurationUtils {
/**
* Returns and removes the specified property of type list from the specified configuration map.
*
* If the property value isn't of type list an {@link ElasticsearchParseException} is thrown.
* If the property value isn't of type list an {@link OpenSearchParseException} is thrown.
*/
public static <T> List<T> readOptionalList(String processorType, String processorTag, Map<String, Object> configuration,
String propertyName) {
@ -227,8 +227,8 @@ public final class ConfigurationUtils {
/**
* Returns and removes the specified property of type list from the specified configuration map.
*
* If the property value isn't of type list an {@link ElasticsearchParseException} is thrown.
* If the property is missing an {@link ElasticsearchParseException} is thrown
* If the property value isn't of type list an {@link OpenSearchParseException} is thrown.
* If the property is missing an {@link OpenSearchParseException} is thrown
*/
public static <T> List<T> readList(String processorType, String processorTag, Map<String, Object> configuration,
String propertyName) {
@ -254,8 +254,8 @@ public final class ConfigurationUtils {
/**
* Returns and removes the specified property of type map from the specified configuration map.
*
* If the property value isn't of type map an {@link ElasticsearchParseException} is thrown.
* If the property is missing an {@link ElasticsearchParseException} is thrown
* If the property value isn't of type map an {@link OpenSearchParseException} is thrown.
* If the property is missing an {@link OpenSearchParseException} is thrown
*/
public static <T> Map<String, T> readMap(String processorType, String processorTag, Map<String, Object> configuration,
String propertyName) {
@ -270,7 +270,7 @@ public final class ConfigurationUtils {
/**
* Returns and removes the specified property of type map from the specified configuration map.
*
* If the property value isn't of type map an {@link ElasticsearchParseException} is thrown.
* If the property value isn't of type map an {@link OpenSearchParseException} is thrown.
*/
public static <T> Map<String, T> readOptionalMap(String processorType, String processorTag, Map<String, Object> configuration,
String propertyName) {
@ -313,7 +313,7 @@ public final class ConfigurationUtils {
} else {
msg = "[" + propertyName + "] " + reason;
}
ElasticsearchParseException exception = new ElasticsearchParseException(msg);
OpenSearchParseException exception = new OpenSearchParseException(msg);
addMetadataToException(exception, processorType, processorTag, propertyName);
return exception;
}
@ -429,7 +429,7 @@ public final class ConfigurationUtils {
try {
Processor processor = factory.create(processorFactories, tag, description, config);
if (config.isEmpty() == false) {
throw new ElasticsearchParseException("processor [{}] doesn't support one or more provided configuration parameters {}",
throw new OpenSearchParseException("processor [{}] doesn't support one or more provided configuration parameters {}",
type, Arrays.toString(config.keySet().toArray()));
}
if (onFailureProcessors.size() > 0 || ignoreFailure) {
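Taken together, these helpers implement a consume-and-validate pattern over the raw processor configuration map. A minimal, hedged sketch of how a caller might exercise the readStringProperty overload shown above; the processor type "set" and tag "tag1" are made up for illustration:

import java.util.HashMap;
import java.util.Map;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.ingest.ConfigurationUtils;

public class ReadPropertyExample {
    public static void main(String[] args) {
        Map<String, Object> config = new HashMap<>();
        config.put("field", "user.name");
        // Returns the value and removes the key from the configuration map.
        String field = ConfigurationUtils.readStringProperty("set", "tag1", config, "field", null);
        System.out.println(field); // user.name
        try {
            // Property absent and no default supplied: an OpenSearchParseException
            // carrying processor_type/processor_tag metadata headers is thrown.
            ConfigurationUtils.readStringProperty("set", "tag1", config, "value", null);
        } catch (OpenSearchParseException e) {
            System.out.println(e.getMessage());
        }
    }
}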

View File

@ -22,7 +22,7 @@ package org.elasticsearch.ingest;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
@ -708,7 +708,7 @@ public class IngestService implements ClusterStateApplier, ReportingService<Inge
try {
innerUpdatePipelines(newIngestMetadata);
} catch (ElasticsearchParseException e) {
} catch (OpenSearchParseException e) {
logger.warn("failed to update ingest pipelines", e);
}
}
@ -718,7 +718,7 @@ public class IngestService implements ClusterStateApplier, ReportingService<Inge
// Lazy initialize these variables in order to favour the most likely scenario that there are no pipeline changes:
Map<String, PipelineHolder> newPipelines = null;
List<ElasticsearchParseException> exceptions = null;
List<OpenSearchParseException> exceptions = null;
// Iterate over pipeline configurations in ingest metadata and construct a new pipeline if there is no pipeline
// or the pipeline configuration has been modified
for (PipelineConfiguration newConfiguration : newIngestMetadata.getPipelines().values()) {
@ -765,7 +765,7 @@ public class IngestService implements ClusterStateApplier, ReportingService<Inge
}
}
}
} catch (ElasticsearchParseException e) {
} catch (OpenSearchParseException e) {
Pipeline pipeline = substitutePipeline(newConfiguration.getId(), e);
newPipelines.put(newConfiguration.getId(), new PipelineHolder(newConfiguration, pipeline));
if (exceptions == null) {
@ -773,7 +773,7 @@ public class IngestService implements ClusterStateApplier, ReportingService<Inge
}
exceptions.add(e);
} catch (Exception e) {
ElasticsearchParseException parseException = new ElasticsearchParseException(
OpenSearchParseException parseException = new OpenSearchParseException(
"Error updating pipeline with id [" + newConfiguration.getId() + "]", e);
Pipeline pipeline = substitutePipeline(newConfiguration.getId(), parseException);
newPipelines.put(newConfiguration.getId(), new PipelineHolder(newConfiguration, pipeline));
@ -841,7 +841,7 @@ public class IngestService implements ClusterStateApplier, ReportingService<Inge
return processors;
}
private static Pipeline substitutePipeline(String id, ElasticsearchParseException e) {
private static Pipeline substitutePipeline(String id, OpenSearchParseException e) {
String tag = e.getHeaderKeys().contains("processor_tag") ? e.getHeader("processor_tag").get(0) : null;
String type = e.getHeaderKeys().contains("processor_type") ? e.getHeader("processor_type").get(0) : "unknown";
String errorMessage = "pipeline with id [" + id + "] could not be loaded, caused by [" + e.getDetailedMessage() + "]";

View File

@ -19,7 +19,7 @@
package org.elasticsearch.ingest;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.Nullable;
import java.util.Arrays;
@ -77,11 +77,11 @@ public final class Pipeline {
List<Processor> onFailureProcessors =
ConfigurationUtils.readProcessorConfigs(onFailureProcessorConfigs, scriptService, processorFactories);
if (config.isEmpty() == false) {
throw new ElasticsearchParseException("pipeline [" + id +
throw new OpenSearchParseException("pipeline [" + id +
"] doesn't support one or more provided configuration parameters " + Arrays.toString(config.keySet().toArray()));
}
if (onFailureProcessorConfigs != null && onFailureProcessors.isEmpty()) {
throw new ElasticsearchParseException("pipeline [" + id + "] cannot have an empty on_failure option defined");
throw new OpenSearchParseException("pipeline [" + id + "] cannot have an empty on_failure option defined");
}
CompoundProcessor compoundProcessor = new CompoundProcessor(false, Collections.unmodifiableList(processors),
Collections.unmodifiableList(onFailureProcessors));
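The isEmpty() check above is the leftover-keys validation pattern: every recognized key is removed from the map as it is parsed, so anything still present is unsupported. A standalone, hedged sketch of the same idea; this is not Pipeline.create itself, which also needs a ScriptService and processor factories:

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import org.elasticsearch.OpenSearchParseException;

public class LeftoverKeysExample {
    static void checkLeftovers(String id, Map<String, Object> config) {
        config.remove("description");
        config.remove("processors");
        config.remove("on_failure");
        if (config.isEmpty() == false) {
            throw new OpenSearchParseException("pipeline [" + id
                + "] doesn't support one or more provided configuration parameters "
                + Arrays.toString(config.keySet().toArray()));
        }
    }

    public static void main(String[] args) {
        Map<String, Object> config = new HashMap<>();
        config.put("description", "my pipeline");
        config.put("procesors", "[]"); // typo for "processors": stays behind
        try {
            checkLeftovers("my-id", config);
        } catch (OpenSearchParseException e) {
            System.out.println(e.getMessage()); // ... doesn't support ... [procesors]
        }
    }
}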

View File

@ -19,7 +19,7 @@
package org.elasticsearch.repositories;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
@ -700,7 +700,7 @@ public final class RepositoryData {
// A snapshotted index references a snapshot which does not exist in
// the list of snapshots. This can happen when multiple clusters in
// different versions create or delete snapshots in the same repository.
throw new ElasticsearchParseException("Detected a corrupted repository, index " + indexId
throw new OpenSearchParseException("Detected a corrupted repository, index " + indexId
+ " references an unknown snapshot uuid [" + uuid + "]");
}
snapshotIds.add(snapshotId);
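The throw above is a referential-integrity guard: every snapshot uuid that an index references must appear in the repository's own snapshot list. A hedged sketch of the same guard in isolation; the map and names are illustrative:

import java.util.Collections;
import java.util.Map;
import org.elasticsearch.OpenSearchParseException;

public class RepoIntegrityExample {
    static void requireKnownSnapshot(Map<String, ?> snapshotsByUuid, String indexId, String uuid) {
        if (snapshotsByUuid.containsKey(uuid) == false) {
            throw new OpenSearchParseException("Detected a corrupted repository, index " + indexId
                + " references an unknown snapshot uuid [" + uuid + "]");
        }
    }

    public static void main(String[] args) {
        Map<String, String> known = Collections.singletonMap("abc123", "snap-1");
        requireKnownSnapshot(known, "[logs/idx-uuid]", "abc123"); // passes
        try {
            requireKnownSnapshot(known, "[logs/idx-uuid]", "zzz999");
        } catch (OpenSearchParseException e) {
            System.out.println(e.getMessage());
        }
    }
}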

View File

@ -20,7 +20,7 @@
package org.elasticsearch.rest;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.Nullable;
@ -226,7 +226,7 @@ public class RestRequest implements ToXContent.Params {
*/
public final BytesReference requiredContent() {
if (hasContent() == false) {
throw new ElasticsearchParseException("request body is required");
throw new OpenSearchParseException("request body is required");
} else if (xContentType.get() == null) {
throw new IllegalStateException("unknown content type");
}
@ -416,7 +416,7 @@ public class RestRequest implements ToXContent.Params {
}
/**
* A parser for the contents of this request if there is a body, otherwise throws an {@link ElasticsearchParseException}. Use
* A parser for the contents of this request if there is a body, otherwise throws an {@link OpenSearchParseException}. Use
* {@link #applyContentParser(CheckedConsumer)} if you want to gracefully handle when the request doesn't have any contents. Use
* {@link #contentOrSourceParamParser()} for requests that support specifying the request body in the {@code source} param.
*/
@ -446,7 +446,7 @@ public class RestRequest implements ToXContent.Params {
/**
* A parser for the contents of this request if it has contents, otherwise a parser for the {@code source} parameter if there is one,
* otherwise throws an {@link ElasticsearchParseException}. Use {@link #withContentOrSourceParamParserOrNull(CheckedConsumer)} instead
* otherwise throws an {@link OpenSearchParseException}. Use {@link #withContentOrSourceParamParserOrNull(CheckedConsumer)} instead
 * if you need to handle the absence of request content gracefully.
*/
public final XContentParser contentOrSourceParamParser() throws IOException {
@ -480,7 +480,7 @@ public class RestRequest implements ToXContent.Params {
*/
public final Tuple<XContentType, BytesReference> contentOrSourceParam() {
if (hasContentOrSourceParam() == false) {
throw new ElasticsearchParseException("request body or source parameter is required");
throw new OpenSearchParseException("request body or source parameter is required");
} else if (hasContent()) {
return new Tuple<>(xContentType.get(), requiredContent());
}
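A handler typically picks one of these accessors based on whether a body is mandatory. A hedged, compile-level fragment; only the RestRequest and XContentParser calls are from this file, while the method wrapping them is hypothetical:

import java.io.IOException;
import java.util.Map;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.RestRequest;

public class BodyParsingExample {
    static Map<String, Object> parseBody(RestRequest request) throws IOException {
        // Throws OpenSearchParseException("request body or source parameter is required")
        // when neither a request body nor a ?source= parameter is present.
        try (XContentParser parser = request.contentOrSourceParamParser()) {
            return parser.map(); // real handlers usually parse specific fields instead
        }
    }
}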

View File

@ -19,7 +19,7 @@
package org.elasticsearch.script;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
@ -433,33 +433,33 @@ public final class Script implements ToXContentObject, Writeable {
if (parameterValue instanceof String || parameterValue == null) {
lang = (String) parameterValue;
} else {
throw new ElasticsearchParseException("Value must be of type String: [" + parameterName + "]");
throw new OpenSearchParseException("Value must be of type String: [" + parameterName + "]");
}
} else if (Script.PARAMS_PARSE_FIELD.match(parameterName, LoggingDeprecationHandler.INSTANCE)) {
if (parameterValue instanceof Map || parameterValue == null) {
params = (Map<String, Object>) parameterValue;
} else {
throw new ElasticsearchParseException("Value must be of type Map: [" + parameterName + "]");
throw new OpenSearchParseException("Value must be of type Map: [" + parameterName + "]");
}
} else if (Script.OPTIONS_PARSE_FIELD.match(parameterName, LoggingDeprecationHandler.INSTANCE)) {
if (parameterValue instanceof Map || parameterValue == null) {
options = (Map<String, String>) parameterValue;
} else {
throw new ElasticsearchParseException("Value must be of type Map: [" + parameterName + "]");
throw new OpenSearchParseException("Value must be of type Map: [" + parameterName + "]");
}
} else if (ScriptType.INLINE.getParseField().match(parameterName, LoggingDeprecationHandler.INSTANCE)) {
if (parameterValue instanceof String || parameterValue == null) {
script = (String) parameterValue;
type = ScriptType.INLINE;
} else {
throw new ElasticsearchParseException("Value must be of type String: [" + parameterName + "]");
throw new OpenSearchParseException("Value must be of type String: [" + parameterName + "]");
}
} else if (ScriptType.STORED.getParseField().match(parameterName, LoggingDeprecationHandler.INSTANCE)) {
if (parameterValue instanceof String || parameterValue == null) {
script = (String) parameterValue;
type = ScriptType.STORED;
} else {
throw new ElasticsearchParseException("Value must be of type String: [" + parameterName + "]");
throw new OpenSearchParseException("Value must be of type String: [" + parameterName + "]");
}
} else {
deprecationLogger.deprecate("script_unsupported_fields", "script section does not support ["
@ -467,7 +467,7 @@ public final class Script implements ToXContentObject, Writeable {
}
}
if (script == null) {
throw new ElasticsearchParseException("Expected one of [{}] or [{}] fields, but found none",
throw new OpenSearchParseException("Expected one of [{}] or [{}] fields, but found none",
ScriptType.INLINE.getParseField().getPreferredName(), ScriptType.STORED.getParseField().getPreferredName());
}
assert type != null : "if script is not null, type should definitely not be null";
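Script.parse(Map) above is strict about the value type of each recognized key. A hedged sketch of the happy path and one of the type failures; the painless source and params are illustrative:

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.script.Script;

public class ScriptParseExample {
    public static void main(String[] args) {
        Map<String, Object> config = new HashMap<>();
        config.put("lang", "painless");
        config.put("source", "ctx.field = params.value");
        config.put("params", Collections.singletonMap("value", 42));
        Script script = Script.parse(config);
        System.out.println(script.getIdOrCode()); // ctx.field = params.value

        Map<String, Object> bad = new HashMap<>();
        bad.put("source", 42); // not a String
        try {
            Script.parse(bad);
        } catch (OpenSearchParseException e) {
            System.out.println(e.getMessage()); // Value must be of type String: [source]
        }
    }
}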

View File

@ -20,7 +20,7 @@
package org.elasticsearch.search;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.common.Nullable;
@ -388,7 +388,7 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
this.source = CompressorFactory.uncompressIfNeeded(this.source);
return this.source;
} catch (IOException e) {
throw new ElasticsearchParseException("failed to decompress source", e);
throw new OpenSearchParseException("failed to decompress source", e);
}
}
@ -419,7 +419,7 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
try {
return XContentHelper.convertToJson(getSourceRef(), false);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to convert source to a json string");
throw new OpenSearchParseException("failed to convert source to a json string");
}
}
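getSourceAsString above just funnels the (possibly compressed) source bytes through XContentHelper.convertToJson. A hedged standalone sketch of that conversion; the sample document is illustrative:

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentHelper;

public class SourceToJsonExample {
    public static void main(String[] args) throws Exception {
        BytesArray source = new BytesArray("{\"user\":\"kimchy\"}");
        // reformatJson=false returns JSON input unchanged instead of re-serializing it.
        System.out.println(XContentHelper.convertToJson(source, false));
    }
}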

View File

@ -19,7 +19,7 @@
package org.elasticsearch.search.aggregations;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -174,7 +174,7 @@ public abstract class Aggregator extends BucketCollector implements Releasable {
* also only add objects which can be serialized with
* {@link StreamOutput#writeGenericValue(Object)} and
* {@link XContentBuilder#value(Object)}. And they'll have an integration
 * test.
*/
public void collectDebugInfo(BiConsumer<String, Object> add) {}
@ -213,7 +213,7 @@ public abstract class Aggregator extends BucketCollector implements Releasable {
return mode;
}
}
throw new ElasticsearchParseException("no [{}] found for value [{}]", KEY.getPreferredName(), value);
throw new OpenSearchParseException("no [{}] found for value [{}]", KEY.getPreferredName(), value);
}
public static SubAggCollectionMode readFromStream(StreamInput in) throws IOException {
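The "{}" placeholders in the message above are expanded by the exception itself, LoggerMessageFormat-style. A hedged micro-example of that formatting; the field name and value are illustrative:

import org.elasticsearch.OpenSearchParseException;

public class PlaceholderExample {
    public static void main(String[] args) {
        OpenSearchParseException e =
            new OpenSearchParseException("no [{}] found for value [{}]", "collect_mode", "no_such_mode");
        System.out.println(e.getMessage()); // no [collect_mode] found for value [no_such_mode]
    }
}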

View File

@ -20,7 +20,7 @@ package org.elasticsearch.search.aggregations.bucket.geogrid;
import org.apache.lucene.geo.GeoEncodingUtils;
import org.apache.lucene.util.SloppyMath;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.OpenSearchParseException;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.util.ESSloppyMath;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
@ -89,7 +89,7 @@ public final class GeoTileUtils {
* @param parser {@link XContentParser} to parse the value from
* @return int representing precision
*/
static int parsePrecision(XContentParser parser) throws IOException, ElasticsearchParseException {
static int parsePrecision(XContentParser parser) throws IOException, OpenSearchParseException {
final Object node = parser.currentToken().equals(XContentParser.Token.VALUE_NUMBER)
? Integer.valueOf(parser.intValue())
: parser.text();

Some files were not shown because too many files have changed in this diff.