Always support ignore_malformed in the same way (#90565)
This makes sure that all field types that support `ignore_malformed` do so in the same way. Production changes: * Every mapper has an `ignoreMalformed` method that must return `true` if the `ignore_malformed` mapping parameter was configured. It defaults to `false` because many fields either don't have a concept of a "malformed" value or don't have the ability to ignore malformed values. * Fix the `scaled_float` field to store its field name in `_ignored` if it ignores any malformed values. This is how all other field mappers work. Test changes: * `MapperTestCase` forces subclasses to declare whether they `supportIgnoreMalformed` or not. * If `MapperTestCase` subclasses `supportIgnoreMalformed`, they must define some `exampleMalformedValues`. * `MapperTestCase` always grows three new tests: * One that creates a field without setting `ignore_malformed` and verifies that all `exampleMalformedValues` throw the expected errors. * One that explicitly configures `ignore_malformed` to false and, if `supportIgnoreMalformed`, verifies the errors again. If not `supportIgnoreMalformed`, it verifies that the parameter is unknown. * One that explicitly configures `ignore_malformed` to true and, if `supportIgnoreMalformed`, verifies that parsing doesn't produce errors and correctly produces `_ignored`. If not `supportIgnoreMalformed`, it verifies that the parameter is unknown. * Moved some subclasses of `MapperTestCase` from `internalClusterTests` to `tests`. This isn't strictly required, but that's the right place for them.
This commit is contained in:
parent
419a7db89a
commit
f4fad2548f
|
@ -178,6 +178,6 @@ public class DataStreamTimestampFieldMapperTests extends MetadataMapperTestCase
|
|||
}));
|
||||
assertThat(mapperService, notNullValue());
|
||||
assertThat(mapperService.documentMapper().mappers().getMapper("@timestamp"), notNullValue());
|
||||
assertThat(((DateFieldMapper) mapperService.documentMapper().mappers().getMapper("@timestamp")).getIgnoreMalformed(), is(false));
|
||||
assertThat(((DateFieldMapper) mapperService.documentMapper().mappers().getMapper("@timestamp")).ignoreMalformed(), is(false));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -85,10 +85,6 @@ public class LegacyGeoShapeFieldMapperTests extends MapperTestCase {
|
|||
LegacyGeoShapeFieldMapper gsfm = (LegacyGeoShapeFieldMapper) m;
|
||||
assertEquals(Orientation.RIGHT, gsfm.orientation());
|
||||
});
|
||||
checker.registerUpdateCheck(b -> b.field("ignore_malformed", true), m -> {
|
||||
LegacyGeoShapeFieldMapper gpfm = (LegacyGeoShapeFieldMapper) m;
|
||||
assertTrue(gpfm.ignoreMalformed());
|
||||
});
|
||||
checker.registerUpdateCheck(b -> b.field("ignore_z_value", false), m -> {
|
||||
LegacyGeoShapeFieldMapper gpfm = (LegacyGeoShapeFieldMapper) m;
|
||||
assertFalse(gpfm.ignoreZValue());
|
||||
|
@ -232,27 +228,14 @@ public class LegacyGeoShapeFieldMapperTests extends MapperTestCase {
|
|||
assertFieldWarnings("strategy", "tree");
|
||||
}
|
||||
|
||||
/**
|
||||
* Test that ignore_malformed parameter correctly parses
|
||||
*/
|
||||
public void testIgnoreMalformedParsing() throws IOException {
|
||||
DocumentMapper mapper = createDocumentMapper(
|
||||
fieldMapping(b -> b.field("type", "geo_shape").field("tree", "quadtree").field("ignore_malformed", true))
|
||||
);
|
||||
Mapper fieldMapper = mapper.mappers().getMapper("field");
|
||||
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
|
||||
boolean ignoreMalformed = ((LegacyGeoShapeFieldMapper) fieldMapper).ignoreMalformed();
|
||||
assertThat(ignoreMalformed, equalTo(true));
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return true;
|
||||
}
|
||||
|
||||
// explicit false ignore_malformed test
|
||||
mapper = createDocumentMapper(
|
||||
fieldMapping(b -> b.field("type", "geo_shape").field("tree", "quadtree").field("ignore_malformed", false))
|
||||
);
|
||||
fieldMapper = mapper.mappers().getMapper("field");
|
||||
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
|
||||
ignoreMalformed = ((LegacyGeoShapeFieldMapper) fieldMapper).ignoreMalformed();
|
||||
assertThat(ignoreMalformed, equalTo(false));
|
||||
assertFieldWarnings("tree", "strategy");
|
||||
@Override
|
||||
protected List<ExampleMalformedValue> exampleMalformedValues() {
|
||||
return List.of();
|
||||
}
|
||||
|
||||
public void testGeohashConfiguration() throws IOException {
|
||||
|
|
|
@ -431,7 +431,8 @@ public class ScaledFloatFieldMapper extends FieldMapper {
|
|||
return coerce.value();
|
||||
}
|
||||
|
||||
boolean ignoreMalformed() {
|
||||
@Override
|
||||
public boolean ignoreMalformed() {
|
||||
return ignoreMalformed.value();
|
||||
}
|
||||
|
||||
|
@ -464,6 +465,7 @@ public class ScaledFloatFieldMapper extends FieldMapper {
|
|||
numericValue = parse(parser, coerce.value());
|
||||
} catch (IllegalArgumentException e) {
|
||||
if (ignoreMalformed.value()) {
|
||||
context.addIgnoredField(mappedFieldType.name());
|
||||
return;
|
||||
} else {
|
||||
throw e;
|
||||
|
@ -487,6 +489,7 @@ public class ScaledFloatFieldMapper extends FieldMapper {
|
|||
double doubleValue = numericValue.doubleValue();
|
||||
if (Double.isFinite(doubleValue) == false) {
|
||||
if (ignoreMalformed.value()) {
|
||||
context.addIgnoredField(mappedFieldType.name());
|
||||
return;
|
||||
} else {
|
||||
// since we encode to a long, we have no way to carry NaNs and infinities
|
||||
|
|
|
@ -207,6 +207,11 @@ public class MatchOnlyTextFieldMapperTests extends MapperTestCase {
|
|||
assumeFalse("We don't have a way to assert things here", true);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected SyntheticSourceSupport syntheticSourceSupport() {
|
||||
return new MatchOnlyTextSyntheticSourceSupport();
|
||||
|
|
|
@ -64,6 +64,11 @@ public class RankFeatureFieldMapperTests extends MapperTestCase {
|
|||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Collection<? extends Plugin> getPlugins() {
|
||||
return List.of(new MapperExtrasPlugin());
|
||||
|
|
|
@ -58,6 +58,11 @@ public class RankFeaturesFieldMapperTests extends MapperTestCase {
|
|||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void registerParameters(ParameterChecker checker) throws IOException {
|
||||
checker.registerConflictCheck("positive_score_impact", b -> b.field("positive_score_impact", false));
|
||||
|
|
|
@ -28,7 +28,6 @@ import org.elasticsearch.xcontent.XContentType;
|
|||
import org.junit.AssumptionViolatedException;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
|
||||
|
@ -64,10 +63,6 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase {
|
|||
checker.registerConflictCheck("store", b -> b.field("store", true));
|
||||
checker.registerConflictCheck("null_value", b -> b.field("null_value", 1));
|
||||
checker.registerUpdateCheck(b -> b.field("coerce", false), m -> assertFalse(((ScaledFloatFieldMapper) m).coerce()));
|
||||
checker.registerUpdateCheck(
|
||||
b -> b.field("ignore_malformed", true),
|
||||
m -> assertTrue(((ScaledFloatFieldMapper) m).ignoreMalformed())
|
||||
);
|
||||
}
|
||||
|
||||
public void testExistsQueryDocValuesDisabled() throws IOException {
|
||||
|
@ -217,40 +212,19 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase {
|
|||
assertThat(e.getCause().getMessage(), containsString("passed as String"));
|
||||
}
|
||||
|
||||
public void testIgnoreMalformed() throws Exception {
|
||||
doTestIgnoreMalformed("a", "For input string: \"a\"");
|
||||
|
||||
List<String> values = Arrays.asList("NaN", "Infinity", "-Infinity");
|
||||
for (String value : values) {
|
||||
doTestIgnoreMalformed(value, "[scaled_float] only supports finite values, but got [" + value + "]");
|
||||
}
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return true;
|
||||
}
|
||||
|
||||
private void doTestIgnoreMalformed(String value, String exceptionMessageContains) throws Exception {
|
||||
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
|
||||
ThrowingRunnable runnable = () -> mapper.parse(
|
||||
new SourceToParse(
|
||||
"1",
|
||||
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", value).endObject()),
|
||||
XContentType.JSON
|
||||
)
|
||||
@Override
|
||||
protected List<ExampleMalformedValue> exampleMalformedValues() {
|
||||
return List.of(
|
||||
exampleMalformedValue("a").errorMatches("For input string: \"a\""),
|
||||
exampleMalformedValue("NaN").errorMatches("[scaled_float] only supports finite values, but got [NaN]"),
|
||||
exampleMalformedValue("Infinity").errorMatches("[scaled_float] only supports finite values, but got [Infinity]"),
|
||||
exampleMalformedValue("-Infinity").errorMatches("[scaled_float] only supports finite values, but got [-Infinity]")
|
||||
);
|
||||
MapperParsingException e = expectThrows(MapperParsingException.class, runnable);
|
||||
assertThat(e.getCause().getMessage(), containsString(exceptionMessageContains));
|
||||
|
||||
DocumentMapper mapper2 = createDocumentMapper(
|
||||
fieldMapping(b -> b.field("type", "scaled_float").field("scaling_factor", 10.0).field("ignore_malformed", true))
|
||||
);
|
||||
ParsedDocument doc = mapper2.parse(
|
||||
new SourceToParse(
|
||||
"1",
|
||||
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", value).endObject()),
|
||||
XContentType.JSON
|
||||
)
|
||||
);
|
||||
|
||||
IndexableField[] fields = doc.rootDoc().getFields("field");
|
||||
assertEquals(0, fields.length);
|
||||
}
|
||||
|
||||
public void testNullValue() throws IOException {
|
||||
|
|
|
@ -795,6 +795,11 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase {
|
|||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected SyntheticSourceSupport syntheticSourceSupport() {
|
||||
throw new AssumptionViolatedException("not supported");
|
||||
|
|
|
@ -189,6 +189,11 @@ public class TokenCountFieldMapperTests extends MapperTestCase {
|
|||
b.field("type", "token_count").field("analyzer", "standard");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected SyntheticSourceSupport syntheticSourceSupport() {
|
||||
throw new AssumptionViolatedException("not supported");
|
||||
|
|
|
@ -302,6 +302,11 @@ public class ICUCollationKeywordFieldMapperTests extends MapperTestCase {
|
|||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected SyntheticSourceSupport syntheticSourceSupport() {
|
||||
throw new AssumptionViolatedException("not supported");
|
||||
|
|
|
@ -9,7 +9,6 @@ import org.elasticsearch.gradle.internal.info.BuildParams
|
|||
*/
|
||||
apply plugin: 'elasticsearch.internal-yaml-rest-test'
|
||||
apply plugin: 'elasticsearch.yaml-rest-compat-test'
|
||||
apply plugin: 'elasticsearch.internal-cluster-test'
|
||||
|
||||
esplugin {
|
||||
description 'The Mapper Annotated_text plugin adds support for text fields with markup used to inject annotation tokens into the index.'
|
||||
|
@ -17,15 +16,11 @@ esplugin {
|
|||
}
|
||||
|
||||
if (BuildParams.isSnapshotBuild() == false) {
|
||||
tasks.named("internalClusterTest").configure {
|
||||
tasks.named("test").configure {
|
||||
systemProperty 'es.index_mode_feature_flag_registered', 'true'
|
||||
}
|
||||
}
|
||||
|
||||
tasks.named('internalClusterTestTestingConventions').configure {
|
||||
baseClass 'org.elasticsearch.index.mapper.MapperTestCase'
|
||||
}
|
||||
|
||||
restResources {
|
||||
restApi {
|
||||
include '_common', 'indices', 'index', 'search'
|
||||
|
|
|
@ -589,6 +589,11 @@ public class AnnotatedTextFieldMapperTests extends MapperTestCase {
|
|||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected SyntheticSourceSupport syntheticSourceSupport() {
|
||||
throw new AssumptionViolatedException("not supported");
|
|
@ -64,6 +64,11 @@ public class Murmur3FieldMapperTests extends MapperTestCase {
|
|||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected SyntheticSourceSupport syntheticSourceSupport() {
|
||||
throw new AssumptionViolatedException("not supported");
|
||||
|
|
|
@ -206,6 +206,7 @@ public abstract class AbstractGeometryFieldMapper<T> extends FieldMapper {
|
|||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean ignoreMalformed() {
|
||||
return ignoreMalformed.value();
|
||||
}
|
||||
|
|
|
@ -148,7 +148,7 @@ public class DataStreamTimestampFieldMapper extends MetadataFieldMapper {
|
|||
"data stream timestamp field [" + DEFAULT_PATH + "] has disallowed [null_value] attribute specified"
|
||||
);
|
||||
}
|
||||
if (dateFieldMapper.getIgnoreMalformed()) {
|
||||
if (dateFieldMapper.ignoreMalformed()) {
|
||||
throw new IllegalArgumentException(
|
||||
"data stream timestamp field [" + DEFAULT_PATH + "] has disallowed [ignore_malformed] attribute specified"
|
||||
);
|
||||
|
|
|
@ -926,7 +926,8 @@ public final class DateFieldMapper extends FieldMapper {
|
|||
this.scriptValues.valuesForDoc(searchLookup, readerContext, doc, v -> indexValue(documentParserContext, v));
|
||||
}
|
||||
|
||||
public boolean getIgnoreMalformed() {
|
||||
@Override
|
||||
public boolean ignoreMalformed() {
|
||||
return ignoreMalformed;
|
||||
}
|
||||
|
||||
|
|
|
@ -134,6 +134,16 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
|
|||
return multiFields;
|
||||
}
|
||||
|
||||
/**
|
||||
* Will this field ignore malformed values for this field and accept the
|
||||
* document ({@code true}) or will it reject documents with malformed
|
||||
* values for this field ({@code false}). Some fields don't have a concept
|
||||
* of "malformed" and will return {@code false} here.
|
||||
*/
|
||||
public boolean ignoreMalformed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Whether this mapper can handle an array value during document parsing. If true,
|
||||
* when an array is encountered during parsing, the document parser will pass the
|
||||
|
|
|
@ -456,7 +456,8 @@ public class IpFieldMapper extends FieldMapper {
|
|||
this.dimension = builder.dimension.getValue();
|
||||
}
|
||||
|
||||
boolean ignoreMalformed() {
|
||||
@Override
|
||||
public boolean ignoreMalformed() {
|
||||
return ignoreMalformed;
|
||||
}
|
||||
|
||||
|
|
|
@ -1647,7 +1647,8 @@ public class NumberFieldMapper extends FieldMapper {
|
|||
return coerce.value();
|
||||
}
|
||||
|
||||
boolean ignoreMalformed() {
|
||||
@Override
|
||||
public boolean ignoreMalformed() {
|
||||
return ignoreMalformed.value();
|
||||
}
|
||||
|
||||
|
|
|
@ -146,6 +146,11 @@ public class BinaryFieldMapperTests extends MapperTestCase {
|
|||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected SyntheticSourceSupport syntheticSourceSupport() {
|
||||
throw new AssumptionViolatedException("not supported");
|
||||
|
|
|
@ -202,6 +202,11 @@ public class BooleanFieldMapperTests extends MapperTestCase {
|
|||
assertThat(e.getMessage(), equalTo("Failed to parse mapping: Field [null_value] cannot be set in conjunction with field [script]"));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected SyntheticSourceSupport syntheticSourceSupport() {
|
||||
return new SyntheticSourceSupport() {
|
||||
|
|
|
@ -92,6 +92,11 @@ public class CompletionFieldMapperTests extends MapperTestCase {
|
|||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void registerParameters(ParameterChecker checker) throws IOException {
|
||||
checker.registerConflictCheck("analyzer", b -> b.field("analyzer", "standard"));
|
||||
|
|
|
@ -14,9 +14,9 @@ import org.elasticsearch.Version;
|
|||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.time.DateFormatter;
|
||||
import org.elasticsearch.common.time.DateUtils;
|
||||
import org.elasticsearch.core.CheckedConsumer;
|
||||
import org.elasticsearch.core.Tuple;
|
||||
import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType;
|
||||
import org.elasticsearch.index.termvectors.TermVectorsService;
|
||||
import org.elasticsearch.script.DateFieldScript;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
|
@ -65,7 +65,6 @@ public class DateFieldMapperTests extends MapperTestCase {
|
|||
checker.registerConflictCheck("format", b -> b.field("format", "yyyy-MM-dd"));
|
||||
checker.registerConflictCheck("locale", b -> b.field("locale", "es"));
|
||||
checker.registerConflictCheck("null_value", b -> b.field("null_value", "34500000"));
|
||||
checker.registerUpdateCheck(b -> b.field("ignore_malformed", true), m -> assertTrue(((DateFieldMapper) m).getIgnoreMalformed()));
|
||||
}
|
||||
|
||||
public void testExistsQueryDocValuesDisabled() throws IOException {
|
||||
|
@ -143,39 +142,25 @@ public class DateFieldMapperTests extends MapperTestCase {
|
|||
assertEquals(1457654400000L, storedField.numericValue().longValue());
|
||||
}
|
||||
|
||||
public void testIgnoreMalformed() throws IOException {
|
||||
testIgnoreMalformedForValue(
|
||||
"2016-03-99",
|
||||
"failed to parse date field [2016-03-99] with format [strict_date_optional_time||epoch_millis]",
|
||||
"strict_date_optional_time||epoch_millis"
|
||||
);
|
||||
testIgnoreMalformedForValue("-522000000", "long overflow", "date_optional_time");
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return true;
|
||||
}
|
||||
|
||||
private void testIgnoreMalformedForValue(String value, String expectedCause, String dateFormat) throws IOException {
|
||||
|
||||
DocumentMapper mapper = createDocumentMapper(fieldMapping((builder) -> dateFieldMapping(builder, dateFormat)));
|
||||
|
||||
MapperParsingException e = expectThrows(MapperParsingException.class, () -> mapper.parse(source(b -> b.field("field", value))));
|
||||
assertThat(e.getMessage(), containsString("failed to parse field [field] of type [date]"));
|
||||
assertThat(e.getMessage(), containsString("Preview of field's value: '" + value + "'"));
|
||||
assertThat(e.getCause().getMessage(), containsString(expectedCause));
|
||||
|
||||
DocumentMapper mapper2 = createDocumentMapper(
|
||||
fieldMapping(b -> b.field("type", "date").field("format", dateFormat).field("ignore_malformed", true))
|
||||
@Override
|
||||
protected List<ExampleMalformedValue> exampleMalformedValues() {
|
||||
return List.of(
|
||||
exampleMalformedValue("2016-03-99").mapping(mappingWithFormat("strict_date_optional_time||epoch_millis"))
|
||||
.errorMatches("failed to parse date field [2016-03-99] with format [strict_date_optional_time||epoch_millis]"),
|
||||
exampleMalformedValue("-522000000").mapping(mappingWithFormat("date_optional_time")).errorMatches("long overflow")
|
||||
);
|
||||
|
||||
ParsedDocument doc = mapper2.parse(source(b -> b.field("field", value)));
|
||||
|
||||
IndexableField[] fields = doc.rootDoc().getFields("field");
|
||||
assertEquals(0, fields.length);
|
||||
assertArrayEquals(new String[] { "field" }, TermVectorsService.getValues(doc.rootDoc().getFields("_ignored")));
|
||||
}
|
||||
|
||||
private void dateFieldMapping(XContentBuilder builder, String dateFormat) throws IOException {
|
||||
builder.field("type", "date");
|
||||
builder.field("format", dateFormat);
|
||||
|
||||
private CheckedConsumer<XContentBuilder, IOException> mappingWithFormat(String dateFormat) {
|
||||
return b -> {
|
||||
minimalMapping(b);
|
||||
b.field("format", dateFormat);
|
||||
};
|
||||
}
|
||||
|
||||
public void testChangeFormat() throws IOException {
|
||||
|
|
|
@ -53,10 +53,6 @@ public class GeoPointFieldMapperTests extends MapperTestCase {
|
|||
|
||||
@Override
|
||||
protected void registerParameters(ParameterChecker checker) throws IOException {
|
||||
checker.registerUpdateCheck(b -> b.field("ignore_malformed", true), m -> {
|
||||
GeoPointFieldMapper gpfm = (GeoPointFieldMapper) m;
|
||||
assertTrue(gpfm.ignoreMalformed());
|
||||
});
|
||||
checker.registerUpdateCheck(b -> b.field("ignore_z_value", false), m -> {
|
||||
GeoPointFieldMapper gpfm = (GeoPointFieldMapper) m;
|
||||
assertFalse(gpfm.ignoreZValue());
|
||||
|
@ -377,53 +373,33 @@ public class GeoPointFieldMapperTests extends MapperTestCase {
|
|||
assertThat(nullValue, equalTo(new GeoPoint(89, 1)));
|
||||
}
|
||||
|
||||
public void testInvalidGeohashIgnored() throws Exception {
|
||||
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_point").field("ignore_malformed", "true")));
|
||||
ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234.333")));
|
||||
assertThat(doc.rootDoc().getField("field"), nullValue());
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return true;
|
||||
}
|
||||
|
||||
public void testInvalidGeohashNotIgnored() throws Exception {
|
||||
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
|
||||
MapperParsingException e = expectThrows(
|
||||
MapperParsingException.class,
|
||||
() -> mapper.parse(source(b -> b.field("field", "1234.333")))
|
||||
);
|
||||
assertThat(e.getMessage(), containsString("failed to parse field [field] of type [geo_point]"));
|
||||
assertThat(e.getRootCause().getMessage(), containsString("unsupported symbol [.] in geohash [1234.333]"));
|
||||
}
|
||||
|
||||
public void testInvalidGeopointValuesIgnored() throws Exception {
|
||||
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_point").field("ignore_malformed", "true")));
|
||||
|
||||
assertThat(mapper.parse(source(b -> b.field("field", "1234.333"))).rootDoc().getField("field"), nullValue());
|
||||
assertThat(
|
||||
mapper.parse(source(b -> b.startObject("field").field("lat", "-").field("lon", 1.3).endObject())).rootDoc().getField("field"),
|
||||
nullValue()
|
||||
);
|
||||
assertThat(
|
||||
mapper.parse(source(b -> b.startObject("field").field("lat", 1.3).field("lon", "-").endObject())).rootDoc().getField("field"),
|
||||
nullValue()
|
||||
);
|
||||
assertThat(mapper.parse(source(b -> b.field("field", "-,1.3"))).rootDoc().getField("field"), nullValue());
|
||||
assertThat(mapper.parse(source(b -> b.field("field", "1.3,-"))).rootDoc().getField("field"), nullValue());
|
||||
assertThat(
|
||||
mapper.parse(source(b -> b.startObject("field").field("lat", "NaN").field("lon", 1.2).endObject())).rootDoc().getField("field"),
|
||||
nullValue()
|
||||
);
|
||||
assertThat(
|
||||
mapper.parse(source(b -> b.startObject("field").field("lat", 1.2).field("lon", "NaN").endObject())).rootDoc().getField("field"),
|
||||
nullValue()
|
||||
);
|
||||
assertThat(mapper.parse(source(b -> b.field("field", "1.3,NaN"))).rootDoc().getField("field"), nullValue());
|
||||
assertThat(mapper.parse(source(b -> b.field("field", "NaN,1.3"))).rootDoc().getField("field"), nullValue());
|
||||
assertThat(
|
||||
mapper.parse(source(b -> b.startObject("field").nullField("lat").field("lon", 1.2).endObject())).rootDoc().getField("field"),
|
||||
nullValue()
|
||||
);
|
||||
assertThat(
|
||||
mapper.parse(source(b -> b.startObject("field").field("lat", 1.2).nullField("lon").endObject())).rootDoc().getField("field"),
|
||||
nullValue()
|
||||
@Override
|
||||
protected List<ExampleMalformedValue> exampleMalformedValues() {
|
||||
return List.of(
|
||||
exampleMalformedValue("1234.333").errorMatches("unsupported symbol [.] in geohash [1234.333]"),
|
||||
exampleMalformedValue(b -> b.startObject().field("lat", "-").field("lon", 1.3).endObject()).errorMatches(
|
||||
"[latitude] must be a valid double value"
|
||||
),
|
||||
exampleMalformedValue(b -> b.startObject().field("lat", 1.3).field("lon", "-").endObject()).errorMatches(
|
||||
"[longitude] must be a valid double value"
|
||||
),
|
||||
exampleMalformedValue("-,1.3").errorMatches("latitude must be a number"),
|
||||
exampleMalformedValue("1.3,-").errorMatches("longitude must be a number"),
|
||||
exampleMalformedValue(b -> b.startObject().field("lat", "NaN").field("lon", 1.2).endObject()).errorMatches("Required [lat]"),
|
||||
exampleMalformedValue(b -> b.startObject().field("lat", 1.2).field("lon", "NaN").endObject()).errorMatches("Required [lon]"),
|
||||
exampleMalformedValue("NaN,1.3").errorMatches("invalid latitude NaN; must be between -90.0 and 90.0"),
|
||||
exampleMalformedValue("1.3,NaN").errorMatches("invalid longitude NaN; must be between -180.0 and 180.0"),
|
||||
exampleMalformedValue(b -> b.startObject().nullField("lat").field("lon", "NaN").endObject()).errorMatches(
|
||||
"latitude must be a number"
|
||||
),
|
||||
exampleMalformedValue(b -> b.startObject().field("lat", "NaN").nullField("lon").endObject()).errorMatches(
|
||||
"longitude must be a number"
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -33,10 +33,6 @@ public class GeoShapeFieldMapperTests extends MapperTestCase {
|
|||
GeoShapeFieldMapper gsfm = (GeoShapeFieldMapper) m;
|
||||
assertEquals(Orientation.RIGHT, gsfm.orientation());
|
||||
});
|
||||
checker.registerUpdateCheck(b -> b.field("ignore_malformed", true), m -> {
|
||||
GeoShapeFieldMapper gpfm = (GeoShapeFieldMapper) m;
|
||||
assertTrue(gpfm.ignoreMalformed());
|
||||
});
|
||||
checker.registerUpdateCheck(b -> b.field("ignore_z_value", false), m -> {
|
||||
GeoShapeFieldMapper gpfm = (GeoShapeFieldMapper) m;
|
||||
assertFalse(gpfm.ignoreZValue());
|
||||
|
@ -138,22 +134,20 @@ public class GeoShapeFieldMapperTests extends MapperTestCase {
|
|||
assertThat(ignoreZValue, equalTo(false));
|
||||
}
|
||||
|
||||
/**
|
||||
* Test that ignore_malformed parameter correctly parses
|
||||
*/
|
||||
public void testIgnoreMalformedParsing() throws IOException {
|
||||
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_shape").field("ignore_malformed", true)));
|
||||
Mapper fieldMapper = mapper.mappers().getMapper("field");
|
||||
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
|
||||
boolean ignoreMalformed = ((GeoShapeFieldMapper) fieldMapper).ignoreMalformed();
|
||||
assertThat(ignoreMalformed, equalTo(true));
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return true;
|
||||
}
|
||||
|
||||
// explicit false ignore_malformed test
|
||||
mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_shape").field("ignore_malformed", false)));
|
||||
fieldMapper = mapper.mappers().getMapper("field");
|
||||
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
|
||||
ignoreMalformed = ((GeoShapeFieldMapper) fieldMapper).ignoreMalformed();
|
||||
assertThat(ignoreMalformed, equalTo(false));
|
||||
@Override
|
||||
protected List<ExampleMalformedValue> exampleMalformedValues() {
|
||||
return List.of(
|
||||
exampleMalformedValue("Bad shape").errorMatches("Unknown geometry type: bad"),
|
||||
exampleMalformedValue(
|
||||
"POLYGON ((18.9401790919516 -33.9681188869036, 18.9401790919516 -33.9681188869036, 18.9401790919517 "
|
||||
+ "-33.9681188869036, 18.9401790919517 -33.9681188869036, 18.9401790919516 -33.9681188869036))"
|
||||
).errorMatches("at least three non-collinear points required")
|
||||
);
|
||||
}
|
||||
|
||||
public void testGeoShapeMapperMerge() throws Exception {
|
||||
|
|
|
@ -19,7 +19,6 @@ import org.elasticsearch.Version;
|
|||
import org.elasticsearch.common.network.InetAddresses;
|
||||
import org.elasticsearch.common.network.NetworkAddress;
|
||||
import org.elasticsearch.core.Tuple;
|
||||
import org.elasticsearch.index.termvectors.TermVectorsService;
|
||||
import org.elasticsearch.script.IpFieldScript;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.xcontent.XContentBuilder;
|
||||
|
@ -52,8 +51,6 @@ public class IpFieldMapperTests extends MapperTestCase {
|
|||
checker.registerConflictCheck("index", b -> b.field("index", false));
|
||||
checker.registerConflictCheck("store", b -> b.field("store", true));
|
||||
checker.registerConflictCheck("null_value", b -> b.field("null_value", "::1"));
|
||||
checker.registerUpdateCheck(b -> b.field("ignore_malformed", false), m -> assertFalse(((IpFieldMapper) m).ignoreMalformed()));
|
||||
|
||||
registerDimensionChecks(checker);
|
||||
}
|
||||
|
||||
|
@ -149,24 +146,14 @@ public class IpFieldMapperTests extends MapperTestCase {
|
|||
assertEquals(new BytesRef(InetAddressPoint.encode(InetAddress.getByName("::1"))), storedField.binaryValue());
|
||||
}
|
||||
|
||||
public void testIgnoreMalformed() throws Exception {
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return true;
|
||||
}
|
||||
|
||||
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
|
||||
|
||||
ThrowingRunnable runnable = () -> mapper.parse(source(b -> b.field("field", ":1")));
|
||||
MapperParsingException e = expectThrows(MapperParsingException.class, runnable);
|
||||
assertThat(e.getCause().getMessage(), containsString("':1' is not an IP string literal"));
|
||||
|
||||
DocumentMapper mapper2 = createDocumentMapper(fieldMapping(b -> {
|
||||
b.field("type", "ip");
|
||||
b.field("ignore_malformed", true);
|
||||
}));
|
||||
|
||||
ParsedDocument doc = mapper2.parse(source(b -> b.field("field", ":1")));
|
||||
|
||||
IndexableField[] fields = doc.rootDoc().getFields("field");
|
||||
assertEquals(0, fields.length);
|
||||
assertArrayEquals(new String[] { "field" }, TermVectorsService.getValues(doc.rootDoc().getFields("_ignored")));
|
||||
@Override
|
||||
protected List<ExampleMalformedValue> exampleMalformedValues() {
|
||||
return List.of(exampleMalformedValue(":1").errorMatches("':1' is not an IP string literal"));
|
||||
}
|
||||
|
||||
public void testNullValue() throws IOException {
|
||||
|
|
|
@ -53,6 +53,11 @@ public class IpRangeFieldMapperTests extends RangeFieldMapperTests {
|
|||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public void testStoreCidr() throws Exception {
|
||||
|
||||
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "ip_range").field("store", true)));
|
||||
|
|
|
@ -631,6 +631,11 @@ public class KeywordFieldMapperTests extends MapperTestCase {
|
|||
assertThat(e.getCause().getMessage(), containsString("UTF8 encoding is longer than the max length"));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected SyntheticSourceSupport syntheticSourceSupport() {
|
||||
return new KeywordSyntheticSourceSupport(randomBoolean(), usually() ? null : randomAlphaOfLength(2), true);
|
||||
|
|
|
@ -13,7 +13,6 @@ import org.apache.lucene.index.IndexableField;
|
|||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.core.Tuple;
|
||||
import org.elasticsearch.index.mapper.NumberFieldTypeTests.OutOfRangeSpec;
|
||||
import org.elasticsearch.index.termvectors.TermVectorsService;
|
||||
import org.elasticsearch.script.DoubleFieldScript;
|
||||
import org.elasticsearch.script.LongFieldScript;
|
||||
import org.elasticsearch.script.Script;
|
||||
|
@ -25,6 +24,7 @@ import java.io.IOException;
|
|||
import java.util.List;
|
||||
import java.util.function.Function;
|
||||
|
||||
import static org.hamcrest.Matchers.both;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.matchesPattern;
|
||||
|
@ -55,7 +55,6 @@ public abstract class NumberFieldMapperTests extends MapperTestCase {
|
|||
checker.registerConflictCheck("store", b -> b.field("store", true));
|
||||
checker.registerConflictCheck("null_value", b -> b.field("null_value", 1));
|
||||
checker.registerUpdateCheck(b -> b.field("coerce", false), m -> assertFalse(((NumberFieldMapper) m).coerce()));
|
||||
checker.registerUpdateCheck(b -> b.field("ignore_malformed", true), m -> assertTrue(((NumberFieldMapper) m).ignoreMalformed()));
|
||||
|
||||
if (allowsIndexTimeScript()) {
|
||||
checker.registerConflictCheck("script", b -> b.field("script", "foo"));
|
||||
|
@ -177,27 +176,19 @@ public abstract class NumberFieldMapperTests extends MapperTestCase {
|
|||
assertThat(e.getCause().getMessage(), containsString("passed as String"));
|
||||
}
|
||||
|
||||
public void testIgnoreMalformed() throws Exception {
|
||||
DocumentMapper notIgnoring = createDocumentMapper(fieldMapping(this::minimalMapping));
|
||||
DocumentMapper ignoring = createDocumentMapper(fieldMapping(b -> {
|
||||
minimalMapping(b);
|
||||
b.field("ignore_malformed", true);
|
||||
}));
|
||||
for (Object malformedValue : new Object[] { "a", Boolean.FALSE }) {
|
||||
SourceToParse source = source(b -> b.field("field", malformedValue));
|
||||
MapperParsingException e = expectThrows(MapperParsingException.class, () -> notIgnoring.parse(source));
|
||||
if (malformedValue instanceof String) {
|
||||
assertThat(e.getCause().getMessage(), containsString("For input string: \"a\""));
|
||||
} else {
|
||||
assertThat(e.getCause().getMessage(), containsString("Current token"));
|
||||
assertThat(e.getCause().getMessage(), containsString("not numeric, can not use numeric value accessors"));
|
||||
}
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return true;
|
||||
}
|
||||
|
||||
ParsedDocument doc = ignoring.parse(source);
|
||||
IndexableField[] fields = doc.rootDoc().getFields("field");
|
||||
assertEquals(0, fields.length);
|
||||
assertArrayEquals(new String[] { "field" }, TermVectorsService.getValues(doc.rootDoc().getFields("_ignored")));
|
||||
}
|
||||
@Override
|
||||
protected List<ExampleMalformedValue> exampleMalformedValues() {
|
||||
return List.of(
|
||||
exampleMalformedValue("a").errorMatches("For input string: \"a\""),
|
||||
exampleMalformedValue(b -> b.value(false)).errorMatches(
|
||||
both(containsString("Current token")).and(containsString("not numeric, can not use numeric value accessors"))
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -31,6 +31,11 @@ public abstract class RangeFieldMapperTests extends MapperTestCase {
|
|||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public void testExistsQueryDocValuesDisabled() throws IOException {
|
||||
MapperService mapperService = createMapperService(fieldMapping(b -> {
|
||||
minimalMapping(b);
|
||||
|
|
|
@ -1102,6 +1102,11 @@ public class TextFieldMapperTests extends MapperTestCase {
|
|||
assumeFalse("We don't have a way to assert things here", true);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected SyntheticSourceSupport syntheticSourceSupport() {
|
||||
boolean storeTextField = randomBoolean();
|
||||
|
|
|
@ -70,6 +70,11 @@ public class FlattenedFieldMapperTests extends MapperTestCase {
|
|||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public void testDefaults() throws Exception {
|
||||
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
|
||||
ParsedDocument parsedDoc = mapper.parse(source(b -> b.startObject("field").field("key", "value").endObject()));
|
||||
|
|
|
@ -115,6 +115,11 @@ public class DenseVectorFieldMapperTests extends MapperTestCase {
|
|||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void assertSearchable(MappedFieldType fieldType) {
|
||||
assertThat(fieldType, instanceOf(DenseVectorFieldType.class));
|
||||
|
|
|
@ -38,6 +38,7 @@ import org.elasticsearch.index.fielddata.IndexFieldDataCache;
|
|||
import org.elasticsearch.index.fieldvisitor.LeafStoredFieldLoader;
|
||||
import org.elasticsearch.index.fieldvisitor.StoredFieldLoader;
|
||||
import org.elasticsearch.index.query.SearchExecutionContext;
|
||||
import org.elasticsearch.index.termvectors.TermVectorsService;
|
||||
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.ScriptContext;
|
||||
|
@ -137,6 +138,151 @@ public abstract class MapperTestCase extends MapperServiceTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Does this field mapper support {@code ignore_malformed}?
|
||||
*/
|
||||
protected abstract boolean supportsIgnoreMalformed();
|
||||
|
||||
/**
|
||||
* Build an {@link ExampleMalformedValue} that parses a string.
|
||||
*/
|
||||
protected final ExampleMalformedValue exampleMalformedValue(String value) {
|
||||
return exampleMalformedValue(b -> b.value(value));
|
||||
}
|
||||
|
||||
/**
|
||||
* Build an {@link ExampleMalformedValue} for arbitrary xcontent.
|
||||
*/
|
||||
protected final ExampleMalformedValue exampleMalformedValue(CheckedConsumer<XContentBuilder, IOException> value) {
|
||||
return new ExampleMalformedValue(this::minimalMapping, value, equalTo("unset"));
|
||||
}
|
||||
|
||||
/**
|
||||
* An example of a malformed value.
|
||||
*/
|
||||
public static class ExampleMalformedValue {
|
||||
private final CheckedConsumer<XContentBuilder, IOException> mapping;
|
||||
private final CheckedConsumer<XContentBuilder, IOException> value;
|
||||
private final Matcher<String> exceptionMessageMatcher;
|
||||
|
||||
private ExampleMalformedValue(
|
||||
CheckedConsumer<XContentBuilder, IOException> mapping,
|
||||
CheckedConsumer<XContentBuilder, IOException> value,
|
||||
Matcher<String> exceptionMessageMatcher
|
||||
) {
|
||||
this.mapping = mapping;
|
||||
this.value = value;
|
||||
this.exceptionMessageMatcher = exceptionMessageMatcher;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the mapping used for this value. If not called the default is
|
||||
* {@link MapperTestCase#minimalMapping}.
|
||||
*/
|
||||
public ExampleMalformedValue mapping(CheckedConsumer<XContentBuilder, IOException> newMapping) {
|
||||
return new ExampleMalformedValue(newMapping, value, exceptionMessageMatcher);
|
||||
}
|
||||
|
||||
/**
|
||||
* Match error messages that contain a string.
|
||||
*/
|
||||
public ExampleMalformedValue errorMatches(String contains) {
|
||||
return errorMatches(containsString(contains));
|
||||
}
|
||||
|
||||
/**
|
||||
* Match the error message in an arbitrary way.
|
||||
*/
|
||||
public ExampleMalformedValue errorMatches(Matcher<String> newMatcher) {
|
||||
return new ExampleMalformedValue(mapping, value, newMatcher);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Some example of malformed values and matches for exceptions that parsing them should create.
|
||||
*/
|
||||
protected List<ExampleMalformedValue> exampleMalformedValues() {
|
||||
assertFalse("mappers that support ignore_malformed values most override exampleMalformedValues", supportsIgnoreMalformed());
|
||||
return List.of();
|
||||
}
|
||||
|
||||
public final void testIgnoreMalformedFalseByDefault() throws IOException {
|
||||
for (ExampleMalformedValue example : exampleMalformedValues()) {
|
||||
assertIgnoreMalformedFalse(example.mapping, example.value, example.exceptionMessageMatcher);
|
||||
}
|
||||
}
|
||||
|
||||
public final void testIgnoreMalformedExplicitlyFalse() throws IOException {
|
||||
if (false == supportsIgnoreMalformed()) {
|
||||
Exception e = expectThrows(MapperParsingException.class, () -> createMapperService(fieldMapping(b -> {
|
||||
minimalMapping(b);
|
||||
b.field("ignore_malformed", false);
|
||||
})));
|
||||
assertThat(e.getMessage(), containsString("unknown parameter [ignore_malformed] on mapper [field]"));
|
||||
return;
|
||||
}
|
||||
for (ExampleMalformedValue example : exampleMalformedValues()) {
|
||||
assertIgnoreMalformedFalse(b -> {
|
||||
example.mapping.accept(b);
|
||||
b.field("ignore_malformed", false);
|
||||
}, example.value, example.exceptionMessageMatcher);
|
||||
}
|
||||
}
|
||||
|
||||
private void assertIgnoreMalformedFalse(
|
||||
CheckedConsumer<XContentBuilder, IOException> mapping,
|
||||
CheckedConsumer<XContentBuilder, IOException> value,
|
||||
Matcher<String> exceptionMessageMatcher
|
||||
) throws IOException {
|
||||
MapperService mapperService = createMapperService(fieldMapping(mapping));
|
||||
FieldMapper mapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("field");
|
||||
assertFalse(mapper.ignoreMalformed());
|
||||
SourceToParse source = source(b -> {
|
||||
b.field("field");
|
||||
value.accept(b);
|
||||
});
|
||||
MapperParsingException e = expectThrows(MapperParsingException.class, () -> mapperService.documentMapper().parse(source));
|
||||
assertThat(
|
||||
"incorrect exception while parsing " + source.source().utf8ToString(),
|
||||
e.getCause().getMessage(),
|
||||
exceptionMessageMatcher
|
||||
);
|
||||
}
|
||||
|
||||
public final void testIgnoreMalformedTrue() throws IOException {
|
||||
if (false == supportsIgnoreMalformed()) {
|
||||
Exception e = expectThrows(MapperParsingException.class, () -> createMapperService(fieldMapping(b -> {
|
||||
minimalMapping(b);
|
||||
b.field("ignore_malformed", true);
|
||||
})));
|
||||
assertThat(e.getMessage(), containsString("unknown parameter [ignore_malformed] on mapper [field]"));
|
||||
return;
|
||||
}
|
||||
for (ExampleMalformedValue example : exampleMalformedValues()) {
|
||||
XContentBuilder mapping = fieldMapping(b -> {
|
||||
example.mapping.accept(b);
|
||||
b.field("ignore_malformed", true);
|
||||
});
|
||||
MapperService mapperService = createMapperService(mapping);
|
||||
FieldMapper mapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("field");
|
||||
assertTrue(mapper.ignoreMalformed());
|
||||
ParsedDocument doc = mapperService.documentMapper().parse(source(b -> {
|
||||
b.field("field");
|
||||
example.value.accept(b);
|
||||
}));
|
||||
IndexableField[] fields = doc.rootDoc().getFields("field");
|
||||
assertThat(fields, equalTo(new IndexableField[0]));
|
||||
assertThat(TermVectorsService.getValues(doc.rootDoc().getFields("_ignored")), equalTo(ignoredFields()));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The field names that are saved in {@code _ignored} when ignoring a malformed value.
|
||||
*/
|
||||
protected String[] ignoredFields() {
|
||||
return new String[] { "field" };
|
||||
}
|
||||
|
||||
protected void assertExistsQuery(MapperService mapperService) throws IOException {
|
||||
LuceneDocument fields = mapperService.documentMapper().parse(source(this::writeField)).rootDoc();
|
||||
SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService);
|
||||
|
@ -456,6 +602,21 @@ public abstract class MapperTestCase extends MapperServiceTestCase {
|
|||
public void testUpdates() throws IOException {
|
||||
ParameterChecker checker = new ParameterChecker();
|
||||
registerParameters(checker);
|
||||
if (supportsIgnoreMalformed()) {
|
||||
checker.registerUpdateCheck(b -> b.field("ignore_malformed", true), m -> assertTrue(m.ignoreMalformed()));
|
||||
} else {
|
||||
MapperService mapperService = createMapperService(fieldMapping(this::minimalMapping));
|
||||
Exception e = expectThrows(
|
||||
MapperParsingException.class,
|
||||
"No conflict when setting parameter [ignore_malformed]",
|
||||
() -> merge(mapperService, fieldMapping(b -> {
|
||||
minimalMapping(b);
|
||||
b.field("ignore_malformed", true);
|
||||
}))
|
||||
);
|
||||
assertThat(e.getMessage(), containsString("unknown parameter [ignore_malformed] on mapper [field]"));
|
||||
}
|
||||
|
||||
for (UpdateCheck updateCheck : checker.updateChecks) {
|
||||
MapperService mapperService = createMapperService(updateCheck.init);
|
||||
merge(mapperService, updateCheck.update);
|
||||
|
@ -465,7 +626,6 @@ public abstract class MapperTestCase extends MapperServiceTestCase {
|
|||
merge(mapperService, updateCheck.update);
|
||||
mapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("field");
|
||||
updateCheck.check.accept(mapper);
|
||||
|
||||
}
|
||||
for (String param : checker.conflictChecks.keySet()) {
|
||||
MapperService mapperService = createMapperService(checker.conflictChecks.get(param).init);
|
||||
|
|
|
@ -120,7 +120,8 @@ public class HistogramFieldMapper extends FieldMapper {
|
|||
this.ignoreMalformedByDefault = builder.ignoreMalformed.getDefaultValue().value();
|
||||
}
|
||||
|
||||
boolean ignoreMalformed() {
|
||||
@Override
|
||||
public boolean ignoreMalformed() {
|
||||
return ignoreMalformed.value();
|
||||
}
|
||||
|
||||
|
|
|
@ -81,7 +81,7 @@ public class HistogramFieldMapperTests extends MapperTestCase {
|
|||
})));
|
||||
assertThat(
|
||||
e.getCause().getMessage(),
|
||||
containsString("doesn't not support indexing multiple values " + "for the same field in the same document")
|
||||
containsString("doesn't not support indexing multiple values for the same field in the same document")
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -108,10 +108,18 @@ public class HistogramFieldMapperTests extends MapperTestCase {
|
|||
assertThat(e.getCause().getMessage(), containsString("expected field called [counts]"));
|
||||
}
|
||||
|
||||
public void testIgnoreMalformed() throws Exception {
|
||||
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "histogram").field("ignore_malformed", true)));
|
||||
ParsedDocument doc = mapper.parse(source(b -> b.startObject("field").field("values", new double[] { 2, 2 }).endObject()));
|
||||
assertThat(doc.rootDoc().getField("pre_aggregated"), nullValue());
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<ExampleMalformedValue> exampleMalformedValues() {
|
||||
return List.of(
|
||||
exampleMalformedValue(b -> b.startObject().startArray("values").value(2).value(2).endArray().endObject()).errorMatches(
|
||||
"expected field called [counts]"
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public void testIgnoreMalformedSkipsKeyword() throws Exception {
|
||||
|
|
|
@ -542,7 +542,8 @@ public class AggregateDoubleMetricFieldMapper extends FieldMapper {
|
|||
this.indexCreatedVersion = builder.indexCreatedVersion;
|
||||
}
|
||||
|
||||
boolean ignoreMalformed() {
|
||||
@Override
|
||||
public boolean ignoreMalformed() {
|
||||
return ignoreMalformed;
|
||||
}
|
||||
|
||||
|
|
|
@ -33,19 +33,16 @@ import java.util.LinkedHashMap;
|
|||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Names.IGNORE_MALFORMED;
|
||||
import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Names.METRICS;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.matchesPattern;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
import static org.hamcrest.core.IsInstanceOf.instanceOf;
|
||||
|
||||
public class AggregateDoubleMetricFieldMapperTests extends MapperTestCase {
|
||||
|
||||
public static final String METRICS_FIELD = METRICS;
|
||||
public static final String IGNORE_MALFORMED_FIELD = IGNORE_MALFORMED;
|
||||
public static final String CONTENT_TYPE = AggregateDoubleMetricFieldMapper.CONTENT_TYPE;
|
||||
public static final String DEFAULT_METRIC = AggregateDoubleMetricFieldMapper.Names.DEFAULT_METRIC;
|
||||
|
||||
|
@ -61,11 +58,6 @@ public class AggregateDoubleMetricFieldMapperTests extends MapperTestCase {
|
|||
|
||||
@Override
|
||||
protected void registerParameters(ParameterChecker checker) throws IOException {
|
||||
checker.registerUpdateCheck(
|
||||
b -> b.field(IGNORE_MALFORMED_FIELD, true),
|
||||
m -> assertTrue(((AggregateDoubleMetricFieldMapper) m).ignoreMalformed())
|
||||
);
|
||||
|
||||
checker.registerConflictCheck(
|
||||
DEFAULT_METRIC,
|
||||
fieldMapping(this::minimalMapping),
|
||||
|
@ -156,21 +148,41 @@ public class AggregateDoubleMetricFieldMapperTests extends MapperTestCase {
|
|||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Test parsing an aggregate_metric field that contains no values
|
||||
* when ignore_malformed = true
|
||||
*/
|
||||
public void testParseEmptyValueIgnoreMalformed() throws Exception {
|
||||
DocumentMapper mapper = createDocumentMapper(
|
||||
fieldMapping(
|
||||
b -> b.field("type", CONTENT_TYPE)
|
||||
.field(METRICS_FIELD, new String[] { "min", "max", "value_count" })
|
||||
.field("ignore_malformed", true)
|
||||
.field(DEFAULT_METRIC, "max")
|
||||
)
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<ExampleMalformedValue> exampleMalformedValues() {
|
||||
return List.of(
|
||||
// no metrics
|
||||
exampleMalformedValue(b -> b.startObject().endObject()).errorMatches(
|
||||
"Aggregate metric field [field] must contain all metrics [min, max, value_count]"
|
||||
),
|
||||
// unmapped metric
|
||||
exampleMalformedValue(
|
||||
b -> b.startObject().field("min", -10.1).field("max", 50.0).field("value_count", 14).field("sum", 55).endObject()
|
||||
).errorMatches("Aggregate metric [sum] does not exist in the mapping of field [field]"),
|
||||
// missing metric
|
||||
exampleMalformedValue(b -> b.startObject().field("min", -10.1).field("max", 50.0).endObject()).errorMatches(
|
||||
"Aggregate metric field [field] must contain all metrics [min, max, value_count]"
|
||||
),
|
||||
// invalid metric value
|
||||
exampleMalformedValue(b -> b.startObject().field("min", "10.0").field("max", 50.0).field("value_count", 14).endObject())
|
||||
.errorMatches("Failed to parse object: expecting token of type [VALUE_NUMBER] but found [VALUE_STRING]"),
|
||||
// negative value count
|
||||
exampleMalformedValue(b -> b.startObject().field("min", 10.0).field("max", 50.0).field("value_count", -14).endObject())
|
||||
.errorMatches("Aggregate metric [value_count] of field [field] cannot be a negative number"),
|
||||
// value count with decimal digits (whole numbers formatted as doubles are permitted, but non-whole numbers are not)
|
||||
exampleMalformedValue(b -> b.startObject().field("min", 10.0).field("max", 50.0).field("value_count", 77.33).endObject())
|
||||
.errorMatches("failed to parse field [field.value_count] of type [integer]")
|
||||
);
|
||||
ParsedDocument doc = mapper.parse(source(b -> b.startObject("field").endObject()));
|
||||
assertThat(doc.rootDoc().getField("field"), nullValue());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String[] ignoredFields() {
|
||||
return new String[] { "field.value_count", "field.min", "field.max" };
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -186,145 +198,6 @@ public class AggregateDoubleMetricFieldMapperTests extends MapperTestCase {
|
|||
assertThat(e.getMessage(), containsString("Metric [unsupported] is not supported."));
|
||||
}
|
||||
|
||||
/**
|
||||
* Test inserting a document containing a metric that has not been defined in the field mapping.
|
||||
*/
|
||||
public void testUnmappedMetric() throws Exception {
|
||||
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
|
||||
Exception e = expectThrows(
|
||||
MapperParsingException.class,
|
||||
() -> mapper.parse(
|
||||
source(
|
||||
b -> b.startObject("field").field("min", -10.1).field("max", 50.0).field("value_count", 14).field("sum", 55).endObject()
|
||||
)
|
||||
)
|
||||
);
|
||||
assertThat(e.getCause().getMessage(), containsString("Aggregate metric [sum] does not exist in the mapping of field [field]"));
|
||||
}
|
||||
|
||||
/**
|
||||
* Test inserting a document containing a metric that has not been defined in the field mapping.
|
||||
* Field will be ignored because config ignore_malformed has been set.
|
||||
*/
|
||||
public void testUnmappedMetricWithIgnoreMalformed() throws Exception {
|
||||
DocumentMapper mapper = createDocumentMapper(
|
||||
fieldMapping(
|
||||
b -> b.field("type", CONTENT_TYPE)
|
||||
.field(METRICS_FIELD, new String[] { "min", "max" })
|
||||
.field("ignore_malformed", true)
|
||||
.field(DEFAULT_METRIC, "max")
|
||||
)
|
||||
);
|
||||
|
||||
ParsedDocument doc = mapper.parse(
|
||||
source(b -> b.startObject("field").field("min", -10.1).field("max", 50.0).field("sum", 55).endObject())
|
||||
);
|
||||
assertNull(doc.rootDoc().getField("metric.min"));
|
||||
}
|
||||
|
||||
/**
|
||||
* Test inserting a document containing less metrics than those defined in the field mapping.
|
||||
* An exception will be thrown
|
||||
*/
|
||||
public void testMissingMetric() throws Exception {
|
||||
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
|
||||
|
||||
Exception e = expectThrows(
|
||||
MapperParsingException.class,
|
||||
() -> mapper.parse(source(b -> b.startObject("field").field("min", -10.1).field("max", 50.0).endObject()))
|
||||
);
|
||||
assertThat(
|
||||
e.getCause().getMessage(),
|
||||
containsString("Aggregate metric field [field] must contain all metrics [min, max, value_count]")
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Test inserting a document containing less metrics than those defined in the field mapping.
|
||||
* Field will be ignored because config ignore_malformed has been set.
|
||||
*/
|
||||
public void testMissingMetricWithIgnoreMalformed() throws Exception {
|
||||
DocumentMapper mapper = createDocumentMapper(
|
||||
fieldMapping(
|
||||
b -> b.field("type", CONTENT_TYPE)
|
||||
.field(METRICS_FIELD, new String[] { "min", "max" })
|
||||
.field("ignore_malformed", true)
|
||||
.field(DEFAULT_METRIC, "max")
|
||||
)
|
||||
);
|
||||
|
||||
ParsedDocument doc = mapper.parse(source(b -> b.startObject("field").field("min", -10.1).field("max", 50.0).endObject()));
|
||||
|
||||
assertNull(doc.rootDoc().getField("metric.min"));
|
||||
}
|
||||
|
||||
/**
|
||||
* Test a metric that has an invalid value (string instead of number)
|
||||
*/
|
||||
public void testInvalidMetricValue() throws Exception {
|
||||
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
|
||||
Exception e = expectThrows(
|
||||
MapperParsingException.class,
|
||||
() -> mapper.parse(
|
||||
source(b -> b.startObject("field").field("min", "10.0").field("max", 50.0).field("value_count", 14).endObject())
|
||||
)
|
||||
);
|
||||
|
||||
assertThat(
|
||||
e.getCause().getMessage(),
|
||||
containsString("Failed to parse object: expecting token of type [VALUE_NUMBER] but found [VALUE_STRING]")
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Test a metric that has an invalid value (string instead of number)
|
||||
* with ignore_malformed = true
|
||||
*/
|
||||
public void testInvalidMetricValueIgnoreMalformed() throws Exception {
|
||||
DocumentMapper mapper = createDocumentMapper(
|
||||
fieldMapping(
|
||||
b -> b.field("type", CONTENT_TYPE)
|
||||
.field(METRICS_FIELD, new String[] { "min", "max" })
|
||||
.field("ignore_malformed", true)
|
||||
.field(DEFAULT_METRIC, "max")
|
||||
)
|
||||
);
|
||||
ParsedDocument doc = mapper.parse(source(b -> b.startObject("field").field("min", "10.0").field("max", 50.0).endObject()));
|
||||
assertThat(doc.rootDoc().getField("metric"), nullValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* Test a field that has a negative value for value_count
|
||||
*/
|
||||
public void testNegativeValueCount() throws Exception {
|
||||
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
|
||||
Exception e = expectThrows(
|
||||
MapperParsingException.class,
|
||||
() -> mapper.parse(
|
||||
source(b -> b.startObject("field").field("min", 10.0).field("max", 50.0).field("value_count", -14).endObject())
|
||||
)
|
||||
);
|
||||
assertThat(
|
||||
e.getCause().getMessage(),
|
||||
containsString("Aggregate metric [value_count] of field [field] cannot be a negative number")
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Test a field that has a negative value for value_count with ignore_malformed = true
|
||||
* No exception will be thrown but the field will be ignored
|
||||
*/
|
||||
public void testNegativeValueCountIgnoreMalformed() throws Exception {
|
||||
DocumentMapper mapper = createDocumentMapper(
|
||||
fieldMapping(
|
||||
b -> b.field("type", CONTENT_TYPE).field(METRICS_FIELD, new String[] { "value_count" }).field("ignore_malformed", true)
|
||||
)
|
||||
);
|
||||
|
||||
ParsedDocument doc = mapper.parse(source(b -> b.startObject("field").field("value_count", -14).endObject()));
|
||||
assertThat(doc.rootDoc().getField("field.value_count"), nullValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* Test parsing a value_count metric written as double with zero decimal digits
|
||||
*/
|
||||
|
@ -336,23 +209,6 @@ public class AggregateDoubleMetricFieldMapperTests extends MapperTestCase {
|
|||
assertEquals(77, doc.rootDoc().getField("field.value_count").numericValue().longValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* Test parsing a value_count metric written as double with some decimal digits
|
||||
*/
|
||||
public void testInvalidDoubleValueCount() throws Exception {
|
||||
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
|
||||
Exception e = expectThrows(
|
||||
MapperParsingException.class,
|
||||
() -> mapper.parse(
|
||||
source(b -> b.startObject("field").field("min", 10.0).field("max", 50.0).field("value_count", 77.33).endObject())
|
||||
)
|
||||
);
|
||||
assertThat(
|
||||
e.getCause().getMessage(),
|
||||
containsString("failed to parse field [field.value_count] of type [integer] in document with id '1'.")
|
||||
);
|
||||
}
|
||||
|
||||
private void randomMapping(XContentBuilder b, int randomNumber) throws IOException {
|
||||
b.field("type", CONTENT_TYPE);
|
||||
switch (randomNumber) {
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
import org.elasticsearch.gradle.internal.info.BuildParams
|
||||
|
||||
apply plugin: 'elasticsearch.internal-es-plugin'
|
||||
apply plugin: 'elasticsearch.internal-cluster-test'
|
||||
apply plugin: 'elasticsearch.internal-yaml-rest-test'
|
||||
|
||||
esplugin {
|
||||
|
@ -15,15 +14,10 @@ archivesBaseName = 'x-pack-constant-keyword'
|
|||
dependencies {
|
||||
compileOnly project(':modules:lang-painless:spi')
|
||||
compileOnly project(path: xpackModule('core'))
|
||||
internalClusterTestImplementation(testArtifact(project(xpackModule('core'))))
|
||||
}
|
||||
|
||||
tasks.named('internalClusterTestTestingConventions').configure {
|
||||
baseClass 'org.elasticsearch.index.mapper.MapperTestCase'
|
||||
}
|
||||
|
||||
if (BuildParams.isSnapshotBuild() == false) {
|
||||
tasks.named("internalClusterTest").configure {
|
||||
tasks.named("test").configure {
|
||||
systemProperty 'es.index_mode_feature_flag_registered', 'true'
|
||||
}
|
||||
}
|
||||
|
|
|
@ -67,6 +67,11 @@ public class ConstantKeywordFieldMapperTests extends MapperTestCase {
|
|||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public void testDefaults() throws Exception {
|
||||
XContentBuilder mapping = fieldMapping(b -> b.field("type", "constant_keyword").field("value", "foo"));
|
||||
DocumentMapper mapper = createDocumentMapper(mapping);
|
|
@ -529,7 +529,8 @@ public class UnsignedLongFieldMapper extends FieldMapper {
|
|||
this.metricType = builder.metric.getValue();
|
||||
}
|
||||
|
||||
boolean ignoreMalformed() {
|
||||
@Override
|
||||
public boolean ignoreMalformed() {
|
||||
return ignoreMalformed.value();
|
||||
}
|
||||
|
||||
|
|
|
@ -16,7 +16,6 @@ import org.elasticsearch.index.mapper.MapperParsingException;
|
|||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.mapper.MapperTestCase;
|
||||
import org.elasticsearch.index.mapper.ParsedDocument;
|
||||
import org.elasticsearch.index.termvectors.TermVectorsService;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.xcontent.XContentBuilder;
|
||||
import org.junit.AssumptionViolatedException;
|
||||
|
@ -57,10 +56,6 @@ public class UnsignedLongFieldMapperTests extends MapperTestCase {
|
|||
checker.registerConflictCheck("index", b -> b.field("index", false));
|
||||
checker.registerConflictCheck("store", b -> b.field("store", true));
|
||||
checker.registerConflictCheck("null_value", b -> b.field("null_value", 1));
|
||||
checker.registerUpdateCheck(
|
||||
b -> b.field("ignore_malformed", true),
|
||||
m -> assertTrue(((UnsignedLongFieldMapper) m).ignoreMalformed())
|
||||
);
|
||||
}
|
||||
|
||||
public void testDefaults() throws Exception {
|
||||
|
@ -174,38 +169,17 @@ public class UnsignedLongFieldMapperTests extends MapperTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testIgnoreMalformed() throws Exception {
|
||||
// test ignore_malformed is false by default
|
||||
{
|
||||
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
|
||||
Object malformedValue1 = "a";
|
||||
ThrowingRunnable runnable = () -> mapper.parse(source(b -> b.field("field", malformedValue1)));
|
||||
MapperParsingException e = expectThrows(MapperParsingException.class, runnable);
|
||||
assertThat(e.getCause().getMessage(), containsString("For input string: \"a\""));
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return true;
|
||||
}
|
||||
|
||||
Object malformedValue2 = Boolean.FALSE;
|
||||
runnable = () -> mapper.parse(source(b -> b.field("field", malformedValue2)));
|
||||
e = expectThrows(MapperParsingException.class, runnable);
|
||||
assertThat(e.getCause().getMessage(), containsString("For input string: \"false\""));
|
||||
}
|
||||
|
||||
// test ignore_malformed when set to true ignored malformed documents
|
||||
{
|
||||
DocumentMapper mapper = createDocumentMapper(
|
||||
fieldMapping(b -> b.field("type", "unsigned_long").field("ignore_malformed", true))
|
||||
);
|
||||
Object malformedValue1 = "a";
|
||||
ParsedDocument doc = mapper.parse(source(b -> b.field("field", malformedValue1)));
|
||||
IndexableField[] fields = doc.rootDoc().getFields("field");
|
||||
assertEquals(0, fields.length);
|
||||
assertArrayEquals(new String[] { "field" }, TermVectorsService.getValues(doc.rootDoc().getFields("_ignored")));
|
||||
|
||||
Object malformedValue2 = Boolean.FALSE;
|
||||
ParsedDocument doc2 = mapper.parse(source(b -> b.field("field", malformedValue2)));
|
||||
IndexableField[] fields2 = doc2.rootDoc().getFields("field");
|
||||
assertEquals(0, fields2.length);
|
||||
assertArrayEquals(new String[] { "field" }, TermVectorsService.getValues(doc2.rootDoc().getFields("_ignored")));
|
||||
}
|
||||
@Override
|
||||
protected List<ExampleMalformedValue> exampleMalformedValues() {
|
||||
return List.of(
|
||||
exampleMalformedValue("a").errorMatches("For input string: \"a\""),
|
||||
exampleMalformedValue(b -> b.value(false)).errorMatches("For input string: \"false\"")
|
||||
);
|
||||
}
|
||||
|
||||
public void testDecimalParts() throws IOException {
|
||||
|
|
|
@ -62,6 +62,11 @@ public class VersionStringFieldMapperTests extends MapperTestCase {
|
|||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public void testDefaults() throws Exception {
|
||||
XContentBuilder mapping = fieldMapping(this::minimalMapping);
|
||||
DocumentMapper mapper = createDocumentMapper(mapping);
|
||||
|
|
|
@ -21,10 +21,8 @@ import java.io.IOException;
|
|||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
|
||||
import static org.elasticsearch.geometry.utils.Geohash.stringEncode;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
||||
/** Base class for testing cartesian field mappers */
|
||||
public abstract class CartesianFieldMapperTests extends MapperTestCase {
|
||||
|
@ -90,92 +88,9 @@ public abstract class CartesianFieldMapperTests extends MapperTestCase {
|
|||
assertXYPointField(doc.rootDoc().getField(FIELD_NAME), 2000.1f, 305.6f);
|
||||
}
|
||||
|
||||
public void testInvalidPointValuesIgnored() throws IOException {
|
||||
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> {
|
||||
b.field("type", getFieldName());
|
||||
b.field("ignore_malformed", true);
|
||||
}));
|
||||
|
||||
assertThat(mapper.parse(source(b -> b.field(FIELD_NAME, "1234.333"))).rootDoc().getField(FIELD_NAME), nullValue());
|
||||
|
||||
assertThat(
|
||||
mapper.parse(source(b -> b.startObject(FIELD_NAME).field("x", 1.3).field("y", "-").endObject())).rootDoc().getField(FIELD_NAME),
|
||||
nullValue()
|
||||
);
|
||||
|
||||
assertThat(
|
||||
mapper.parse(source(b -> b.startObject(FIELD_NAME).field("geohash", stringEncode(0, 0)).endObject()))
|
||||
.rootDoc()
|
||||
.getField(FIELD_NAME),
|
||||
nullValue()
|
||||
);
|
||||
|
||||
assertThat(
|
||||
mapper.parse(source(b -> b.startObject(FIELD_NAME).field("x", "-").field("y", 1.3).endObject())).rootDoc().getField(FIELD_NAME),
|
||||
nullValue()
|
||||
);
|
||||
|
||||
assertThat(mapper.parse(source(b -> b.field(FIELD_NAME, "-,1.3"))).rootDoc().getField(FIELD_NAME), nullValue());
|
||||
|
||||
assertThat(mapper.parse(source(b -> b.field(FIELD_NAME, "1.3,-"))).rootDoc().getField(FIELD_NAME), nullValue());
|
||||
|
||||
assertThat(
|
||||
mapper.parse(source(b -> b.startObject(FIELD_NAME).field("lon", 1.3).field("y", 1.3).endObject()))
|
||||
.rootDoc()
|
||||
.getField(FIELD_NAME),
|
||||
nullValue()
|
||||
);
|
||||
|
||||
assertThat(
|
||||
mapper.parse(source(b -> b.startObject(FIELD_NAME).field("x", 1.3).field("lat", 1.3).endObject()))
|
||||
.rootDoc()
|
||||
.getField(FIELD_NAME),
|
||||
nullValue()
|
||||
);
|
||||
|
||||
assertThat(
|
||||
mapper.parse(source(b -> b.startObject(FIELD_NAME).field("x", "NaN").field("y", "NaN").endObject()))
|
||||
.rootDoc()
|
||||
.getField(FIELD_NAME),
|
||||
nullValue()
|
||||
);
|
||||
|
||||
assertThat(
|
||||
mapper.parse(source(b -> b.startObject(FIELD_NAME).field("x", "NaN").field("y", 1.3).endObject()))
|
||||
.rootDoc()
|
||||
.getField(FIELD_NAME),
|
||||
nullValue()
|
||||
);
|
||||
|
||||
assertThat(
|
||||
mapper.parse(source(b -> b.startObject(FIELD_NAME).field("x", 1.3).field("y", "NaN").endObject()))
|
||||
.rootDoc()
|
||||
.getField(FIELD_NAME),
|
||||
nullValue()
|
||||
);
|
||||
|
||||
assertThat(
|
||||
mapper.parse(source(b -> b.startObject(FIELD_NAME).field("x", 1.3).field("y", "NaN").endObject()))
|
||||
.rootDoc()
|
||||
.getField(FIELD_NAME),
|
||||
nullValue()
|
||||
);
|
||||
|
||||
assertThat(mapper.parse(source(b -> b.field(FIELD_NAME, "NaN,NaN"))).rootDoc().getField(FIELD_NAME), nullValue());
|
||||
|
||||
assertThat(mapper.parse(source(b -> b.field(FIELD_NAME, "10,NaN"))).rootDoc().getField(FIELD_NAME), nullValue());
|
||||
|
||||
assertThat(mapper.parse(source(b -> b.field(FIELD_NAME, "NaN,12"))).rootDoc().getField(FIELD_NAME), nullValue());
|
||||
|
||||
assertThat(
|
||||
mapper.parse(source(b -> b.startObject(FIELD_NAME).field("x", 1.3).nullField("y").endObject())).rootDoc().getField(FIELD_NAME),
|
||||
nullValue()
|
||||
);
|
||||
|
||||
assertThat(
|
||||
mapper.parse(source(b -> b.startObject(FIELD_NAME).nullField("x").field("y", 1.3).endObject())).rootDoc().getField(FIELD_NAME),
|
||||
nullValue()
|
||||
);
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return true;
|
||||
}
|
||||
|
||||
public void testZValueWKT() throws IOException {
|
||||
|
|
|
@ -9,7 +9,6 @@ package org.elasticsearch.xpack.spatial.index.mapper;
|
|||
import org.apache.lucene.index.IndexableField;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.geo.Orientation;
|
||||
import org.elasticsearch.index.mapper.AbstractGeometryFieldMapper;
|
||||
import org.elasticsearch.index.mapper.AbstractShapeGeometryFieldMapper;
|
||||
|
@ -23,12 +22,11 @@ import org.elasticsearch.index.mapper.ParsedDocument;
|
|||
import org.elasticsearch.index.mapper.SourceToParse;
|
||||
import org.elasticsearch.test.VersionUtils;
|
||||
import org.elasticsearch.xcontent.ToXContent;
|
||||
import org.elasticsearch.xcontent.XContentFactory;
|
||||
import org.elasticsearch.xcontent.XContentType;
|
||||
import org.junit.AssumptionViolatedException;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
@ -60,10 +58,6 @@ public class GeoShapeWithDocValuesFieldMapperTests extends GeoFieldMapperTests {
|
|||
AbstractShapeGeometryFieldMapper<?> gsfm = (AbstractShapeGeometryFieldMapper<?>) m;
|
||||
assertEquals(Orientation.RIGHT, gsfm.orientation());
|
||||
});
|
||||
checker.registerUpdateCheck(b -> b.field("ignore_malformed", true), m -> {
|
||||
AbstractShapeGeometryFieldMapper<?> gpfm = (AbstractShapeGeometryFieldMapper<?>) m;
|
||||
assertTrue(gpfm.ignoreMalformed());
|
||||
});
|
||||
checker.registerUpdateCheck(b -> b.field("ignore_z_value", false), m -> {
|
||||
AbstractShapeGeometryFieldMapper<?> gpfm = (AbstractShapeGeometryFieldMapper<?>) m;
|
||||
assertFalse(gpfm.ignoreZValue());
|
||||
|
@ -189,71 +183,20 @@ public class GeoShapeWithDocValuesFieldMapperTests extends GeoFieldMapperTests {
|
|||
assertThat(ignoreZValue, equalTo(false));
|
||||
}
|
||||
|
||||
/**
|
||||
* Test that ignore_malformed parameter correctly parses
|
||||
*/
|
||||
public void testIgnoreMalformedParsing() throws IOException {
|
||||
|
||||
DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> {
|
||||
b.field("type", getFieldName());
|
||||
b.field("ignore_malformed", true);
|
||||
}));
|
||||
Mapper fieldMapper = defaultMapper.mappers().getMapper(FIELD_NAME);
|
||||
assertThat(fieldMapper, instanceOf(fieldMapperClass()));
|
||||
|
||||
boolean ignoreMalformed = ((AbstractGeometryFieldMapper<?>) fieldMapper).ignoreMalformed();
|
||||
assertThat(ignoreMalformed, equalTo(true));
|
||||
|
||||
// explicit false ignore_malformed test
|
||||
defaultMapper = createDocumentMapper(fieldMapping(b -> {
|
||||
b.field("type", getFieldName());
|
||||
b.field("ignore_malformed", false);
|
||||
}));
|
||||
fieldMapper = defaultMapper.mappers().getMapper(FIELD_NAME);
|
||||
assertThat(fieldMapper, instanceOf(fieldMapperClass()));
|
||||
|
||||
ignoreMalformed = ((AbstractGeometryFieldMapper<?>) fieldMapper).ignoreMalformed();
|
||||
assertThat(ignoreMalformed, equalTo(false));
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return true;
|
||||
}
|
||||
|
||||
public void testIgnoreMalformedValues() throws IOException {
|
||||
|
||||
DocumentMapper ignoreMapper = createDocumentMapper(fieldMapping(b -> {
|
||||
b.field("type", getFieldName());
|
||||
b.field("ignore_malformed", true);
|
||||
}));
|
||||
DocumentMapper failMapper = createDocumentMapper(fieldMapping(b -> {
|
||||
b.field("type", getFieldName());
|
||||
b.field("ignore_malformed", false);
|
||||
}));
|
||||
|
||||
{
|
||||
BytesReference arrayedDoc = BytesReference.bytes(
|
||||
XContentFactory.jsonBuilder().startObject().field(FIELD_NAME, "Bad shape").endObject()
|
||||
);
|
||||
SourceToParse sourceToParse = new SourceToParse("1", arrayedDoc, XContentType.JSON);
|
||||
ParsedDocument document = ignoreMapper.parse(sourceToParse);
|
||||
assertThat(document.docs().get(0).getFields(FIELD_NAME).length, equalTo(0));
|
||||
MapperParsingException exception = expectThrows(MapperParsingException.class, () -> failMapper.parse(sourceToParse));
|
||||
assertThat(exception.getCause().getMessage(), containsString("Unknown geometry type: bad"));
|
||||
}
|
||||
{
|
||||
BytesReference arrayedDoc = BytesReference.bytes(
|
||||
XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field(
|
||||
FIELD_NAME,
|
||||
"POLYGON ((18.9401790919516 -33.9681188869036, 18.9401790919516 -33.9681188869036, 18.9401790919517 "
|
||||
+ "-33.9681188869036, 18.9401790919517 -33.9681188869036, 18.9401790919516 -33.9681188869036))"
|
||||
)
|
||||
.endObject()
|
||||
);
|
||||
SourceToParse sourceToParse = new SourceToParse("1", arrayedDoc, XContentType.JSON);
|
||||
ParsedDocument document = ignoreMapper.parse(sourceToParse);
|
||||
assertThat(document.docs().get(0).getFields(FIELD_NAME).length, equalTo(0));
|
||||
MapperParsingException exception = expectThrows(MapperParsingException.class, () -> failMapper.parse(sourceToParse));
|
||||
assertThat(exception.getCause().getMessage(), containsString("at least three non-collinear points required"));
|
||||
}
|
||||
@Override
|
||||
protected List<ExampleMalformedValue> exampleMalformedValues() {
|
||||
return List.of(
|
||||
exampleMalformedValue("Bad shape").errorMatches("Unknown geometry type: bad"),
|
||||
exampleMalformedValue(
|
||||
"POLYGON ((18.9401790919516 -33.9681188869036, 18.9401790919516 -33.9681188869036, 18.9401790919517 "
|
||||
+ "-33.9681188869036, 18.9401790919517 -33.9681188869036, 18.9401790919516 -33.9681188869036))"
|
||||
).errorMatches("at least three non-collinear points required")
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -22,6 +22,7 @@ import org.elasticsearch.xpack.spatial.common.CartesianPoint;
|
|||
import org.junit.AssumptionViolatedException;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
||||
import static org.elasticsearch.geometry.utils.Geohash.stringEncode;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
|
@ -55,10 +56,6 @@ public class PointFieldMapperTests extends CartesianFieldMapperTests {
|
|||
protected void registerParameters(ParameterChecker checker) throws IOException {
|
||||
checker.registerConflictCheck("doc_values", b -> b.field("doc_values", false));
|
||||
checker.registerConflictCheck("index", b -> b.field("index", false));
|
||||
checker.registerUpdateCheck(b -> b.field("ignore_malformed", true), m -> {
|
||||
PointFieldMapper gpfm = (PointFieldMapper) m;
|
||||
assertTrue(gpfm.ignoreMalformed());
|
||||
});
|
||||
checker.registerUpdateCheck(b -> b.field("ignore_z_value", false), m -> {
|
||||
PointFieldMapper gpfm = (PointFieldMapper) m;
|
||||
assertFalse(gpfm.ignoreZValue());
|
||||
|
@ -362,6 +359,46 @@ public class PointFieldMapperTests extends CartesianFieldMapperTests {
|
|||
assertWarnings("Adding multifields to [point] mappers has no effect and will be forbidden in future");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<ExampleMalformedValue> exampleMalformedValues() {
|
||||
return List.of(
|
||||
exampleMalformedValue("1234.333").errorMatches("expected 2 or 3 coordinates but found: [1]"),
|
||||
exampleMalformedValue(b -> b.startObject().field("x", 1.3).field("y", "-").endObject()).errorMatches(
|
||||
"[y] must be a valid double value"
|
||||
),
|
||||
exampleMalformedValue(b -> b.startObject().field("x", "-").field("y", 1.3).endObject()).errorMatches(
|
||||
"[x] must be a valid double value"
|
||||
),
|
||||
exampleMalformedValue(b -> b.startObject().field("geohash", stringEncode(0, 0)).endObject()).errorMatches(
|
||||
"field [geohash] not supported - must be one of: x, y, z, type, coordinates"
|
||||
),
|
||||
exampleMalformedValue("-,1.3").errorMatches("[x] must be a number"),
|
||||
exampleMalformedValue("1.3,-").errorMatches("[y] must be a number"),
|
||||
exampleMalformedValue(b -> b.startObject().field("lon", 1.3).field("y", 1.3).endObject()).errorMatches(
|
||||
"field [lon] not supported - must be one of: x, y, z, type, coordinates"
|
||||
),
|
||||
exampleMalformedValue(b -> b.startObject().field("x", 1.3).field("lat", 1.3).endObject()).errorMatches(
|
||||
"field [lat] not supported - must be one of: x, y, z, type, coordinates"
|
||||
),
|
||||
exampleMalformedValue(b -> b.startObject().field("x", "NaN").field("y", "NaN").endObject()).errorMatches(
|
||||
"field must be either lat/lon or type/coordinates"
|
||||
),
|
||||
exampleMalformedValue(b -> b.startObject().field("x", "NaN").field("y", 1.3).endObject()).errorMatches("Required [x]"),
|
||||
exampleMalformedValue(b -> b.startObject().field("x", 1.3).field("y", "NaN").endObject()).errorMatches("Required [y]"),
|
||||
exampleMalformedValue("NaN,NaN").errorMatches(
|
||||
"invalid [x] value [NaN]; must be between -3.4028234663852886E38 and 3.4028234663852886E38"
|
||||
),
|
||||
exampleMalformedValue("10,NaN").errorMatches(
|
||||
"invalid [y] value [NaN]; must be between -3.4028234663852886E38 and 3.4028234663852886E38"
|
||||
),
|
||||
exampleMalformedValue("NaN,12").errorMatches(
|
||||
"invalid [x] value [NaN]; must be between -3.4028234663852886E38 and 3.4028234663852886E38"
|
||||
),
|
||||
exampleMalformedValue(b -> b.startObject().field("x", 1.3).nullField("y").endObject()).errorMatches("y must be a number"),
|
||||
exampleMalformedValue(b -> b.startObject().nullField("x").field("y", 1.3).endObject()).errorMatches("x must be a number")
|
||||
);
|
||||
}
|
||||
|
||||
public void testGeoJSONInvalidType() throws IOException {
|
||||
double[] coords = new double[] { 0.0, 0.0 };
|
||||
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
|
||||
|
|
|
@ -10,7 +10,6 @@ import org.apache.lucene.document.ShapeField;
|
|||
import org.apache.lucene.index.IndexableField;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.geo.Orientation;
|
||||
import org.elasticsearch.index.mapper.AbstractGeometryFieldMapper;
|
||||
import org.elasticsearch.index.mapper.AbstractShapeGeometryFieldMapper;
|
||||
|
@ -24,13 +23,13 @@ import org.elasticsearch.index.mapper.ParsedDocument;
|
|||
import org.elasticsearch.index.mapper.SourceToParse;
|
||||
import org.elasticsearch.test.VersionUtils;
|
||||
import org.elasticsearch.xcontent.ToXContent;
|
||||
import org.elasticsearch.xcontent.XContentFactory;
|
||||
import org.elasticsearch.xcontent.XContentType;
|
||||
import org.junit.AssumptionViolatedException;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import static org.elasticsearch.geometry.utils.Geohash.stringEncode;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
|
@ -77,10 +76,6 @@ public class ShapeFieldMapperTests extends CartesianFieldMapperTests {
|
|||
AbstractShapeGeometryFieldMapper<?> gsfm = (AbstractShapeGeometryFieldMapper<?>) m;
|
||||
assertEquals(Orientation.RIGHT, gsfm.orientation());
|
||||
});
|
||||
checker.registerUpdateCheck(b -> b.field("ignore_malformed", true), m -> {
|
||||
AbstractShapeGeometryFieldMapper<?> gpfm = (AbstractShapeGeometryFieldMapper<?>) m;
|
||||
assertTrue(gpfm.ignoreMalformed());
|
||||
});
|
||||
checker.registerUpdateCheck(b -> b.field("ignore_z_value", false), m -> {
|
||||
AbstractShapeGeometryFieldMapper<?> gpfm = (AbstractShapeGeometryFieldMapper<?>) m;
|
||||
assertFalse(gpfm.ignoreZValue());
|
||||
|
@ -207,71 +202,31 @@ public class ShapeFieldMapperTests extends CartesianFieldMapperTests {
|
|||
assertThat(ignoreZValue, equalTo(false));
|
||||
}
|
||||
|
||||
/**
|
||||
* Test that ignore_malformed parameter correctly parses
|
||||
*/
|
||||
public void testIgnoreMalformedParsing() throws IOException {
|
||||
|
||||
DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> {
|
||||
b.field("type", getFieldName());
|
||||
b.field("ignore_malformed", true);
|
||||
}));
|
||||
Mapper fieldMapper = defaultMapper.mappers().getMapper(FIELD_NAME);
|
||||
assertThat(fieldMapper, instanceOf(fieldMapperClass()));
|
||||
|
||||
boolean ignoreMalformed = ((AbstractGeometryFieldMapper<?>) fieldMapper).ignoreMalformed();
|
||||
assertThat(ignoreMalformed, equalTo(true));
|
||||
|
||||
// explicit false ignore_malformed test
|
||||
defaultMapper = createDocumentMapper(fieldMapping(b -> {
|
||||
b.field("type", getFieldName());
|
||||
b.field("ignore_malformed", false);
|
||||
}));
|
||||
fieldMapper = defaultMapper.mappers().getMapper(FIELD_NAME);
|
||||
assertThat(fieldMapper, instanceOf(fieldMapperClass()));
|
||||
|
||||
ignoreMalformed = ((AbstractGeometryFieldMapper<?>) fieldMapper).ignoreMalformed();
|
||||
assertThat(ignoreMalformed, equalTo(false));
|
||||
}
|
||||
|
||||
public void testIgnoreMalformedValues() throws IOException {
|
||||
|
||||
DocumentMapper ignoreMapper = createDocumentMapper(fieldMapping(b -> {
|
||||
b.field("type", getFieldName());
|
||||
b.field("ignore_malformed", true);
|
||||
}));
|
||||
DocumentMapper failMapper = createDocumentMapper(fieldMapping(b -> {
|
||||
b.field("type", getFieldName());
|
||||
b.field("ignore_malformed", false);
|
||||
}));
|
||||
|
||||
{
|
||||
BytesReference arrayedDoc = BytesReference.bytes(
|
||||
XContentFactory.jsonBuilder().startObject().field(FIELD_NAME, "Bad shape").endObject()
|
||||
);
|
||||
SourceToParse sourceToParse = new SourceToParse("1", arrayedDoc, XContentType.JSON);
|
||||
ParsedDocument document = ignoreMapper.parse(sourceToParse);
|
||||
assertThat(document.docs().get(0).getFields(FIELD_NAME).length, equalTo(0));
|
||||
MapperParsingException exception = expectThrows(MapperParsingException.class, () -> failMapper.parse(sourceToParse));
|
||||
assertThat(exception.getCause().getMessage(), containsString("Unknown geometry type: bad"));
|
||||
}
|
||||
{
|
||||
BytesReference arrayedDoc = BytesReference.bytes(
|
||||
XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field(
|
||||
FIELD_NAME,
|
||||
"POLYGON ((18.9401790919516 -33.9681188869036, 18.9401790919516 -33.9681188869036, 18.9401790919517 "
|
||||
+ "-33.9681188869036, 18.9401790919517 -33.9681188869036, 18.9401790919516 -33.9681188869036))"
|
||||
)
|
||||
.endObject()
|
||||
);
|
||||
SourceToParse sourceToParse = new SourceToParse("1", arrayedDoc, XContentType.JSON);
|
||||
ParsedDocument document = ignoreMapper.parse(sourceToParse);
|
||||
assertThat(document.docs().get(0).getFields(FIELD_NAME).length, equalTo(0));
|
||||
MapperParsingException exception = expectThrows(MapperParsingException.class, () -> failMapper.parse(sourceToParse));
|
||||
assertThat(exception.getCause().getMessage(), containsString("at least three non-collinear points required"));
|
||||
}
|
||||
@Override
|
||||
protected List<ExampleMalformedValue> exampleMalformedValues() {
|
||||
return List.of(
|
||||
exampleMalformedValue("1234.333").errorMatches("Unknown geometry type: 1234.333"),
|
||||
exampleMalformedValue(b -> b.startObject().field("x", 1.3).field("y", "-").endObject()).errorMatches("Required [type]"),
|
||||
exampleMalformedValue(b -> b.startObject().field("x", "-").field("y", 1.3).endObject()).errorMatches("Required [type]"),
|
||||
exampleMalformedValue(b -> b.startObject().field("geohash", stringEncode(0, 0)).endObject()).errorMatches("Required [type]"),
|
||||
exampleMalformedValue("-,1.3").errorMatches("Unknown geometry type: -"),
|
||||
exampleMalformedValue("1.3,-").errorMatches("Unknown geometry type: 1.3"),
|
||||
exampleMalformedValue(b -> b.startObject().field("lon", 1.3).field("y", 1.3).endObject()).errorMatches("Required [type]"),
|
||||
exampleMalformedValue(b -> b.startObject().field("x", 1.3).field("lat", 1.3).endObject()).errorMatches("Required [type]"),
|
||||
exampleMalformedValue(b -> b.startObject().field("x", "NaN").field("y", "NaN").endObject()).errorMatches("Required [type]"),
|
||||
exampleMalformedValue(b -> b.startObject().field("x", "NaN").field("y", 1.3).endObject()).errorMatches("Required [type]"),
|
||||
exampleMalformedValue(b -> b.startObject().field("x", 1.3).field("y", "NaN").endObject()).errorMatches("Required [type]"),
|
||||
exampleMalformedValue("NaN,NaN").errorMatches("Unknown geometry type: nan"),
|
||||
exampleMalformedValue("10,NaN").errorMatches("Unknown geometry type: 10"),
|
||||
exampleMalformedValue("NaN,12").errorMatches("Unknown geometry type: nan"),
|
||||
exampleMalformedValue(b -> b.startObject().field("x", 1.3).nullField("y").endObject()).errorMatches("Required [type]"),
|
||||
exampleMalformedValue(b -> b.startObject().nullField("x").field("y", 1.3).endObject()).errorMatches("Required [type]"),
|
||||
exampleMalformedValue("Bad shape").errorMatches("Unknown geometry type: bad"),
|
||||
exampleMalformedValue(
|
||||
"POLYGON ((18.9401790919516 -33.9681188869036, 18.9401790919516 -33.9681188869036, 18.9401790919517 "
|
||||
+ "-33.9681188869036, 18.9401790919517 -33.9681188869036, 18.9401790919516 -33.9681188869036))"
|
||||
).errorMatches("at least three non-collinear points required")
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -1208,6 +1208,11 @@ public class WildcardFieldMapperTests extends MapperTestCase {
|
|||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsIgnoreMalformed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected SyntheticSourceSupport syntheticSourceSupport() {
|
||||
throw new AssumptionViolatedException("not supported");
|
||||
|
|
Loading…
Reference in New Issue