Remove 'concrete_bytes' fielddata impl from tests

We don't have this implementation anymore; tests will just fall back
to the default format and issue a warning.
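For context, a mapping that still requests the removed format (a minimal sketch mirroring the test mapping in the first hunk below; the field name "name" is illustrative and the fallback behavior is paraphrased from this commit message, not verified against the fielddata service):

    // Sketch: "concrete_bytes" no longer resolves to a fielddata implementation,
    // so loading fielddata for this field is expected to log a warning and use
    // the default string fielddata format instead.
    prepareCreate("test")
            .addMapping("type", jsonBuilder().startObject().startObject("type").startObject("properties")
                    .startObject("name")
                        .field("type", "string")
                        .startObject("fielddata").field("format", "concrete_bytes").endObject()
                    .endObject()
            .endObject().endObject().endObject());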
Simon Willnauer 2013-08-15 13:05:04 +02:00
parent 9869427ef6
commit 43fcc55625
3 changed files with 4 additions and 19 deletions

SimpleFacetsTests.java

@@ -433,10 +433,6 @@ public class SimpleFacetsTests extends AbstractSharedClusterTest {
     public void testDuelByteFieldDataImpl() throws ElasticSearchException, IOException, InterruptedException, ExecutionException {
         prepareCreate("test")
                 .addMapping("type", jsonBuilder().startObject().startObject("type").startObject("properties")
-                        .startObject("name_concrete")
-                            .field("type", "string")
-                            .startObject("fielddata").field("format", "concrete_bytes").endObject()
-                        .endObject()
                         .startObject("name_paged")
                             .field("type", "string")
                             .startObject("fielddata").field("format", "paged_bytes").endObject()
@@ -445,10 +441,6 @@ public class SimpleFacetsTests extends AbstractSharedClusterTest {
                             .field("type", "string")
                             .startObject("fielddata").field("format", "fst").endObject()
                         .endObject()
-                        .startObject("name_concrete_mv")
-                            .field("type", "string")
-                            .startObject("fielddata").field("format", "concrete_bytes").endObject()
-                        .endObject()
                         .startObject("name_paged_mv")
                             .field("type", "string")
                             .startObject("fielddata").field("format", "paged_bytes").endObject()
@@ -475,11 +467,9 @@ public class SimpleFacetsTests extends AbstractSharedClusterTest {
         for (int i = 0; i < 100; i++) {
             client().prepareIndex("test", "type", ""+i).setSource(jsonBuilder().startObject()
-                    .field("name_concrete", ""+i)
                     .field("name_paged", ""+i)
                     .field("name_fst", ""+i)
                     .field("filtered", ""+i)
-                    .field("name_concrete_mv", ""+i, ""+Math.min(99, i+1))
                     .field("name_paged_mv", ""+i, ""+Math.min(99, i+1))
                     .field("name_fst_mv", ""+i, ""+Math.min(99, i+1))
                     .field("filtered_mv", ""+i, ""+Math.min(99, i+1), ""+(100 + i))
@@ -533,12 +523,9 @@ public class SimpleFacetsTests extends AbstractSharedClusterTest {
             final SearchRequestBuilder facetRequest;
             int incrementAndGet = count.incrementAndGet();
             final String field;
-            switch (incrementAndGet % 3) {
-            case 2:
-                field = "filtered"+postfix;
-                break;
+            switch (incrementAndGet % 2) {
             case 1:
-                field = "name_concrete"+postfix;
+                field = "filtered"+postfix;
                 break;
             case 0:
                 field = "name_paged"+postfix;

DuellFieldDataTests.java

@@ -73,7 +73,6 @@ public class DuellFieldDataTests extends AbstractFieldDataTests {
         Map<FieldDataType, Type> typeMap = new HashMap<FieldDataType, DuellFieldDataTests.Type>();
         typeMap.put(new FieldDataType("string", ImmutableSettings.builder().put("format", "fst")), Type.Bytes);
         typeMap.put(new FieldDataType("string", ImmutableSettings.builder().put("format", "paged_bytes")), Type.Bytes);
-        typeMap.put(new FieldDataType("string", ImmutableSettings.builder().put("format", "concrete_bytes")), Type.Bytes);
         typeMap.put(new FieldDataType("byte"), Type.Integer);
         typeMap.put(new FieldDataType("short"), Type.Integer);
         typeMap.put(new FieldDataType("int"), Type.Integer);
@@ -242,7 +241,6 @@ public class DuellFieldDataTests extends AbstractFieldDataTests {
         Map<FieldDataType, Type> typeMap = new HashMap<FieldDataType, DuellFieldDataTests.Type>();
         typeMap.put(new FieldDataType("string", ImmutableSettings.builder().put("format", "fst")), Type.Bytes);
         typeMap.put(new FieldDataType("string", ImmutableSettings.builder().put("format", "paged_bytes")), Type.Bytes);
-        typeMap.put(new FieldDataType("string", ImmutableSettings.builder().put("format", "concrete_bytes")), Type.Bytes);
         // TODO add filters
         ArrayList<Entry<FieldDataType, Type>> list = new ArrayList<Entry<FieldDataType, Type>>(typeMap.entrySet());
         Preprocessor pre = new Preprocessor();

FilterFieldDataTest.java

@@ -64,7 +64,7 @@ public class FilterFieldDataTest extends AbstractFieldDataTests {
         }
         writer.forceMerge(1);
         AtomicReaderContext context = refreshReader();
-        String[] formats = new String[] { "fst", "paged_bytes", "concrete_bytes" };
+        String[] formats = new String[] { "fst", "paged_bytes" };
         for (String format : formats) {
             {
@@ -167,7 +167,7 @@ public class FilterFieldDataTest extends AbstractFieldDataTests {
         System.out.println(hundred + " " + ten + " " + five);
         writer.forceMerge(1);
         AtomicReaderContext context = refreshReader();
-        String[] formats = new String[] { "fst", "paged_bytes", "concrete_bytes" };
+        String[] formats = new String[] { "fst", "paged_bytes" };
         for (String format : formats) {
             {
                 ifdService.clear();