LUCENE-5858: Move back compat codecs out of core to their own jar

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1621960 13f79535-47bb-0310-9956-ffa450edef68
Robert Muir, 2014-09-02 11:14:38 +00:00
commit ec0a99552d
219 changed files with 682 additions and 3063 deletions
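The practical upshot of this change: the core jar no longer carries the 4.x codecs, and reading an old index requires lucene-backward-codecs.jar on the classpath, since each segment resolves its codec by name at open time. A minimal sketch (not part of the commit) of checking that the old codecs are resolvable:

    import org.apache.lucene.codecs.Codec;

    public class BackcompatCheck {
      public static void main(String[] args) {
        // Codec.forName throws IllegalArgumentException if nothing on the
        // classpath registers a codec under this name.
        Codec codec = Codec.forName("Lucene40");
        System.out.println("resolved: " + codec.getName());
      }
    }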

View File

@ -0,0 +1,100 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-parent</artifactId>
<version>@version@</version>
<relativePath>../pom.xml</relativePath>
</parent>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-backward-codecs</artifactId>
<packaging>jar</packaging>
<name>Lucene Backward Codecs</name>
<description>
Codecs for older versions of Lucene.
</description>
<properties>
<module-directory>lucene/backward-codecs</module-directory>
<relative-top-level>../../..</relative-top-level>
<module-path>${relative-top-level}/${module-directory}</module-path>
</properties>
<scm>
<connection>scm:svn:${vc-anonymous-base-url}/${module-directory}</connection>
<developerConnection>scm:svn:${vc-dev-base-url}/${module-directory}</developerConnection>
<url>${vc-browse-base-url}/${module-directory}</url>
</scm>
<dependencies>
<dependency>
<!-- lucene-test-framework dependency must be declared before lucene-core -->
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-test-framework</artifactId>
<scope>test</scope>
</dependency>
@lucene-backward-codecs.internal.dependencies@
@lucene-backward-codecs.external.dependencies@
@lucene-backward-codecs.internal.test.dependencies@
@lucene-backward-codecs.external.test.dependencies@
</dependencies>
<build>
<sourceDirectory>${module-path}/src/java</sourceDirectory>
<testSourceDirectory>${module-path}/src/test</testSourceDirectory>
<testResources>
<testResource>
<directory>${project.build.testSourceDirectory}</directory>
<excludes>
<exclude>**/*.java</exclude>
</excludes>
</testResource>
</testResources>
<plugins>
<plugin>
<groupId>de.thetaphi</groupId>
<artifactId>forbiddenapis</artifactId>
<executions>
<execution>
<id>lucene-shared-check-forbidden-apis</id>
<phase>none</phase> <!-- Block inherited execution -->
</execution>
<execution>
<id>check-forbidden-apis</id>
<configuration>
<!-- disallow undocumented classes like sun.misc.Unsafe: -->
<internalRuntimeForbidden>true</internalRuntimeForbidden>
<bundledSignatures>
<bundledSignature>jdk-unsafe</bundledSignature>
<bundledSignature>jdk-deprecated</bundledSignature>
<bundledSignature>jdk-system-out</bundledSignature>
</bundledSignatures>
<signaturesFiles>
<signaturesFile>${top-level}/lucene/tools/forbiddenApis/base.txt</signaturesFile>
</signaturesFiles>
</configuration>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@ -41,6 +41,7 @@
</scm>
<modules>
<module>core</module>
+    <module>backward-codecs</module>
<module>codecs</module>
<module>test-framework</module>
<module>analysis</module>

View File

@ -669,7 +669,7 @@ def verifyUnpacked(java, project, artifact, unpackPath, svnRevision, version, te
if project == 'lucene':
# TODO: clean this up to not be a list of modules that we must maintain
-    extras = ('analysis', 'benchmark', 'classification', 'codecs', 'core', 'demo', 'docs', 'expressions', 'facet', 'grouping', 'highlighter', 'join', 'memory', 'misc', 'queries', 'queryparser', 'replicator', 'sandbox', 'spatial', 'suggest', 'test-framework', 'licenses')
+    extras = ('analysis', 'backward-codecs', 'benchmark', 'classification', 'codecs', 'core', 'demo', 'docs', 'expressions', 'facet', 'grouping', 'highlighter', 'join', 'memory', 'misc', 'queries', 'queryparser', 'replicator', 'sandbox', 'spatial', 'suggest', 'test-framework', 'licenses')
if isSrc:
extras += ('build.xml', 'common-build.xml', 'module-build.xml', 'ivy-settings.xml', 'ivy-versions.properties', 'ivy-ignore-conflicts.properties', 'version.properties', 'backwards', 'tools', 'site')
else:

View File

@ -85,6 +85,9 @@ Other
* LUCENE-5563: Removed sep layout: which has fallen behind on features and doesn't
perform as well as other options. (Robert Muir)
+* LUCENE-5858: Moved compatibility codecs to 'lucene-backward-codecs.jar'.
+  (Adrien Grand, Robert Muir)
======================= Lucene 4.11.0 ======================
New Features

View File

@ -41,12 +41,10 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
-import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
/**
* trivial test of CollationDocValuesField
*/
-@SuppressCodecs("Lucene3x")
public class TestCollationDocValuesField extends LuceneTestCase {
public void testBasic() throws Exception {

View File

@ -38,7 +38,6 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
-import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import com.ibm.icu.text.Collator;
import com.ibm.icu.util.ULocale;
@ -46,7 +45,6 @@ import com.ibm.icu.util.ULocale;
/**
* trivial test of ICUCollationDocValuesField
*/
-@SuppressCodecs("Lucene3x")
public class TestICUCollationDocValuesField extends LuceneTestCase {
public void testBasic() throws Exception {

View File

@ -0,0 +1,26 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project name="backward-codecs" default="default">
<description>
Codecs for older versions of Lucene.
</description>
<import file="../module-build.xml"/>
</project>

View File

@ -0,0 +1,21 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<ivy-module version="2.0">
<info organisation="org.apache.lucene" module="backward-codecs"/>
</ivy-module>

View File

@ -20,6 +20,6 @@
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
</head>
<body>
-Support for testing {@link org.apache.lucene.codecs.lucene41.Lucene41Codec}.
+Lucene 4.0 file format.
</body>
</html>

View File

@ -20,6 +20,6 @@
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
</head>
<body>
-Support for testing {@link org.apache.lucene.codecs.lucene45.Lucene45Codec}.
+Lucene 4.1 file format.
</body>
</html>

View File

@ -20,6 +20,6 @@
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
</head>
<body>
-Support for testing {@link org.apache.lucene.codecs.lucene42.Lucene42Codec}.
+Lucene 4.2 file format.
</body>
</html>

View File

@ -20,6 +20,6 @@
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
</head>
<body>
-Support for testing {@link org.apache.lucene.codecs.lucene46.Lucene46Codec}.
+Lucene 4.5 file format.
</body>
</html>

View File

@ -0,0 +1,25 @@
<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
</head>
<body>
Lucene 4.6 file format.
</body>
</html>

View File

@ -0,0 +1,25 @@
<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
</head>
<body>
Lucene 4.9 file format.
</body>
</html>

View File

@ -1,4 +1,3 @@
-<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
@ -16,10 +15,10 @@
limitations under the License.
-->
<html>
-<head>
-  <meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
-</head>
-<body>
-Support for testing {@link org.apache.lucene.codecs.lucene49.Lucene49Codec}.
-</body>
+<head>
+  <title>Apache Lucene backward codecs</title>
+</head>
+<body>
+Codecs for reading indexes from older versions of Lucene.
+</body>
</html>

View File

@ -0,0 +1,21 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
org.apache.lucene.codecs.lucene40.Lucene40Codec
org.apache.lucene.codecs.lucene41.Lucene41Codec
org.apache.lucene.codecs.lucene42.Lucene42Codec
org.apache.lucene.codecs.lucene45.Lucene45Codec
org.apache.lucene.codecs.lucene46.Lucene46Codec
org.apache.lucene.codecs.lucene49.Lucene49Codec
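This service file registers the moved codecs with the JDK SPI mechanism; Lucene's Codec registry picks up every META-INF/services/org.apache.lucene.codecs.Codec file on the classpath, which is what lets an old segment header resolve its codec by name. A small sketch (not part of the commit) listing what is registered:

    import org.apache.lucene.codecs.Codec;

    public class ListCodecs {
      public static void main(String[] args) {
        // With lucene-backward-codecs.jar present this includes Lucene40..Lucene49.
        for (String name : Codec.availableCodecs()) {
          System.out.println(name);
        }
      }
    }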

View File

@ -0,0 +1,18 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
org.apache.lucene.codecs.lucene42.Lucene42DocValuesFormat
org.apache.lucene.codecs.lucene45.Lucene45DocValuesFormat
org.apache.lucene.codecs.lucene49.Lucene49DocValuesFormat

View File

@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
org.apache.lucene.codecs.lucene40.Lucene40PostingsFormat

View File

@ -52,6 +52,9 @@ class Lucene40DocValuesWriter extends DocValuesConsumer {
@Override
public void addNumericField(FieldInfo field, Iterable<Number> values) throws IOException {
+    if (field.getDocValuesGen() != -1) {
+      throw new UnsupportedOperationException("4.0 does not support dv updates");
+    }
// examine the values to determine best type to use
long minValue = Long.MAX_VALUE;
long maxValue = Long.MIN_VALUE;
@ -154,6 +157,9 @@ class Lucene40DocValuesWriter extends DocValuesConsumer {
@Override
public void addBinaryField(FieldInfo field, Iterable<BytesRef> values) throws IOException {
+    if (field.getDocValuesGen() != -1) {
+      throw new UnsupportedOperationException("4.0 does not support dv updates");
+    }
// examine the values to determine best type to use
HashSet<BytesRef> uniqueValues = new HashSet<>();
int minLength = Integer.MAX_VALUE;
@ -395,6 +401,9 @@ class Lucene40DocValuesWriter extends DocValuesConsumer {
@Override
public void addSortedField(FieldInfo field, Iterable<BytesRef> values, Iterable<Number> docToOrd) throws IOException {
+    if (field.getDocValuesGen() != -1) {
+      throw new UnsupportedOperationException("4.0 does not support dv updates");
+    }
// examine the values to determine best type to use
int minLength = Integer.MAX_VALUE;
int maxLength = Integer.MIN_VALUE;

View File

@ -46,6 +46,9 @@ public class Lucene40FieldInfosWriter extends FieldInfosWriter {
@Override
public void write(Directory directory, String segmentName, String segmentSuffix, FieldInfos infos, IOContext context) throws IOException {
+    if (!segmentSuffix.isEmpty()) {
+      throw new UnsupportedOperationException("4.0 does not support fieldinfo updates");
+    }
final String fileName = IndexFileNames.segmentFileName(segmentName, "", Lucene40FieldInfosFormat.FIELD_INFOS_EXTENSION);
IndexOutput output = directory.createOutput(fileName, context);
boolean success = false;

View File

@ -6,6 +6,7 @@ import org.apache.lucene.codecs.DocValuesFormat;
import org.apache.lucene.codecs.FieldInfosFormat;
import org.apache.lucene.codecs.FieldInfosWriter;
import org.apache.lucene.codecs.NormsFormat;
+import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.StoredFieldsFormat;
import org.apache.lucene.codecs.TermVectorsFormat;
import org.apache.lucene.util.LuceneTestCase;
@ -34,11 +35,7 @@ public final class Lucene40RWCodec extends Lucene40Codec {
private final FieldInfosFormat fieldInfos = new Lucene40FieldInfosFormat() {
@Override
public FieldInfosWriter getFieldInfosWriter() throws IOException {
-      if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE) {
-        return super.getFieldInfosWriter();
-      } else {
-        return new Lucene40FieldInfosWriter();
-      }
+      return new Lucene40FieldInfosWriter();
}
};
@ -46,6 +43,7 @@ public final class Lucene40RWCodec extends Lucene40Codec {
private final NormsFormat norms = new Lucene40RWNormsFormat();
private final StoredFieldsFormat stored = new Lucene40RWStoredFieldsFormat();
private final TermVectorsFormat vectors = new Lucene40RWTermVectorsFormat();
+  private final PostingsFormat postings = new Lucene40RWPostingsFormat();
@Override
public FieldInfosFormat fieldInfosFormat() {
@ -71,4 +69,9 @@ public final class Lucene40RWCodec extends Lucene40Codec {
public TermVectorsFormat termVectorsFormat() {
return vectors;
}
+  @Override
+  public PostingsFormat getPostingsFormatForField(String field) {
+    return postings;
+  }
}
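With the OLD_FORMAT_IMPERSONATION_IS_ACTIVE checks gone, the read-write codecs above always write the old format; tests that need a 4.0-era segment install the codec directly on the writer config, as the updated TestLucene40PostingsReader below does. A minimal sketch of that pattern, assuming it runs inside a LuceneTestCase subclass:

    // Sketch only: newIndexWriterConfig()/random() are LuceneTestCase helpers.
    IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
    iwc.setCodec(new Lucene40RWCodec()); // write segments in the 4.0 file format
    IndexWriter writer = new IndexWriter(dir, iwc);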

View File

@ -22,7 +22,6 @@ import java.io.IOException;
import org.apache.lucene.codecs.DocValuesConsumer;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentWriteState;
-import org.apache.lucene.util.LuceneTestCase;
/** Read-write version of {@link Lucene40DocValuesFormat} for testing */
@SuppressWarnings("deprecation")
@ -30,13 +29,9 @@ public class Lucene40RWDocValuesFormat extends Lucene40DocValuesFormat {
@Override
public DocValuesConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
-    if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE) {
-      return super.fieldsConsumer(state);
-    } else {
-      String filename = IndexFileNames.segmentFileName(state.segmentInfo.name,
-          "dv",
-          IndexFileNames.COMPOUND_FILE_EXTENSION);
-      return new Lucene40DocValuesWriter(state, filename, Lucene40FieldInfosReader.LEGACY_DV_TYPE_KEY);
-    }
+    String filename = IndexFileNames.segmentFileName(state.segmentInfo.name,
+        "dv",
+        IndexFileNames.COMPOUND_FILE_EXTENSION);
+    return new Lucene40DocValuesWriter(state, filename, Lucene40FieldInfosReader.LEGACY_DV_TYPE_KEY);
}
}

View File

@ -23,7 +23,6 @@ import org.apache.lucene.codecs.NormsConsumer;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentWriteState;
-import org.apache.lucene.util.LuceneTestCase;
/** Read-write version of {@link Lucene40NormsFormat} for testing */
@SuppressWarnings("deprecation")
@ -31,24 +30,20 @@ public class Lucene40RWNormsFormat extends Lucene40NormsFormat {
@Override
public NormsConsumer normsConsumer(SegmentWriteState state) throws IOException {
-    if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE) {
-      return super.normsConsumer(state);
-    } else {
-      String filename = IndexFileNames.segmentFileName(state.segmentInfo.name,
-          "nrm",
-          IndexFileNames.COMPOUND_FILE_EXTENSION);
-      final Lucene40DocValuesWriter impl = new Lucene40DocValuesWriter(state, filename, Lucene40FieldInfosReader.LEGACY_NORM_TYPE_KEY);
-      return new NormsConsumer() {
-        @Override
-        public void addNormsField(FieldInfo field, Iterable<Number> values) throws IOException {
-          impl.addNumericField(field, values);
-        }
-        @Override
-        public void close() throws IOException {
-          impl.close();
-        }
-      };
-    }
+    String filename = IndexFileNames.segmentFileName(state.segmentInfo.name,
+        "nrm",
+        IndexFileNames.COMPOUND_FILE_EXTENSION);
+    final Lucene40DocValuesWriter impl = new Lucene40DocValuesWriter(state, filename, Lucene40FieldInfosReader.LEGACY_NORM_TYPE_KEY);
+    return new NormsConsumer() {
+      @Override
+      public void addNormsField(FieldInfo field, Iterable<Number> values) throws IOException {
+        impl.addNumericField(field, values);
+      }
+      @Override
+      public void close() throws IOException {
+        impl.close();
+      }
+    };
}
}

View File

@ -33,24 +33,20 @@ public class Lucene40RWPostingsFormat extends Lucene40PostingsFormat {
@Override
public FieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
-    if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE) {
-      return super.fieldsConsumer(state);
-    } else {
-      PostingsWriterBase docs = new Lucene40PostingsWriter(state);
-      // TODO: should we make the terms index more easily
-      // pluggable? Ie so that this codec would record which
-      // index impl was used, and switch on loading?
-      // Or... you must make a new Codec for this?
-      boolean success = false;
-      try {
-        FieldsConsumer ret = new BlockTreeTermsWriter(state, docs, minBlockSize, maxBlockSize);
-        success = true;
-        return ret;
-      } finally {
-        if (!success) {
-          docs.close();
-        }
-      }
-    }
+    PostingsWriterBase docs = new Lucene40PostingsWriter(state);
+    // TODO: should we make the terms index more easily
+    // pluggable? Ie so that this codec would record which
+    // index impl was used, and switch on loading?
+    // Or... you must make a new Codec for this?
+    boolean success = false;
+    try {
+      FieldsConsumer ret = new BlockTreeTermsWriter(state, docs, minBlockSize, maxBlockSize);
+      success = true;
+      return ret;
+    } finally {
+      if (!success) {
+        docs.close();
+      }
+    }
  }

View File

@ -23,7 +23,6 @@ import org.apache.lucene.codecs.StoredFieldsWriter;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
-import org.apache.lucene.util.LuceneTestCase;
/**
* Simulates writing Lucene 4.0 Stored Fields Format.
@ -32,10 +31,6 @@ public class Lucene40RWStoredFieldsFormat extends Lucene40StoredFieldsFormat {
@Override
public StoredFieldsWriter fieldsWriter(Directory directory, SegmentInfo si, IOContext context) throws IOException {
-    if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE) {
-      throw new UnsupportedOperationException("this codec can only be used for reading");
-    } else {
-      return new Lucene40StoredFieldsWriter(directory, si.name, context);
-    }
+    return new Lucene40StoredFieldsWriter(directory, si.name, context);
}
}

View File

@ -32,10 +32,6 @@ public class Lucene40RWTermVectorsFormat extends Lucene40TermVectorsFormat {
@Override
public TermVectorsWriter vectorsWriter(Directory directory, SegmentInfo segmentInfo, IOContext context) throws IOException {
-    if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE) {
-      throw new UnsupportedOperationException("this codec can only be used for reading");
-    } else {
-      return new Lucene40TermVectorsWriter(directory, segmentInfo.name, context);
-    }
+    return new Lucene40TermVectorsWriter(directory, segmentInfo.name, context);
}
}

View File

@ -19,7 +19,6 @@ package org.apache.lucene.codecs.lucene40;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.index.BaseDocValuesFormatTestCase;
-import org.junit.BeforeClass;
/**
* Tests Lucene40DocValuesFormat
@ -27,11 +26,6 @@ import org.junit.BeforeClass;
public class TestLucene40DocValuesFormat extends BaseDocValuesFormatTestCase {
private final Codec codec = new Lucene40RWCodec();
-  @BeforeClass
-  public static void beforeClass() {
-    OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
-  }
@Override
protected Codec getCodec() {
return codec;
@ -43,4 +37,20 @@ public class TestLucene40DocValuesFormat extends BaseDocValuesFormatTestCase {
return false;
}
+  // this codec doesnt support missing (its the same as empty string)
+  @Override
+  protected boolean codecSupportsDocsWithField() {
+    return false;
+  }
+  @Override
+  protected boolean codecSupportsSortedSet() {
+    return false;
+  }
+  @Override
+  protected boolean codecSupportsSortedNumeric() {
+    return false;
+  }
}
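The codecSupports* overrides advertise which doc-values capabilities the 4.0 format lacks, so the shared base class can skip the corresponding tests instead of failing them. A hedged sketch of that gating pattern (the method name testSortedSetVariants is illustrative, not from the commit):

    // Illustrative only: LuceneTestCase.assumeTrue() skips a test when the
    // condition is false, rather than reporting a failure.
    public void testSortedSetVariants() throws Exception {
      assumeTrue("codec does not support SORTED_SET", codecSupportsSortedSet());
      // ... exercise SORTED_SET doc values here ...
    }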

View File

@ -19,8 +19,6 @@ package org.apache.lucene.codecs.lucene40;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.index.BaseNormsFormatTestCase;
-import org.junit.BeforeClass;
/** Tests Lucene40's norms format */
public class TestLucene40NormsFormat extends BaseNormsFormatTestCase {
@ -30,9 +28,4 @@ public class TestLucene40NormsFormat extends BaseNormsFormatTestCase {
protected Codec getCodec() {
return codec;
}
-  @BeforeClass
-  public static void beforeClass() {
-    OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
-  }
}

View File

@ -19,7 +19,6 @@ package org.apache.lucene.codecs.lucene40;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.index.BasePostingsFormatTestCase;
-import org.junit.BeforeClass;
/**
* Tests Lucene40PostingsFormat
@ -27,11 +26,6 @@ import org.junit.BeforeClass;
public class TestLucene40PostingsFormat extends BasePostingsFormatTestCase {
private final Codec codec = new Lucene40RWCodec();
-  @BeforeClass
-  public static void beforeClass() {
-    OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
-  }
@Override
protected Codec getCodec() {
return codec;

View File

@ -21,7 +21,6 @@ import java.util.ArrayList;
import java.util.Collections;
import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.codecs.Codec;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
@ -34,7 +33,6 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
-import org.junit.BeforeClass;
public class TestLucene40PostingsReader extends LuceneTestCase {
static final String terms[] = new String[100];
@ -44,18 +42,13 @@ public class TestLucene40PostingsReader extends LuceneTestCase {
}
}
-  @BeforeClass
-  public static void beforeClass() {
-    OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
-  }
/** tests terms with different probabilities of being in the document.
* depends heavily on term vectors cross-check at checkIndex
*/
public void testPostings() throws Exception {
Directory dir = newFSDirectory(createTempDir("postings"));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
-    iwc.setCodec(Codec.forName("Lucene40"));
+    iwc.setCodec(new Lucene40RWCodec());
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
Document doc = new Document();

View File

@ -23,11 +23,6 @@ import org.junit.BeforeClass;
public class TestLucene40StoredFieldsFormat extends BaseStoredFieldsFormatTestCase {
-  @BeforeClass
-  public static void beforeClass() {
-    OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
-  }
@Override
protected Codec getCodec() {
return new Lucene40RWCodec();

View File

@ -23,11 +23,6 @@ import org.junit.BeforeClass;
public class TestLucene40TermVectorsFormat extends BaseTermVectorsFormatTestCase {
-  @BeforeClass
-  public static void beforeClass() {
-    OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
-  }
@Override
protected Codec getCodec() {
return new Lucene40RWCodec();

View File

@ -43,11 +43,6 @@ import org.junit.BeforeClass;
// TODO: really this should be in BaseTestPF or somewhere else? useful test!
public class TestReuseDocsEnum extends LuceneTestCase {
-  @BeforeClass
-  public static void beforeClass() {
-    OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
-  }
public void testReuseDocsEnumNoReuse() throws IOException {
Directory dir = newDirectory();
Codec cp = TestUtil.alwaysPostingsFormat(new Lucene40RWPostingsFormat());

View File

@ -41,11 +41,7 @@ public class Lucene41RWCodec extends Lucene41Codec {
private final FieldInfosFormat fieldInfos = new Lucene40FieldInfosFormat() {
@Override
public FieldInfosWriter getFieldInfosWriter() throws IOException {
-      if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE) {
-        return super.getFieldInfosWriter();
-      } else {
-        return new Lucene40FieldInfosWriter();
-      }
+      return new Lucene40FieldInfosWriter();
}
};

View File

@ -87,6 +87,9 @@ class Lucene42DocValuesConsumer extends DocValuesConsumer {
@Override
public void addNumericField(FieldInfo field, Iterable<Number> values) throws IOException {
+    if (field.getDocValuesGen() != -1) {
+      throw new UnsupportedOperationException("4.2 does not support dv updates");
+    }
addNumericField(field, values, true);
}
@ -209,6 +212,9 @@ class Lucene42DocValuesConsumer extends DocValuesConsumer {
@Override
public void addBinaryField(FieldInfo field, final Iterable<BytesRef> values) throws IOException {
+    if (field.getDocValuesGen() != -1) {
+      throw new UnsupportedOperationException("4.2 does not support dv updates");
+    }
// write the byte[] data
meta.writeVInt(field.number);
meta.writeByte(BYTES);
@ -270,6 +276,9 @@ class Lucene42DocValuesConsumer extends DocValuesConsumer {
@Override
public void addSortedField(FieldInfo field, Iterable<BytesRef> values, Iterable<Number> docToOrd) throws IOException {
+    if (field.getDocValuesGen() != -1) {
+      throw new UnsupportedOperationException("4.2 does not support dv updates");
+    }
// three cases for simulating the old writer:
// 1. no missing
// 2. missing (and empty string in use): remap ord=-1 -> ord=0
@ -307,6 +316,7 @@ class Lucene42DocValuesConsumer extends DocValuesConsumer {
// note: this might not be the most efficient... but its fairly simple
@Override
public void addSortedSetField(FieldInfo field, Iterable<BytesRef> values, final Iterable<Number> docToOrdCount, final Iterable<Number> ords) throws IOException {
+    assert field.getDocValuesGen() == -1;
// write the ordinals as a binary field
addBinaryField(field, new Iterable<BytesRef>() {
@Override

View File

@ -46,6 +46,9 @@ public final class Lucene42FieldInfosWriter extends FieldInfosWriter {
@Override
public void write(Directory directory, String segmentName, String segmentSuffix, FieldInfos infos, IOContext context) throws IOException {
+    if (!segmentSuffix.isEmpty()) {
+      throw new UnsupportedOperationException("4.2 does not support fieldinfo updates");
+    }
final String fileName = IndexFileNames.segmentFileName(segmentName, "", Lucene42FieldInfosFormat.EXTENSION);
IndexOutput output = directory.createOutput(fileName, context);
boolean success = false;

View File

@ -37,11 +37,7 @@ public class Lucene42RWCodec extends Lucene42Codec {
private final FieldInfosFormat fieldInfosFormat = new Lucene42FieldInfosFormat() {
@Override
public FieldInfosWriter getFieldInfosWriter() throws IOException {
-      if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE) {
-        return super.getFieldInfosWriter();
-      } else {
-        return new Lucene42FieldInfosWriter();
-      }
+      return new Lucene42FieldInfosWriter();
}
};

View File

@ -31,11 +31,7 @@ public class Lucene42RWDocValuesFormat extends Lucene42DocValuesFormat {
@Override
public DocValuesConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
-    if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE) {
-      return super.fieldsConsumer(state);
-    } else {
-      // note: we choose DEFAULT here (its reasonably fast, and for small bpv has tiny waste)
-      return new Lucene42DocValuesConsumer(state, DATA_CODEC, DATA_EXTENSION, METADATA_CODEC, METADATA_EXTENSION, acceptableOverheadRatio);
-    }
+    // note: we choose DEFAULT here (its reasonably fast, and for small bpv has tiny waste)
+    return new Lucene42DocValuesConsumer(state, DATA_CODEC, DATA_EXTENSION, METADATA_CODEC, METADATA_EXTENSION, acceptableOverheadRatio);
}
}

View File

@ -30,10 +30,6 @@ public class Lucene42RWNormsFormat extends Lucene42NormsFormat {
@Override
public NormsConsumer normsConsumer(SegmentWriteState state) throws IOException {
-    if (LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE) {
-      return new Lucene42NormsConsumer(state, DATA_CODEC, DATA_EXTENSION, METADATA_CODEC, METADATA_EXTENSION, acceptableOverheadRatio);
-    } else {
-      return super.normsConsumer(state);
-    }
+    return new Lucene42NormsConsumer(state, DATA_CODEC, DATA_EXTENSION, METADATA_CODEC, METADATA_EXTENSION, acceptableOverheadRatio);
}
}

View File

@ -27,11 +27,6 @@ import org.junit.BeforeClass;
public class TestLucene42DocValuesFormat extends BaseCompressingDocValuesFormatTestCase {
private final Codec codec = new Lucene42RWCodec();
-  @BeforeClass
-  public static void beforeClass() {
-    OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
-  }
@Override
protected Codec getCodec() {
return codec;
@ -41,4 +36,15 @@ public class TestLucene42DocValuesFormat extends BaseCompressingDocValuesFormatT
protected boolean codecAcceptsHugeBinaryValues(String field) {
return false;
}
+  // this codec doesnt support missing (its the same as empty string)
+  @Override
+  protected boolean codecSupportsDocsWithField() {
+    return false;
+  }
+  @Override
+  protected boolean codecSupportsSortedNumeric() {
+    return false;
+  }
}

View File

@ -30,9 +30,4 @@ public class TestLucene42NormsFormat extends BaseNormsFormatTestCase {
protected Codec getCodec() {
return codec;
}
-  @BeforeClass
-  public static void beforeClass() {
-    OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
-  }
}

View File

@ -26,7 +26,6 @@ import org.apache.lucene.codecs.NormsFormat;
import org.apache.lucene.codecs.lucene42.Lucene42FieldInfosFormat;
import org.apache.lucene.codecs.lucene42.Lucene42FieldInfosWriter;
import org.apache.lucene.codecs.lucene42.Lucene42RWNormsFormat;
-import org.apache.lucene.util.LuceneTestCase;
/**
* Read-write version of {@link Lucene45Codec} for testing.
@ -37,11 +36,7 @@ public class Lucene45RWCodec extends Lucene45Codec {
private final FieldInfosFormat fieldInfosFormat = new Lucene42FieldInfosFormat() {
@Override
public FieldInfosWriter getFieldInfosWriter() throws IOException {
-      if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE) {
-        return super.getFieldInfosWriter();
-      } else {
-        return new Lucene42FieldInfosWriter();
-      }
+      return new Lucene42FieldInfosWriter();
}
};

View File

@ -22,7 +22,6 @@ import java.io.IOException;
import org.apache.lucene.codecs.DocValuesConsumer;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.SegmentWriteState;
-import org.apache.lucene.util.LuceneTestCase;
/**
* Read-write version of {@link Lucene45DocValuesFormat} for testing.
@ -31,15 +30,11 @@ public class Lucene45RWDocValuesFormat extends Lucene45DocValuesFormat {
@Override
public DocValuesConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
-    if (LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE) {
-      return new Lucene45DocValuesConsumer(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION) {
-        @Override
-        void checkCanWrite(FieldInfo field) {
-          // allow writing all fields
-        }
-      };
-    } else {
-      return super.fieldsConsumer(state);
-    }
+    return new Lucene45DocValuesConsumer(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION) {
+      @Override
+      void checkCanWrite(FieldInfo field) {
+        // allow writing all fields
+      }
+    };
}
}

View File

@ -27,13 +27,13 @@ import org.junit.BeforeClass;
public class TestLucene45DocValuesFormat extends BaseCompressingDocValuesFormatTestCase {
private final Codec codec = new Lucene45RWCodec();
-  @BeforeClass
-  public static void beforeClass() {
-    OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
-  }
@Override
protected Codec getCodec() {
return codec;
}
+  @Override
+  protected boolean codecSupportsSortedNumeric() {
+    return false;
+  }
}

View File

@ -29,16 +29,12 @@ public class Lucene49RWDocValuesFormat extends Lucene49DocValuesFormat {
@Override
public DocValuesConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
-    if (LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE) {
-      return new Lucene49DocValuesConsumer(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION) {
-        @Override
-        void checkCanWrite(FieldInfo field) {
-          // allow writing all fields
-        }
-      };
-    } else {
-      return super.fieldsConsumer(state);
-    }
+    return new Lucene49DocValuesConsumer(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION) {
+      @Override
+      void checkCanWrite(FieldInfo field) {
+        // allow writing all fields
+      }
+    };
}
}

View File

@ -27,11 +27,6 @@ import org.junit.BeforeClass;
public class TestLucene49DocValuesFormat extends BaseCompressingDocValuesFormatTestCase {
private final Codec codec = new Lucene49RWCodec();
-  @BeforeClass
-  public static void beforeClass() {
-    OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
-  }
@Override
protected Codec getCodec() {
return codec;

View File

@ -73,11 +73,6 @@ import org.junit.BeforeClass;
Verify we can read the pre-5.0 file format, do searches
against it, and add documents to it.
*/
-// note: add this if we make a 4.x impersonator
-// TODO: don't use 4.x codec, its unrealistic since it means
-// we won't even be running the actual code, only the impostor
-// @SuppressCodecs("Lucene4x")
+@SuppressCodecs({"Lucene40", "Lucene41", "Lucene42", "Lucene45", "Lucene46", "Lucene49"})
public class TestBackwardsCompatibility extends LuceneTestCase {
// Uncomment these cases & run them on an older Lucene version,
@ -167,8 +162,8 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
}
private void updateBinary(IndexWriter writer, String id, String f, String cf, long value) throws IOException {
-    writer.updateBinaryDocValue(new Term("id", id), f, TestBinaryDocValuesUpdates.toBytes(value));
-    writer.updateBinaryDocValue(new Term("id", id), cf, TestBinaryDocValuesUpdates.toBytes(value*2));
+    writer.updateBinaryDocValue(new Term("id", id), f, TestDocValuesUpdatesOnOldSegments.toBytes(value));
+    writer.updateBinaryDocValue(new Term("id", id), cf, TestDocValuesUpdatesOnOldSegments.toBytes(value*2));
}
/* // Creates an index with DocValues updates
@ -191,10 +186,10 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
doc.add(new NumericDocValuesField("ndv1_c", i*2));
doc.add(new NumericDocValuesField("ndv2", i*3));
doc.add(new NumericDocValuesField("ndv2_c", i*6));
-      doc.add(new BinaryDocValuesField("bdv1", TestBinaryDocValuesUpdates.toBytes(i)));
-      doc.add(new BinaryDocValuesField("bdv1_c", TestBinaryDocValuesUpdates.toBytes(i*2)));
-      doc.add(new BinaryDocValuesField("bdv2", TestBinaryDocValuesUpdates.toBytes(i*3)));
-      doc.add(new BinaryDocValuesField("bdv2_c", TestBinaryDocValuesUpdates.toBytes(i*6)));
+      doc.add(new BinaryDocValuesField("bdv1", TestDocValuesUpdatesOnOldSegments.toBytes(i)));
+      doc.add(new BinaryDocValuesField("bdv1_c", TestDocValuesUpdatesOnOldSegments.toBytes(i*2)));
+      doc.add(new BinaryDocValuesField("bdv2", TestDocValuesUpdatesOnOldSegments.toBytes(i*3)));
+      doc.add(new BinaryDocValuesField("bdv2_c", TestDocValuesUpdatesOnOldSegments.toBytes(i*6)));
writer.addDocument(doc);
if ((i+1) % 10 == 0) {
writer.commit(); // flush every 10 docs
@ -281,7 +276,6 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
@BeforeClass
public static void beforeClass() throws Exception {
-    assertFalse("test infra is broken!", LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE);
List<String> names = new ArrayList<>(oldNames.length + oldSingleSegmentNames.length);
names.addAll(Arrays.asList(oldNames));
names.addAll(Arrays.asList(oldSingleSegmentNames));
@ -1085,7 +1079,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
BinaryDocValues bdvf = r.getBinaryDocValues(f);
BinaryDocValues bdvcf = r.getBinaryDocValues(cf);
for (int i = 0; i < r.maxDoc(); i++) {
-      assertEquals(TestBinaryDocValuesUpdates.getValue(bdvcf, i), TestBinaryDocValuesUpdates.getValue(bdvf, i)*2);
+      assertEquals(TestDocValuesUpdatesOnOldSegments.getValue(bdvcf, i), TestDocValuesUpdatesOnOldSegments.getValue(bdvf, i)*2);
}
}

View File

@ -0,0 +1,115 @@
package org.apache.lucene.index;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.lucene40.Lucene40RWCodec;
import org.apache.lucene.codecs.lucene41.Lucene41RWCodec;
import org.apache.lucene.codecs.lucene42.Lucene42RWCodec;
import org.apache.lucene.codecs.lucene45.Lucene45RWCodec;
import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
public class TestDocValuesUpdatesOnOldSegments extends LuceneTestCase {
static long getValue(BinaryDocValues bdv, int idx) {
BytesRef term = bdv.get(idx);
idx = term.offset;
byte b = term.bytes[idx++];
long value = b & 0x7FL;
for (int shift = 7; (b & 0x80L) != 0; shift += 7) {
b = term.bytes[idx++];
value |= (b & 0x7FL) << shift;
}
return value;
}
// encodes a long into a BytesRef as VLong so that we get varying number of bytes when we update
static BytesRef toBytes(long value) {
BytesRef bytes = new BytesRef(10); // negative longs may take 10 bytes
while ((value & ~0x7FL) != 0L) {
bytes.bytes[bytes.length++] = (byte) ((value & 0x7FL) | 0x80L);
value >>>= 7;
}
bytes.bytes[bytes.length++] = (byte) value;
return bytes;
}
public void testBinaryUpdates() throws Exception {
Codec[] oldCodecs = new Codec[] { new Lucene40RWCodec(), new Lucene41RWCodec(), new Lucene42RWCodec(), new Lucene45RWCodec() };
Directory dir = newDirectory();
// create a segment with an old Codec
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setCodec(oldCodecs[random().nextInt(oldCodecs.length)]);
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
doc.add(new StringField("id", "doc", Store.NO));
doc.add(new BinaryDocValuesField("f", toBytes(5L)));
writer.addDocument(doc);
writer.close();
conf = newIndexWriterConfig(new MockAnalyzer(random()));
writer = new IndexWriter(dir, conf);
writer.updateBinaryDocValue(new Term("id", "doc"), "f", toBytes(4L));
try {
writer.close();
fail("should not have succeeded to update a segment written with an old Codec");
} catch (UnsupportedOperationException e) {
writer.rollback();
}
dir.close();
}
public void testNumericUpdates() throws Exception {
Codec[] oldCodecs = new Codec[] { new Lucene40RWCodec(), new Lucene41RWCodec(), new Lucene42RWCodec(), new Lucene45RWCodec() };
Directory dir = newDirectory();
// create a segment with an old Codec
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setCodec(oldCodecs[random().nextInt(oldCodecs.length)]);
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
doc.add(new StringField("id", "doc", Store.NO));
doc.add(new NumericDocValuesField("f", 5));
writer.addDocument(doc);
writer.close();
conf = newIndexWriterConfig(new MockAnalyzer(random()));
writer = new IndexWriter(dir, conf);
writer.updateNumericDocValue(new Term("id", "doc"), "f", 4L);
try {
writer.close();
fail("should not have succeeded to update a segment written with an old Codec");
} catch (UnsupportedOperationException e) {
writer.rollback();
}
dir.close();
}
}
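toBytes() and getValue() round-trip a long through the vLong wire encoding: low seven bits first, with the high bit of each byte as a continuation flag. A quick worked example (not in the commit) of what the encoder produces:

    // 300 = 0b1_0010_1100: the low 7 bits 0x2C get the continuation bit -> 0xAC,
    // and the remaining bits (300 >>> 7 == 2) become the final byte 0x02.
    BytesRef bytes = toBytes(300L);
    assert bytes.length == 2;
    assert (bytes.bytes[0] & 0xFF) == 0xAC;
    assert bytes.bytes[1] == 2;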

Some files were not shown because too many files have changed in this diff.