mirror of https://github.com/apache/lucene.git
LUCENE-5971: Create addBackcompatIndexes.py script to build and add backcompat test indexes
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1627419 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent
272be0d711
commit
8d885e4fb0
|
@ -0,0 +1,227 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import sys
|
||||
sys.path.append(os.path.dirname(__file__))
|
||||
import scriptutil
|
||||
|
||||
import argparse
|
||||
import urllib.error
|
||||
import urllib.request
|
||||
import re
|
||||
import shutil
|
||||
|
||||
def create_and_add_index(source, indextype, version, temp_dir):
  """Build one backcompat test index of the given type and stage it in svn.

  source -- path to the extracted lucene source release to build with
  indextype -- 'cfs' or 'nocfs' (or a raw directory name for other types)
  version -- scriptutil.Version being added
  temp_dir -- scratch directory the ant test writes the raw index into
  """
  # directory name the test writes the raw index into (under temp_dir)
  dirname = 'index.%s' % indextype if indextype in ('cfs', 'nocfs') else indextype
  zip_templates = {
    'cfs': 'index.%s-cfs.zip',
    'nocfs': 'index.%s-nocfs.zip',
  }
  filename = zip_templates[indextype] % version
  print(' creating %s...' % filename, end='', flush=True)
  module = 'lucene/backward-codecs'
  index_dir = os.path.join(module, 'src/test/org/apache/lucene/index')
  test_file = os.path.join(index_dir, filename)
  if os.path.exists(os.path.join(index_dir, filename)):
    print('uptodate')
    return

  testmethods = {
    'cfs': 'testCreateCFS',
    'nocfs': 'testCreateNoCFS',
  }
  ant_args = ' '.join([
    '-Dtests.bwcdir=%s' % temp_dir,
    '-Dtests.codec=default',
    '-Dtests.useSecurityManager=false',
    '-Dtestcase=TestBackwardsCompatibility',
    '-Dtestmethod=%s' % testmethods[indextype],
  ])
  base_dir = os.getcwd()
  bc_index_dir = os.path.join(temp_dir, dirname)
  bc_index_file = os.path.join(bc_index_dir, filename)

  if os.path.exists(bc_index_file):
    # zip already built on a previous run; reuse it
    print('alreadyexists')
  else:
    if os.path.exists(bc_index_dir):
      shutil.rmtree(bc_index_dir)
    os.chdir(os.path.join(source, module))
    scriptutil.run('ant test %s' % ant_args)
    os.chdir(bc_index_dir)
    scriptutil.run('zip %s *' % filename)
    print('done')

  print(' adding %s...' % filename, end='', flush=True)
  scriptutil.run('cp %s %s' % (bc_index_file, os.path.join(base_dir, index_dir)))
  os.chdir(base_dir)
  output = scriptutil.run('svn status %s' % test_file)
  if not output.strip():
    # make sure to only add if the file isn't already in svn (we might be regenerating)
    scriptutil.run('svn add %s' % test_file)
  os.chdir(base_dir)
  scriptutil.run('rm -rf %s' % bc_index_dir)
  print('done')
|
||||
|
||||
def update_backcompat_tests(types, version):
  """Insert the new index names into the oldNames array of
  TestBackwardsCompatibility.java, keeping the array in version order."""
  print(' adding new indexes to backcompat tests...', end='', flush=True)
  module = 'lucene/backward-codecs'
  filename = '%s/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java' % module
  # matches both the opening of the oldNames array and a closing brace line
  matcher = re.compile(r'final static String\[\] oldNames = {|};')

  def find_version(x):
    # entries look like '"4.0.0-cfs",' -> parse the version before the dash
    x = x.strip()
    end = x.index("-")
    return scriptutil.Version.parse(x[1:end])

  class Edit(object):
    start = None  # buffer index of the first array entry, set on first match

    def __call__(self, buffer, match, line):
      if self.start:
        # second match: we are at the closing '};' of the array.
        # find where we this version should exist
        idx = len(buffer) - 1
        ver = find_version(buffer[idx])
        while idx >= self.start and ver.on_or_after(version):
          idx -= 1
          ver = find_version(buffer[idx])
        idx += 1  # readjust since we skipped past by 1

        # unfortunately python doesn't have a range remove from list...
        # here we want to remove any previous references to the version we are adding
        while idx < len(buffer) and version.on_or_after(find_version(buffer[idx])):
          buffer.pop(idx)

        if idx == len(buffer) and not buffer[-1].strip().endswith(","):
          # add comma
          buffer[-1] = buffer[-1].rstrip() + ",\n"

        last = buffer[-1]
        spaces = ' ' * (len(last) - len(last.lstrip()))
        for pos, indextype in enumerate(types):
          entry = spaces + ('"%s-%s"' % (version, indextype))
          # trailing comma unless this is the very last array element
          if pos < len(types) - 1 or idx < len(buffer):
            entry += ','
          buffer.insert(idx, entry + '\n')
          idx += 1

        buffer.append(line)
        return True

      if 'oldNames' in line:
        self.start = len(buffer)  # location of first index name
      buffer.append(line)
      return False

  changed = scriptutil.update_file(filename, matcher, Edit())
  print('done' if changed else 'uptodate')
|
||||
|
||||
def check_backcompat_tests():
  """Run TestBackwardsCompatibility to verify the newly added indexes pass."""
  print(' checking backcompat tests...', end='', flush=True)
  cwd = os.getcwd()
  os.chdir('lucene/backward-codecs')
  scriptutil.run('ant test -Dtestcase=TestBackwardsCompatibility')
  os.chdir(cwd)
  print('ok')
|
||||
|
||||
def download_from_mirror(version, remotename, localname):
  """Fetch a release artifact from the apache mirror.

  Returns True on success, False if the file is absent (HTTP 404).
  Any other failure propagates.
  """
  url = 'http://apache.cs.utah.edu/lucene/java/%s/%s' % (version, remotename)
  try:
    urllib.request.urlretrieve(url, localname)
    return True
  # bug fix: the code previously caught urllib.error.URLError and read
  # e.code, but only HTTPError carries .code -- a plain URLError (e.g. a
  # DNS failure) raised AttributeError instead of propagating usefully.
  except urllib.error.HTTPError as e:
    if e.code == 404:
      # not on this mirror; caller can fall back to the archives
      return False
    raise
|
||||
|
||||
def download_from_archives(version, remotename, localname):
  """Fetch a release artifact from the apache archives.

  Returns True on success, False if the file is absent (HTTP 404).
  Any other failure propagates.
  """
  url = 'http://archive.apache.org/dist/lucene/java/%s/%s' % (version, remotename)
  try:
    urllib.request.urlretrieve(url, localname)
    return True
  # bug fix: the code previously caught urllib.error.URLError and read
  # e.code, but only HTTPError carries .code -- a plain URLError (e.g. a
  # DNS failure) raised AttributeError instead of propagating usefully.
  except urllib.error.HTTPError as e:
    if e.code == 404:
      return False
    raise
|
||||
|
||||
def download_release(version, temp_dir, force):
  """Download and extract the lucene source release for *version*.

  An existing extraction under temp_dir is reused unless *force* is set.
  Returns the path to the extracted source tree.
  """
  print(' downloading %s source release...' % version, end='', flush=True)
  source = os.path.join(temp_dir, 'lucene-%s' % version)
  if os.path.exists(source):
    if not force:
      print('uptodate')
      return source
    shutil.rmtree(source)

  filename = 'lucene-%s-src.tgz' % version
  source_tgz = os.path.join(temp_dir, filename)
  # try the fast mirror first, then fall back to the permanent archives
  found = download_from_mirror(version, filename, source_tgz) or \
          download_from_archives(version, filename, source_tgz)
  if not found:
    raise Exception('Could not find version %s in apache mirror or archives' % version)

  previous_cwd = os.getcwd()
  os.chdir(temp_dir)
  scriptutil.run('tar -xvzf %s' % source_tgz)
  os.chdir(previous_cwd)
  print('done')
  return source
|
||||
|
||||
def read_config():
  """Parse command line arguments; returns the argparse namespace."""
  parser = argparse.ArgumentParser(description='Add backcompat index and test for new version')
  parser.add_argument('--force', action='store_true', default=False,
                      help='Redownload the version and rebuild, even if it already exists')
  parser.add_argument('--no-cleanup', dest='cleanup', action='store_false', default=True,
                      help='Do not cleanup the built indexes, so that they can be reused ' +
                           'for adding to another branch')
  parser.add_argument('--temp-dir', metavar='DIR', default='/tmp/lucenebwc',
                      help='Temp directory to build backcompat indexes within')
  parser.add_argument('version', type=scriptutil.Version.parse,
                      help='Version to add, of the form X.Y.Z')
  return parser.parse_args()
|
||||
|
||||
def main():
  """Entry point: download the release, build both index types, wire them
  into the backcompat test, verify, and clean up."""
  config = read_config()
  if not os.path.exists(config.temp_dir):
    os.makedirs(config.temp_dir)

  print('\nCreating backwards compatibility indexes')
  source = download_release(config.version, config.temp_dir, config.force)
  for indextype in ('cfs', 'nocfs'):
    create_and_add_index(source, indextype, config.version, config.temp_dir)

  print('\nAdding backwards compatibility tests')
  update_backcompat_tests(['cfs', 'nocfs'], config.version)

  print('\nTesting changes')
  check_backcompat_tests()

  if config.cleanup:
    print('\nCleaning up')
    print(' deleting %s...' % config.temp_dir, end='', flush=True)
    shutil.rmtree(config.temp_dir)
    print('done')

  print()
|
||||
|
||||
if __name__ == '__main__':
  try:
    main()
  except KeyboardInterrupt:
    # fix: corrected misspelling "Recieved" in the user-facing message
    print('\nReceived Ctrl-C, exiting early')
|
|
@ -13,80 +13,18 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import sys
|
||||
sys.path.append(os.path.dirname(__file__))
|
||||
import scriptutil
|
||||
|
||||
import argparse
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
class Version(object):
|
||||
def __init__(self, major, minor, bugfix):
|
||||
self.major = major
|
||||
self.minor = minor
|
||||
self.bugfix = bugfix
|
||||
self.previous_dot_matcher = self.make_previous_matcher()
|
||||
self.dot = '%d.%d.%d' % (self.major, self.minor, self.bugfix)
|
||||
self.constant = 'LUCENE_%d_%d_%d' % (self.major, self.minor, self.bugfix)
|
||||
|
||||
@classmethod
|
||||
def parse(cls, value):
|
||||
match = re.search(r'(\d+)\.(\d+).(\d+)', value)
|
||||
if match is None:
|
||||
raise argparse.ArgumentTypeError('Version argument must be of format x.y.z')
|
||||
return Version(*[int(v) for v in match.groups()])
|
||||
|
||||
def __str__(self):
|
||||
return self.dot
|
||||
|
||||
def make_previous_matcher(self, prefix='', suffix='', sep='\\.'):
|
||||
if self.is_bugfix_release():
|
||||
pattern = '%s%s%s%s%d' % (self.major, sep, self.minor, sep, self.bugfix - 1)
|
||||
elif self.is_minor_release():
|
||||
pattern = '%s%s%d%s\\d+' % (self.major, sep, self.minor - 1, sep)
|
||||
else:
|
||||
pattern = '%d%s\\d+%s\\d+' % (self.major - 1, sep, sep)
|
||||
|
||||
return re.compile(prefix + '(' + pattern + ')' + suffix)
|
||||
|
||||
def is_bugfix_release(self):
|
||||
return self.bugfix != 0
|
||||
|
||||
def is_minor_release(self):
|
||||
return self.bugfix == 0 and self.minor != 0
|
||||
|
||||
def is_major_release(self):
|
||||
return self.bugfix == 0 and self.minor == 0
|
||||
|
||||
def run(cmd):
|
||||
try:
|
||||
subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(e.output.decode('utf-8'))
|
||||
raise e
|
||||
|
||||
def update_file(filename, line_re, edit):
|
||||
infile = open(filename, 'r')
|
||||
buffer = []
|
||||
|
||||
changed = False
|
||||
for line in infile:
|
||||
if not changed:
|
||||
match = line_re.search(line)
|
||||
if match:
|
||||
changed = edit(buffer, match, line)
|
||||
if changed is None:
|
||||
return False
|
||||
continue
|
||||
buffer.append(line)
|
||||
if not changed:
|
||||
raise Exception('Could not find %s in %s' % (line_re, filename))
|
||||
with open(filename, 'w') as f:
|
||||
f.write(''.join(buffer))
|
||||
return True
|
||||
|
||||
def update_changes(filename, new_version):
|
||||
print(' adding new section to %s...' % filename, end='')
|
||||
print(' adding new section to %s...' % filename, end='', flush=True)
|
||||
matcher = re.compile(r'\d+\.\d+\.\d+\s+===')
|
||||
def edit(buffer, match, line):
|
||||
if new_version.dot in line:
|
||||
|
@ -98,12 +36,12 @@ def update_changes(filename, new_version):
|
|||
buffer.append(line)
|
||||
return match is not None
|
||||
|
||||
changed = update_file(filename, matcher, edit)
|
||||
changed = scriptutil.update_file(filename, matcher, edit)
|
||||
print('done' if changed else 'uptodate')
|
||||
|
||||
def add_constant(new_version, deprecate):
|
||||
filename = 'lucene/core/src/java/org/apache/lucene/util/Version.java'
|
||||
print(' adding constant %s...' % new_version.constant, end='')
|
||||
print(' adding constant %s...' % new_version.constant, end='', flush=True)
|
||||
constant_prefix = 'public static final Version LUCENE_'
|
||||
matcher = re.compile(constant_prefix)
|
||||
prev_matcher = new_version.make_previous_matcher(prefix=constant_prefix, sep='_')
|
||||
|
@ -152,12 +90,12 @@ def add_constant(new_version, deprecate):
|
|||
buffer.append(line)
|
||||
return False
|
||||
|
||||
changed = update_file(filename, matcher, Edit())
|
||||
changed = scriptutil.update_file(filename, matcher, Edit())
|
||||
print('done' if changed else 'uptodate')
|
||||
|
||||
version_prop_re = re.compile('version\.base=(.*)')
|
||||
def update_build_version(new_version):
|
||||
print(' changing version.base...', end='')
|
||||
print(' changing version.base...', end='', flush=True)
|
||||
filename = 'lucene/version.properties'
|
||||
def edit(buffer, match, line):
|
||||
if new_version.dot in line:
|
||||
|
@ -165,11 +103,11 @@ def update_build_version(new_version):
|
|||
buffer.append('version.base=' + new_version.dot + '\n')
|
||||
return True
|
||||
|
||||
changed = update_file(filename, version_prop_re, edit)
|
||||
changed = scriptutil.update_file(filename, version_prop_re, edit)
|
||||
print('done' if changed else 'uptodate')
|
||||
|
||||
def update_latest_constant(new_version):
|
||||
print(' changing Version.LATEST to %s...' % new_version.constant, end='')
|
||||
print(' changing Version.LATEST to %s...' % new_version.constant, end='', flush=True)
|
||||
filename = 'lucene/core/src/java/org/apache/lucene/util/Version.java'
|
||||
matcher = re.compile('public static final Version LATEST')
|
||||
def edit(buffer, match, line):
|
||||
|
@ -178,7 +116,7 @@ def update_latest_constant(new_version):
|
|||
buffer.append(line.rpartition('=')[0] + ('= %s;\n' % new_version.constant))
|
||||
return True
|
||||
|
||||
changed = update_file(filename, matcher, edit)
|
||||
changed = scriptutil.update_file(filename, matcher, edit)
|
||||
print('done' if changed else 'uptodate')
|
||||
|
||||
def update_example_solrconfigs(new_version):
|
||||
|
@ -191,7 +129,7 @@ def update_example_solrconfigs(new_version):
|
|||
update_solrconfig(os.path.join(root, f), matcher, new_version)
|
||||
|
||||
def update_solrconfig(filename, matcher, new_version):
|
||||
print(' %s...' % filename, end='')
|
||||
print(' %s...' % filename, end='', flush=True)
|
||||
def edit(buffer, match, line):
|
||||
if new_version.dot in line:
|
||||
return None
|
||||
|
@ -201,98 +139,11 @@ def update_solrconfig(filename, matcher, new_version):
|
|||
buffer.append(line.replace(match.group(1), new_version.dot))
|
||||
return True
|
||||
|
||||
changed = update_file(filename, matcher, edit)
|
||||
print('done' if changed else 'uptodate')
|
||||
|
||||
def codec_exists(version):
|
||||
codecs_dir = 'lucene/core/src/java/org/apache/lucene/codecs'
|
||||
codec_file = '%(dir)s/lucene%(x)s%(y)s/Lucene%(x)s%(y)sCodec.java'
|
||||
return os.path.exists(codec_file % {'x': version.major, 'y': version.minor, 'dir': codecs_dir})
|
||||
|
||||
def create_backcompat_indexes(version, on_trunk):
|
||||
majorminor = '%d%d' % (version.major, version.minor)
|
||||
codec = 'Lucene%s' % majorminor
|
||||
backcompat_dir = 'lucene/backward-codecs' if on_trunk else 'lucene/core'
|
||||
|
||||
create_index(codec, backcompat_dir, 'cfs', majorminor)
|
||||
create_index(codec, backcompat_dir, 'nocfs', majorminor)
|
||||
|
||||
def create_index(codec, codecs_dir, type, majorminor):
|
||||
filename = 'index.%s.%s.zip' % (majorminor, type)
|
||||
print(' creating %s...' % filename, end='')
|
||||
index_dir = 'src/test/org/apache/lucene/index'
|
||||
if os.path.exists(os.path.join(codecs_dir, index_dir, filename)):
|
||||
print('uptodate')
|
||||
return
|
||||
|
||||
test = {'cfs': 'testCreateCFS', 'nocfs': 'testCreateNonCFS'}[type]
|
||||
ant_args = ' '.join([
|
||||
'-Dtests.codec=%s' % codec,
|
||||
'-Dtests.useSecurityManager=false',
|
||||
'-Dtestcase=CreateBackwardsCompatibilityIndex',
|
||||
'-Dtestmethod=%s' % test
|
||||
])
|
||||
base_dir = os.getcwd()
|
||||
bc_index_dir = '/tmp/idx/index.%s' % type
|
||||
bc_index_file = os.path.join(bc_index_dir, filename)
|
||||
|
||||
success = False
|
||||
if not os.path.exists(bc_index_file):
|
||||
os.chdir(codecs_dir)
|
||||
run('ant test %s' % ant_args)
|
||||
os.chdir('/tmp/idx/index.%s' % type)
|
||||
run('zip %s *' % filename)
|
||||
run('cp %s %s' % (bc_index_file, os.path.join(base_dir, codecs_dir, index_dir)))
|
||||
os.chdir(base_dir)
|
||||
run('svn add %s' % os.path.join(codecs_dir, index_dir, filename))
|
||||
success = True
|
||||
|
||||
os.chdir(base_dir)
|
||||
run('rm -rf %s' % bc_index_dir)
|
||||
if success:
|
||||
print('done')
|
||||
|
||||
def update_backcompat_tests(version, on_trunk):
|
||||
majorminor = '%d%d' % (version.major, version.minor)
|
||||
print(' adding new indexes to backcompat tests...', end='')
|
||||
basedir = 'lucene/backward-codecs' if on_trunk else 'lucene/core'
|
||||
filename = '%s/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java' % basedir
|
||||
matcher = re.compile(r'final static String\[\] oldNames = {|};')
|
||||
cfs_name = '%s.cfs' % majorminor
|
||||
nocfs_name = '%s.nocfs' % majorminor
|
||||
|
||||
class Edit(object):
|
||||
start = None
|
||||
def __call__(self, buffer, match, line):
|
||||
if self.start:
|
||||
# first check if the indexes we are adding already exist
|
||||
last_ndx = len(buffer) - 1
|
||||
i = last_ndx
|
||||
while i >= self.start:
|
||||
if cfs_name in buffer[i]:
|
||||
return None
|
||||
i -= 1
|
||||
|
||||
last = buffer[last_ndx]
|
||||
spaces = ' ' * (len(last) - len(last.lstrip()))
|
||||
quote_ndx = last.find('"')
|
||||
quote_ndx = last.find('"', quote_ndx + 1)
|
||||
buffer[last_ndx] = last[:quote_ndx + 1] + "," + last[quote_ndx + 1:]
|
||||
buffer.append(spaces + ('"%s",\n' % cfs_name))
|
||||
buffer.append(spaces + ('"%s"\n' % nocfs_name))
|
||||
buffer.append(line)
|
||||
return True
|
||||
|
||||
if 'oldNames' in line:
|
||||
self.start = len(buffer) # location of first index name
|
||||
buffer.append(line)
|
||||
return False
|
||||
|
||||
changed = update_file(filename, matcher, Edit())
|
||||
changed = scriptutil.update_file(filename, matcher, edit)
|
||||
print('done' if changed else 'uptodate')
|
||||
|
||||
def check_lucene_version_tests():
|
||||
print(' checking lucene version tests...', end='')
|
||||
print(' checking lucene version tests...', end='', flush=True)
|
||||
base_dir = os.getcwd()
|
||||
os.chdir('lucene/core')
|
||||
run('ant test -Dtestcase=TestVersion')
|
||||
|
@ -300,48 +151,13 @@ def check_lucene_version_tests():
|
|||
print('ok')
|
||||
|
||||
def check_solr_version_tests():
|
||||
print(' checking solr version tests...', end='')
|
||||
print(' checking solr version tests...', end='', flush=True)
|
||||
base_dir = os.getcwd()
|
||||
os.chdir('solr/core')
|
||||
run('ant test -Dtestcase=TestLuceneMatchVersion')
|
||||
os.chdir(base_dir)
|
||||
print('ok')
|
||||
|
||||
def check_backcompat_tests(on_trunk):
|
||||
print(' checking backcompat tests...', end='')
|
||||
base_dir = os.getcwd()
|
||||
basedir = 'lucene/backward-codecs' if on_trunk else 'lucene/core'
|
||||
os.chdir(basedir)
|
||||
run('ant test -Dtestcase=TestBackwardsCompatibility')
|
||||
os.chdir(base_dir)
|
||||
print('ok')
|
||||
|
||||
# branch types are "release", "stable" and "trunk"
|
||||
def find_branch_type():
|
||||
output = subprocess.check_output('svn info', shell=True)
|
||||
for line in output.split(b'\n'):
|
||||
if line.startswith(b'URL:'):
|
||||
url = line.split(b'/')[-1]
|
||||
break
|
||||
else:
|
||||
raise Exception('svn info missing repo URL')
|
||||
|
||||
if url == b'trunk':
|
||||
return 'trunk'
|
||||
if url.startswith(b'branch_'):
|
||||
return 'stable'
|
||||
if url.startswith(b'lucene_solr_'):
|
||||
return 'release'
|
||||
raise Exception('Cannot run bumpVersion.py on feature branch')
|
||||
|
||||
def find_previous_version():
|
||||
return version_prop_re.search(open('lucene/version.properties').read()).group(1)
|
||||
|
||||
def merge_change(changeid, repo):
|
||||
print('\nMerging downstream change %d...' % changeid, end='')
|
||||
run('svn merge -c %d --record-only %s' % (changeid, repo))
|
||||
print('done')
|
||||
|
||||
def read_config():
|
||||
parser = argparse.ArgumentParser(description='Add a new version')
|
||||
parser.add_argument('version', type=Version.parse)
|
||||
|
@ -349,18 +165,11 @@ def read_config():
|
|||
parser.add_argument('-r', '--downstream-repo', help='Path to downstream checkout for given changeid')
|
||||
c = parser.parse_args()
|
||||
|
||||
c.branch_type = find_branch_type()
|
||||
c.branch_type = scriptutil.find_branch_type()
|
||||
c.matching_branch = c.version.is_bugfix_release() and c.branch_type == 'release' or \
|
||||
c.version.is_minor_release() and c.branch_type == 'stable' or \
|
||||
c.branch_type == 'major'
|
||||
|
||||
if c.matching_branch:
|
||||
c.previous_version = Version.parse(find_previous_version())
|
||||
elif c.version.is_minor_release():
|
||||
c.previous_version = Version(c.version.major, c.version.minor - 1, 0)
|
||||
elif c.version.is_bugfix_release():
|
||||
c.previous_version = Version(c.version.major, c.version.minor, c.version.bugfix - 1)
|
||||
|
||||
if bool(c.changeid) != bool(c.downstream_repo):
|
||||
parser.error('--changeid and --upstream-repo must be used together')
|
||||
if not c.changeid and not c.matching_branch:
|
||||
|
@ -389,25 +198,14 @@ def main():
|
|||
update_latest_constant(c.version)
|
||||
update_example_solrconfigs(c.version)
|
||||
|
||||
run_backcompat_tests = False
|
||||
on_trunk = c.branch_type == 'trunk'
|
||||
if not c.version.is_bugfix_release() and codec_exists(c.previous_version):
|
||||
print('\nCreating backwards compatibility tests')
|
||||
create_backcompat_indexes(c.previous_version, on_trunk)
|
||||
update_backcompat_tests(c.previous_version, on_trunk)
|
||||
run_backcompat_tests = True
|
||||
|
||||
if c.version.is_major_release():
|
||||
print('\nTODO: ')
|
||||
print(' - Update major version bounds in Version.java')
|
||||
print(' - Move backcompat oldIndexes to unsupportedIndexes in TestBackwardsCompatibility')
|
||||
print(' - Update IndexFormatTooOldException throw cases')
|
||||
else:
|
||||
print('\nTesting changes')
|
||||
check_lucene_version_tests()
|
||||
check_solr_version_tests()
|
||||
if run_backcompat_tests:
|
||||
check_backcompat_tests(on_trunk)
|
||||
|
||||
print()
|
||||
|
||||
|
|
|
@ -0,0 +1,127 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import argparse
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
class Version(object):
  """A lucene version of the form major.minor.bugfix, with an optional
  prerelease suffix (.1 or .2, used for alpha/beta style releases)."""

  def __init__(self, major, minor, bugfix, prerelease):
    self.major = major
    self.minor = minor
    self.bugfix = bugfix
    # 0 = final release, 1/2 = first/second prerelease of this version
    self.prerelease = prerelease
    self.previous_dot_matcher = self.make_previous_matcher()
    self.dot = '%d.%d.%d' % (self.major, self.minor, self.bugfix)
    self.constant = 'LUCENE_%d_%d_%d' % (self.major, self.minor, self.bugfix)

  @classmethod
  def parse(cls, value):
    """Parse 'X.Y.Z' or 'X.Y.Z.1'/'X.Y.Z.2' into a Version.

    Raises argparse.ArgumentTypeError on malformed input, so it can be used
    directly as an argparse type= converter.
    """
    # bug fix: the dots were previously unescaped in the pattern
    # (r'(\d+)\.(\d+).(\d+)(.1|.2)?'), so '.' matched any character and
    # strings like '1.2x3' were silently accepted as versions.
    match = re.search(r'(\d+)\.(\d+)\.(\d+)(\.1|\.2)?', value)
    if match is None:
      raise argparse.ArgumentTypeError('Version argument must be of format x.y.z(.1|.2)?')
    parts = [int(v) for v in match.groups()[:-1]]
    parts.append({ None: 0, '.1': 1, '.2': 2 }[match.groups()[-1]])
    return Version(*parts)

  def __str__(self):
    return self.dot

  def make_previous_matcher(self, prefix='', suffix='', sep='\\.'):
    """Build a regex whose group 1 matches the version preceding this one
    (previous bugfix, previous minor, or previous major, as appropriate)."""
    if self.is_bugfix_release():
      pattern = '%s%s%s%s%d' % (self.major, sep, self.minor, sep, self.bugfix - 1)
    elif self.is_minor_release():
      pattern = '%s%s%d%s\\d+' % (self.major, sep, self.minor - 1, sep)
    else:
      pattern = '%d%s\\d+%s\\d+' % (self.major - 1, sep, sep)

    return re.compile(prefix + '(' + pattern + ')' + suffix)

  def is_bugfix_release(self):
    return self.bugfix != 0

  def is_minor_release(self):
    return self.bugfix == 0 and self.minor != 0

  def is_major_release(self):
    return self.bugfix == 0 and self.minor == 0

  def on_or_after(self, other):
    """True if this version is the same as or newer than *other*.

    NOTE: prereleases compare by suffix number, so a final release
    (prerelease=0) compares *before* its own prereleases.
    """
    return (self.major > other.major or self.major == other.major and
           (self.minor > other.minor or self.minor == other.minor and
           (self.bugfix > other.bugfix or self.bugfix == other.bugfix and
            self.prerelease >= other.prerelease)))
|
||||
|
||||
def run(cmd):
  """Run *cmd* through the shell and return its combined stdout/stderr.

  On failure the captured output is echoed and the CalledProcessError
  is re-raised.
  """
  try:
    raw = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
  except subprocess.CalledProcessError as e:
    # surface the command's output so the failure is diagnosable
    print(e.output.decode('utf-8'))
    raise e
  return raw.decode('utf-8')
|
||||
|
||||
def update_file(filename, line_re, edit):
  """Rewrite *filename* in place, letting *edit* transform the first line
  matching *line_re*.

  edit(buffer, match, line) appends its replacement lines to buffer and
  returns True when done, False to keep scanning, or None to signal the
  file is already in the desired state (nothing is written).

  Returns True if the file was rewritten, False if edit returned None.
  Raises Exception if the pattern never matched.
  """
  buffer = []

  changed = False
  # fix: the input file handle was previously never closed (a leak,
  # especially on the early `return False` path)
  with open(filename, 'r') as infile:
    for line in infile:
      if not changed:
        match = line_re.search(line)
        if match:
          changed = edit(buffer, match, line)
          if changed is None:
            return False
          # edit already appended its version of this line
          continue
      buffer.append(line)
  if not changed:
    raise Exception('Could not find %s in %s' % (line_re, filename))
  with open(filename, 'w') as f:
    f.write(''.join(buffer))
  return True
|
||||
|
||||
# branch types are "release", "stable" and "trunk"
def find_branch_type():
  """Classify the current svn checkout by the last component of its URL."""
  info = subprocess.check_output('svn info', shell=True)
  for line in info.split(b'\n'):
    if line.startswith(b'URL:'):
      url = line.split(b'/')[-1]
      break
  else:
    # no URL: line in the svn output at all
    raise Exception('svn info missing repo URL')

  if url == b'trunk':
    return 'trunk'
  if url.startswith(b'branch_'):
    return 'stable'
  if url.startswith(b'lucene_solr_'):
    return 'release'
  raise Exception('Cannot run bumpVersion.py on feature branch')
|
||||
|
||||
# Matches the "version.base=X.Y.Z" property line in lucene/version.properties.
# fix: raw string -- '\.' in a plain string is an invalid escape sequence
version_prop_re = re.compile(r'version\.base=(.*)')

def find_current_version():
  """Return the version.base property from lucene/version.properties."""
  # fix: close the file handle instead of leaking it
  with open('lucene/version.properties') as f:
    return version_prop_re.search(f.read()).group(1)
|
||||
|
||||
def merge_change(changeid, repo):
  """Record-only merge of downstream change *changeid* from *repo* into the
  current checkout (marks it merged without applying the diff)."""
  # fix: flush so the progress message appears before the slow svn call,
  # consistent with the other status prints in these scripts
  print('\nMerging downstream change %d...' % changeid, end='', flush=True)
  run('svn merge -c %d --record-only %s' % (changeid, repo))
  print('done')
|
||||
|
||||
if __name__ == '__main__':
  # scriptutil is a shared library for the release scripts, not a program
  print('This is only a support module, it cannot be run')
  sys.exit(1)
|
|
@ -185,6 +185,11 @@ Tests
|
|||
* LUCENE-5974: Add check that backcompat indexes use default codecs
|
||||
(Ryan Ernst)
|
||||
|
||||
* LUCENE-5971: Create addBackcompatIndexes.py script to build and add
|
||||
backcompat test indexes for a given lucene version. Also renamed backcompat
|
||||
index files to use Version.toString() in filename.
|
||||
(Ryan Ernst)
|
||||
|
||||
Optimizations
|
||||
|
||||
* LUCENE-5960: Use a more efficient bitset, not a Set<Integer>, to
|
||||
|
|
|
@ -1,59 +0,0 @@
|
|||
package org.apache.lucene.index;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;

// Deliberately named so the junit runner will not pick it up: these index
// creation "tests" must only ever be run explicitly.
public class CreateBackwardsCompatibilityIndex extends TestBackwardsCompatibility {

  // The indexes are written under /tmp/idx/.
  //
  // Be sure to create the indexes with the actual format:
  //  ant test -Dtestcase=TestBackwardsCompatibility -Dversion=x.y.z -Dtests.codec=LuceneXY -Dtests.useSecurityManager=false
  //
  // Zip up the generated indexes:
  //
  //    cd /tmp/idx/index.cfs   ; zip index.<VERSION>.cfs.zip *
  //    cd /tmp/idx/index.nocfs ; zip index.<VERSION>.nocfs.zip *
  //
  // Then move those 2 zip files to your trunk checkout and add them
  // to the oldNames array.

  public void testCreateCFS() throws IOException {
    createIndex("index.cfs", true, false);
  }

  public void testCreateNonCFS() throws IOException {
    createIndex("index.nocfs", false, false);
  }

  // These are only needed for the special upgrade test, which verifies that
  // single-segment indexes are also correctly upgraded by IndexUpgrader.
  // They are not needed for non-4.0 versions (the test is happy with just one
  // "old" segment format; the version is unimportant).

  public void testCreateSingleSegmentCFS() throws IOException {
    createIndex("index.singlesegment.cfs", true, true);
  }

  public void testCreateSingleSegmentNoCFS() throws IOException {
    createIndex("index.singlesegment.nocfs", false, true);
  }
}
|
|
@ -19,8 +19,11 @@ package org.apache.lucene.index;
|
|||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.lang.reflect.Modifier;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.ArrayList;
|
||||
|
@ -67,6 +70,7 @@ import org.apache.lucene.util.Bits;
|
|||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.apache.lucene.util.InfoStream;
|
||||
import org.apache.lucene.util.LineFileDocs;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.apache.lucene.util.NumericUtils;
|
||||
import org.apache.lucene.util.TestUtil;
|
||||
|
@ -80,17 +84,54 @@ import org.junit.BeforeClass;
|
|||
*/
|
||||
public class TestBackwardsCompatibility extends LuceneTestCase {
|
||||
|
||||
/*
|
||||
// To generate backcompat indexes with the current default codec, run the following ant command:
|
||||
// ant test -Dtestcase=TestBackwardsCompatibility -Dbwc.indexdir=/path/to/store/indexes
|
||||
// -Dtests.codec=default -Dtests.useSecurityManager=false
|
||||
// Also add testmethod with one of the index creation methods below, for example:
|
||||
// -Dtestmethod=testCreateCFS
|
||||
//
|
||||
// Zip up the generated indexes:
|
||||
//
|
||||
// cd /path/to/store/indexes/index.cfs ; zip index.<VERSION>-cfs.zip *
|
||||
// cd /path/to/store/indexes/index.nocfs ; zip index.<VERSION>-nocfs.zip *
|
||||
//
|
||||
// Then move those 2 zip files to your trunk checkout and add them
|
||||
// to the oldNames array.
|
||||
|
||||
public void testCreateCFS() throws IOException {
|
||||
createIndex("index.cfs", true, false);
|
||||
}
|
||||
|
||||
public void testCreateNoCFS() throws IOException {
|
||||
createIndex("index.nocfs", false, false);
|
||||
}
|
||||
|
||||
// These are only needed for the special upgrade test to verify
|
||||
// that also single-segment indexes are correctly upgraded by IndexUpgrader.
|
||||
// You don't need them to be build for non-4.0 (the test is happy with just one
|
||||
// "old" segment format, version is unimportant:
|
||||
|
||||
public void testCreateSingleSegmentCFS() throws IOException {
|
||||
createIndex("index.singlesegment-cfs", true, true);
|
||||
}
|
||||
|
||||
public void testCreateSingleSegmentNoCFS() throws IOException {
|
||||
createIndex("index.singlesegment-nocfs", false, true);
|
||||
}
|
||||
|
||||
private Path getIndexDir() {
|
||||
String path = System.getProperty("tests.bwcdir");
|
||||
assumeTrue("backcompat creation tests must be run with -Dtests,bwcdir=/path/to/write/indexes", path != null);
|
||||
return Paths.get(path);
|
||||
}
|
||||
|
||||
public void testCreateMoreTermsIndex() throws Exception {
|
||||
// we use a real directory name that is not cleaned up,
|
||||
// because this method is only used to create backwards
|
||||
// indexes:
|
||||
File indexDir = new File("moreterms");
|
||||
_TestUtil.rmDir(indexDir);
|
||||
|
||||
Path indexDir = getIndexDir().resolve("moreterms");
|
||||
Files.deleteIfExists(indexDir);
|
||||
Directory dir = newFSDirectory(indexDir);
|
||||
|
||||
LogByteSizeMergePolicy mp = new LogByteSizeMergePolicy();
|
||||
mp.setUseCompoundFile(false);
|
||||
mp.setNoCFSRatio(1.0);
|
||||
mp.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY);
|
||||
MockAnalyzer analyzer = new MockAnalyzer(random());
|
||||
|
@ -98,8 +139,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
|
||||
// TODO: remove randomness
|
||||
IndexWriterConfig conf = new IndexWriterConfig(analyzer)
|
||||
.setMergePolicy(mp);
|
||||
conf.setCodec(Codec.forName("Lucene40"));
|
||||
.setMergePolicy(mp).setUseCompoundFile(false);
|
||||
IndexWriter writer = new IndexWriter(dir, conf);
|
||||
LineFileDocs docs = new LineFileDocs(null, true);
|
||||
for(int i=0;i<50;i++) {
|
||||
|
@ -112,7 +152,6 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
// a test option to not remove temp dir...):
|
||||
Thread.sleep(100000);
|
||||
}
|
||||
*/
|
||||
|
||||
private void updateNumeric(IndexWriter writer, String id, String f, String cf, long value) throws IOException {
|
||||
writer.updateNumericDocValue(new Term("id", id), f, value);
|
||||
|
@ -124,13 +163,10 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
writer.updateBinaryDocValue(new Term("id", id), cf, TestDocValuesUpdatesOnOldSegments.toBytes(value*2));
|
||||
}
|
||||
|
||||
/* // Creates an index with DocValues updates
|
||||
// Creates an index with DocValues updates
|
||||
public void testCreateIndexWithDocValuesUpdates() throws Exception {
|
||||
// we use a real directory name that is not cleaned up,
|
||||
// because this method is only used to create backwards
|
||||
// indexes:
|
||||
File indexDir = new File("/tmp/idx/dvupdates");
|
||||
TestUtil.rm(indexDir);
|
||||
Path indexDir = getIndexDir().resolve("dvupdates");
|
||||
Files.deleteIfExists(indexDir);
|
||||
Directory dir = newFSDirectory(indexDir);
|
||||
|
||||
IndexWriterConfig conf = new IndexWriterConfig(new MockAnalyzer(random()))
|
||||
|
@ -139,7 +175,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
// create an index w/ few doc-values fields, some with updates and some without
|
||||
for (int i = 0; i < 30; i++) {
|
||||
Document doc = new Document();
|
||||
doc.add(new StringField("id", "" + i, Store.NO));
|
||||
doc.add(new StringField("id", "" + i, Field.Store.NO));
|
||||
doc.add(new NumericDocValuesField("ndv1", i));
|
||||
doc.add(new NumericDocValuesField("ndv1_c", i*2));
|
||||
doc.add(new NumericDocValuesField("ndv2", i*3));
|
||||
|
@ -170,104 +206,104 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
|
||||
writer.close();
|
||||
dir.close();
|
||||
}*/
|
||||
}
|
||||
|
||||
final static String[] oldNames = {
|
||||
"40a.cfs",
|
||||
"40a.nocfs",
|
||||
"40b.cfs",
|
||||
"40b.nocfs",
|
||||
"40.cfs",
|
||||
"40.nocfs",
|
||||
"41.cfs",
|
||||
"41.nocfs",
|
||||
"42.cfs",
|
||||
"42.nocfs",
|
||||
"421.cfs",
|
||||
"421.nocfs",
|
||||
"43.cfs",
|
||||
"43.nocfs",
|
||||
"431.cfs",
|
||||
"431.nocfs",
|
||||
"44.cfs",
|
||||
"44.nocfs",
|
||||
"45.cfs",
|
||||
"45.nocfs",
|
||||
"451.cfs",
|
||||
"451.nocfs",
|
||||
"46.cfs",
|
||||
"46.nocfs",
|
||||
"461.cfs",
|
||||
"461.nocfs",
|
||||
"47.cfs",
|
||||
"47.nocfs",
|
||||
"471.cfs",
|
||||
"471.nocfs",
|
||||
"472.cfs",
|
||||
"472.nocfs",
|
||||
"48.cfs",
|
||||
"48.nocfs",
|
||||
"481.cfs",
|
||||
"481.nocfs",
|
||||
"49.cfs",
|
||||
"49.nocfs",
|
||||
"410.cfs",
|
||||
"410.nocfs"
|
||||
"4.0.0-cfs",
|
||||
"4.0.0-nocfs",
|
||||
"4.0.0.1-cfs",
|
||||
"4.0.0.1-nocfs",
|
||||
"4.0.0.2-cfs",
|
||||
"4.0.0.2-nocfs",
|
||||
"4.1.0-cfs",
|
||||
"4.1.0-nocfs",
|
||||
"4.2.0-cfs",
|
||||
"4.2.0-nocfs",
|
||||
"4.2.1-cfs",
|
||||
"4.2.1-nocfs",
|
||||
"4.3.0-cfs",
|
||||
"4.3.0-nocfs",
|
||||
"4.3.1-cfs",
|
||||
"4.3.1-nocfs",
|
||||
"4.4.0-cfs",
|
||||
"4.4.0-nocfs",
|
||||
"4.5.0-cfs",
|
||||
"4.5.0-nocfs",
|
||||
"4.5.1-cfs",
|
||||
"4.5.1-nocfs",
|
||||
"4.6.0-cfs",
|
||||
"4.6.0-nocfs",
|
||||
"4.6.1-cfs",
|
||||
"4.6.1-nocfs",
|
||||
"4.7.0-cfs",
|
||||
"4.7.0-nocfs",
|
||||
"4.7.1-cfs",
|
||||
"4.7.1-nocfs",
|
||||
"4.7.2-cfs",
|
||||
"4.7.2-nocfs",
|
||||
"4.8.0-cfs",
|
||||
"4.8.0-nocfs",
|
||||
"4.8.1-cfs",
|
||||
"4.8.1-nocfs",
|
||||
"4.9.0-cfs",
|
||||
"4.9.0-nocfs",
|
||||
"4.10.0-cfs",
|
||||
"4.10.0-nocfs"
|
||||
};
|
||||
|
||||
final String[] unsupportedNames = {
|
||||
"19.cfs",
|
||||
"19.nocfs",
|
||||
"20.cfs",
|
||||
"20.nocfs",
|
||||
"21.cfs",
|
||||
"21.nocfs",
|
||||
"22.cfs",
|
||||
"22.nocfs",
|
||||
"23.cfs",
|
||||
"23.nocfs",
|
||||
"24.cfs",
|
||||
"24.nocfs",
|
||||
"241.cfs",
|
||||
"241.nocfs",
|
||||
"29.cfs",
|
||||
"29.nocfs",
|
||||
"291.cfs",
|
||||
"291.nocfs",
|
||||
"292.cfs",
|
||||
"292.nocfs",
|
||||
"293.cfs",
|
||||
"293.nocfs",
|
||||
"294.cfs",
|
||||
"294.nocfs",
|
||||
"30.cfs",
|
||||
"30.nocfs",
|
||||
"301.cfs",
|
||||
"301.nocfs",
|
||||
"302.cfs",
|
||||
"302.nocfs",
|
||||
"303.cfs",
|
||||
"303.nocfs",
|
||||
"31.cfs",
|
||||
"31.nocfs",
|
||||
"32.cfs",
|
||||
"32.nocfs",
|
||||
"33.cfs",
|
||||
"33.nocfs",
|
||||
"34.cfs",
|
||||
"34.nocfs",
|
||||
"35.cfs",
|
||||
"35.nocfs",
|
||||
"36.cfs",
|
||||
"36.nocfs",
|
||||
"361.cfs",
|
||||
"361.nocfs",
|
||||
"362.cfs",
|
||||
"362.nocfs"
|
||||
"1.9.0-cfs",
|
||||
"1.9.0-nocfs",
|
||||
"2.0.0-cfs",
|
||||
"2.0.0-nocfs",
|
||||
"2.1.0-cfs",
|
||||
"2.1.0-nocfs",
|
||||
"2.2.0-cfs",
|
||||
"2.2.0-nocfs",
|
||||
"2.3.0-cfs",
|
||||
"2.3.0-nocfs",
|
||||
"2.4.0-cfs",
|
||||
"2.4.0-nocfs",
|
||||
"2.4.1-cfs",
|
||||
"2.4.1-nocfs",
|
||||
"2.9.0-cfs",
|
||||
"2.9.0-nocfs",
|
||||
"2.9.1-cfs",
|
||||
"2.9.1-nocfs",
|
||||
"2.9.2-cfs",
|
||||
"2.9.2-nocfs",
|
||||
"2.9.3-cfs",
|
||||
"2.9.3-nocfs",
|
||||
"2.9.4-cfs",
|
||||
"2.9.4-nocfs",
|
||||
"3.0.0-cfs",
|
||||
"3.0.0-nocfs",
|
||||
"3.0.1-cfs",
|
||||
"3.0.1-nocfs",
|
||||
"3.0.2-cfs",
|
||||
"3.0.2-nocfs",
|
||||
"3.0.3-cfs",
|
||||
"3.0.3-nocfs",
|
||||
"3.1.0-cfs",
|
||||
"3.1.0-nocfs",
|
||||
"3.2.0-cfs",
|
||||
"3.2.0-nocfs",
|
||||
"3.3.0-cfs",
|
||||
"3.3.0-nocfs",
|
||||
"3.4.0-cfs",
|
||||
"3.4.0-nocfs",
|
||||
"3.5.0-cfs",
|
||||
"3.5.0-nocfs",
|
||||
"3.6.0-cfs",
|
||||
"3.6.0-nocfs",
|
||||
"3.6.1-cfs",
|
||||
"3.6.1-nocfs",
|
||||
"3.6.2-cfs",
|
||||
"3.6.2-nocfs"
|
||||
};
|
||||
|
||||
final static String[] oldSingleSegmentNames = {"40a.optimized.cfs",
|
||||
"40a.optimized.nocfs",
|
||||
final static String[] oldSingleSegmentNames = {"4.0.0-optimized-cfs",
|
||||
"4.0.0-optimized-nocfs",
|
||||
};
|
||||
|
||||
static Map<String,Directory> oldIndexDirs;
|
||||
|
@ -295,7 +331,9 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
oldIndexDirs = new HashMap<>();
|
||||
for (String name : names) {
|
||||
Path dir = createTempDir(name);
|
||||
TestUtil.unzip(TestBackwardsCompatibility.class.getResourceAsStream("index." + name + ".zip"), dir);
|
||||
InputStream resource = TestBackwardsCompatibility.class.getResourceAsStream("index." + name + ".zip");
|
||||
assertNotNull("Index name " + name + " not found", resource);
|
||||
TestUtil.unzip(resource, dir);
|
||||
oldIndexDirs.put(name, newFSDirectory(dir));
|
||||
}
|
||||
}
|
||||
|
@ -315,30 +353,13 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
Arrays.sort(files);
|
||||
String prevFile = "";
|
||||
for (String file : files) {
|
||||
if (prevFile.endsWith(".cfs")) {
|
||||
String prefix = prevFile.replace(".cfs", "");
|
||||
assertEquals("Missing .nocfs for backcompat index " + prefix, prefix + ".nocfs", file);
|
||||
if (prevFile.endsWith("-cfs")) {
|
||||
String prefix = prevFile.replace("-cfs", "");
|
||||
assertEquals("Missing -nocfs for backcompat index " + prefix, prefix + "-nocfs", file);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private String cfsFilename(Version v) {
|
||||
String bugfix = "";
|
||||
if (v.bugfix != 0) {
|
||||
bugfix = Integer.toString(v.bugfix);
|
||||
}
|
||||
String prerelease = "";
|
||||
if (v.minor == 0 && v.bugfix == 0) {
|
||||
if (v.prerelease == 0) {
|
||||
prerelease = "a";
|
||||
} else if (v.prerelease == 1) {
|
||||
prerelease = "b";
|
||||
}
|
||||
}
|
||||
|
||||
return Integer.toString(v.major) + v.minor + bugfix + prerelease + ".cfs";
|
||||
}
|
||||
|
||||
public void testAllVersionsTested() throws Exception {
|
||||
Pattern constantPattern = Pattern.compile("LUCENE_(\\d+)_(\\d+)_(\\d+)(_ALPHA|_BETA)?");
|
||||
// find the unique versions according to Version.java
|
||||
|
@ -351,7 +372,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
Matcher constant = constantPattern.matcher(field.getName());
|
||||
if (constant.matches() == false) continue;
|
||||
|
||||
expectedVersions.add(cfsFilename(v));
|
||||
expectedVersions.add(v.toString() + "-cfs");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -370,7 +391,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
}
|
||||
}
|
||||
assertNotNull(lastPrevMajorVersion);
|
||||
expectedVersions.remove(cfsFilename(lastPrevMajorVersion));
|
||||
expectedVersions.remove(lastPrevMajorVersion.toString() + "-cfs");
|
||||
// END TRUNK ONLY BLOCK
|
||||
|
||||
Collections.sort(expectedVersions);
|
||||
|
@ -378,7 +399,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
// find what versions we are testing
|
||||
List<String> testedVersions = new ArrayList<>();
|
||||
for (String testedVersion : oldNames) {
|
||||
if (testedVersion.endsWith(".cfs") == false) continue;
|
||||
if (testedVersion.endsWith("-cfs") == false) continue;
|
||||
testedVersions.add(testedVersion);
|
||||
}
|
||||
Collections.sort(testedVersions);
|
||||
|
@ -412,13 +433,14 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
++j;
|
||||
}
|
||||
|
||||
if (missingFiles.isEmpty() && extraFiles.isEmpty()) {
|
||||
// we could be missing up to 1 file, which may be due to a release that is in progress
|
||||
if (missingFiles.size() <= 1 && extraFiles.isEmpty()) {
|
||||
// success
|
||||
return;
|
||||
}
|
||||
|
||||
StringBuffer msg = new StringBuffer();
|
||||
if (missingFiles.isEmpty() == false) {
|
||||
if (missingFiles.size() > 1) {
|
||||
msg.append("Missing backcompat test files:\n");
|
||||
for (String missingFile : missingFiles) {
|
||||
msg.append(" " + missingFile + "\n");
|
||||
|
@ -555,13 +577,14 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testIndexOldIndex() throws IOException {
|
||||
public void testIndexOldIndex() throws Exception {
|
||||
for (String name : oldNames) {
|
||||
if (VERBOSE) {
|
||||
System.out.println("TEST: oldName=" + name);
|
||||
}
|
||||
Directory dir = newDirectory(oldIndexDirs.get(name));
|
||||
changeIndexWithAdds(random(), dir, name);
|
||||
Version v = Version.parse(name.substring(0, name.indexOf('-')));
|
||||
changeIndexWithAdds(random(), dir, v);
|
||||
dir.close();
|
||||
}
|
||||
}
|
||||
|
@ -730,7 +753,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
return v0 - v1;
|
||||
}
|
||||
|
||||
public void changeIndexWithAdds(Random random, Directory dir, String origOldName) throws IOException {
|
||||
public void changeIndexWithAdds(Random random, Directory dir, Version nameVersion) throws IOException {
|
||||
// open writer
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random))
|
||||
.setOpenMode(OpenMode.APPEND)
|
||||
|
@ -741,12 +764,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
}
|
||||
|
||||
// make sure writer sees right total -- writer seems not to know about deletes in .del?
|
||||
final int expected;
|
||||
if (compare(origOldName, "24") < 0) {
|
||||
expected = 44;
|
||||
} else {
|
||||
expected = 45;
|
||||
}
|
||||
final int expected = 45;
|
||||
assertEquals("wrong doc count", expected, writer.numDocs());
|
||||
writer.close();
|
||||
|
||||
|
@ -800,22 +818,16 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
reader.close();
|
||||
}
|
||||
|
||||
public Path createIndex(String dirName, boolean doCFS, boolean fullyMerged) throws IOException {
|
||||
// we use a real directory name that is not cleaned up, because this method is only used to create backwards indexes:
|
||||
Path indexDir = Paths.get("/tmp/idx").resolve(dirName);
|
||||
IOUtils.rm(indexDir);
|
||||
public void createIndex(String dirName, boolean doCFS, boolean fullyMerged) throws IOException {
|
||||
Path indexDir = getIndexDir().resolve(dirName);
|
||||
Files.deleteIfExists(indexDir);
|
||||
Directory dir = newFSDirectory(indexDir);
|
||||
LogByteSizeMergePolicy mp = new LogByteSizeMergePolicy();
|
||||
mp.setNoCFSRatio(doCFS ? 1.0 : 0.0);
|
||||
mp.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY);
|
||||
// TODO: remove randomness
|
||||
String codecName = System.getProperty("tests.codec");
|
||||
if (codecName == null || codecName.trim().isEmpty() || codecName.equals("random")) {
|
||||
fail("Must provide 'tests.codec' property to create BWC index");
|
||||
}
|
||||
Codec codec = Codec.forName(codecName);
|
||||
IndexWriterConfig conf = new IndexWriterConfig(new MockAnalyzer(random()))
|
||||
.setMaxBufferedDocs(10).setMergePolicy(mp).setCodec(codec);
|
||||
.setMaxBufferedDocs(10).setMergePolicy(mp);
|
||||
IndexWriter writer = new IndexWriter(dir, conf);
|
||||
|
||||
for(int i=0;i<35;i++) {
|
||||
|
@ -833,13 +845,13 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
mp.setNoCFSRatio(doCFS ? 1.0 : 0.0);
|
||||
// TODO: remove randomness
|
||||
conf = new IndexWriterConfig(new MockAnalyzer(random()))
|
||||
.setMaxBufferedDocs(10).setMergePolicy(mp).setCodec(codec);
|
||||
.setMaxBufferedDocs(10).setMergePolicy(mp);
|
||||
writer = new IndexWriter(dir, conf);
|
||||
addNoProxDoc(writer);
|
||||
writer.close();
|
||||
|
||||
conf = new IndexWriterConfig(new MockAnalyzer(random()))
|
||||
.setMaxBufferedDocs(10).setMergePolicy(NoMergePolicy.INSTANCE).setCodec(codec);
|
||||
.setMaxBufferedDocs(10).setMergePolicy(NoMergePolicy.INSTANCE);
|
||||
writer = new IndexWriter(dir, conf);
|
||||
Term searchTerm = new Term("id", "7");
|
||||
writer.deleteDocuments(searchTerm);
|
||||
|
@ -847,8 +859,6 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
}
|
||||
|
||||
dir.close();
|
||||
|
||||
return indexDir;
|
||||
}
|
||||
|
||||
private void addDoc(IndexWriter writer, int id) throws IOException
|
||||
|
@ -1215,7 +1225,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public static final String moreTermsIndex = "moreterms.40.zip";
|
||||
public static final String moreTermsIndex = "moreterms.4.0.0.zip";
|
||||
|
||||
public void testMoreTerms() throws Exception {
|
||||
Path oldIndexDir = createTempDir("moreterms");
|
||||
|
@ -1227,7 +1237,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
dir.close();
|
||||
}
|
||||
|
||||
public static final String dvUpdatesIndex = "dvupdates.48.zip";
|
||||
public static final String dvUpdatesIndex = "dvupdates.4.8.0.zip";
|
||||
|
||||
private void assertNumericDocValues(LeafReader r, String f, String cf) throws IOException {
|
||||
NumericDocValues ndvf = r.getNumericDocValues(f);
|
||||
|
|
|
@ -1044,6 +1044,7 @@
|
|||
<propertyref prefix="tests.maxfailures" />
|
||||
<propertyref prefix="tests.failfast" />
|
||||
<propertyref prefix="tests.badapples" />
|
||||
<propertyref prefix="tests.bwcdir" />
|
||||
<propertyref prefix="tests.timeoutSuite" />
|
||||
<propertyref prefix="tests.jettyConnector" />
|
||||
<propertyref prefix="tests.disableHdfs" />
|
||||
|
|
Loading…
Reference in New Issue