mirror of https://github.com/apache/lucene.git
LUCENE-6938: Fix Lucene's src.tgz file; remove svnkit stuff
commit 9f0e2cc83f
parent 55005f0a7d
build.xml (121 changed lines)
@@ -20,8 +20,6 @@
 <project name="lucene-solr" default="-projecthelp" basedir=".">
   <import file="lucene/common-build.xml"/>
 
-  <property name="svnkit.version" value="1.8.10"/>
-
   <property name="tests.heap-dump-dir" location="heapdumps"/>
 
   <property name="maven-build-dir" value="maven-build"/>
@@ -45,7 +43,7 @@
   </target>
 
   <target name="precommit" description="Run basic checks before committing"
-          depends="check-svn-working-copy,validate,documentation-lint"/>
+          depends="check-working-copy,validate,documentation-lint"/>
 
   <target name="test" description="Test both Lucene and Solr" depends="resolve-groovy">
     <mkdir dir="lucene/build" />
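
Note: taken together with the next hunk, the precommit entry point now chains into a placeholder instead of the old SVN checker. Reassembled from this diff (indentation approximate), the post-commit wiring reads:

  <target name="precommit" description="Run basic checks before committing"
          depends="check-working-copy,validate,documentation-lint"/>

  <target name="check-working-copy">
    <echo>This task is currently disabled due to migration to GIT</echo>
  </target>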
@@ -553,115 +551,8 @@ File | Project Structure | Platform Settings | SDKs):
     <delete dir="${smokeTestRelease.tmp}"/>
   </target>
 
-  <macrodef xmlns:ivy="antlib:org.apache.ivy.ant" name="svn-checker">
-    <attribute name="failonmodifications" default="true"/> <!-- false if file modifications are allowed -->
-    <sequential>
-      <ivy:cachepath organisation="org.tmatesoft.svnkit" module="svnkit" revision="${svnkit.version}"
-                     inline="true" conf="default" transitive="true" pathid="svnkit.classpath"/>
-      <local name="svn.checkprops.failed"/>
-      <local name="svn.unversioned.failed"/>
-      <local name="svn.keywords.failed"/>
-      <local name="svn.changed.failed"/>
-      <groovy taskname="svn" classpathref="svnkit.classpath"><![CDATA[
-        import org.tmatesoft.svn.core.*;
-        import org.tmatesoft.svn.core.wc.*;
-        import org.apache.tools.ant.Project;
-
-        SVNClientManager manager = SVNClientManager.newInstance();
-        SVNStatusClient statusClient = manager.getStatusClient();
-        SVNWCClient wcClient = manager.getWCClient();
-
-        File basedir = new File(properties['basedir']).getAbsoluteFile();
-        int baseLen = basedir.toString().length();
-
-        // do some fake check, to verify if this is valid SVN working copy. If this fails ignore checks but log some useful message.
-        task.log('Initializing working copy...', Project.MSG_INFO);
-        try {
-          wcClient.doInfo(basedir, SVNRevision.WORKING);
-        } catch (SVNException ex) {
-          def ec = ex.getErrorMessage().getErrorCode();
-          int code = ec.getCode();
-          int category = ec.getCategory();
-          if (code == SVNErrorCode.WC_NOT_DIRECTORY.getCode() || code == SVNErrorCode.WC_NOT_FILE.getCode()) {
-            task.log('WARNING: Development directory is not an SVN checkout! Disabling checks...', Project.MSG_WARN);
-            return;
-          } else if (category == SVNErrorCode.WC_CATEGORY) {
-            task.log('WARNING: Development directory is not a valid SVN checkout (' + ex.getErrorMessage() + '). Disabling checks...', Project.MSG_WARN);
-            return;
-          } else {
-            throw ex;
-          }
-        }
-
-        def convertRelative = {
-          file -> '.' + file.getAbsolutePath().substring(baseLen).replace(File.separatorChar, (char)'/');
-        }
-
-        Set missingProps = new TreeSet(), withKeywords = new TreeSet(), unversioned = new TreeSet(), changed = new TreeSet();
-
-        task.log('Getting all versioned and unversioned files...', Project.MSG_INFO);
-        statusClient.doStatus(basedir, SVNRevision.WORKING, SVNDepth.fromRecurse(true), false, true, false, false, {
-          status ->
-            SVNStatusType nodeStatus = status.getNodeStatus();
-            if (nodeStatus == SVNStatusType.STATUS_UNVERSIONED || nodeStatus == SVNStatusType.STATUS_MISSING) {
-              unversioned.add(convertRelative(status.getFile()));
-            } else if (status.getKind() == SVNNodeKind.FILE && nodeStatus != SVNStatusType.STATUS_DELETED) {
-              missingProps.add(convertRelative(status.getFile()));
-            }
-            if (nodeStatus == SVNStatusType.STATUS_MODIFIED || nodeStatus == SVNStatusType.STATUS_REPLACED ||
-                nodeStatus == SVNStatusType.STATUS_DELETED || nodeStatus == SVNStatusType.STATUS_ADDED) {
-              changed.add(convertRelative(status.getFile()));
-            }
-        } as ISVNStatusHandler, null);
-
-        task.log('Filtering files with existing svn:eol-style...', Project.MSG_INFO);
-        wcClient.doGetProperty(basedir, 'svn:eol-style', SVNRevision.WORKING, SVNRevision.WORKING, true, {
-          file, prop -> missingProps.remove(convertRelative(file));
-        } as ISVNPropertyHandler);
-
-        task.log('Filtering files with binary svn:mime-type...', Project.MSG_INFO);
-        wcClient.doGetProperty(basedir, 'svn:mime-type', SVNRevision.WORKING, SVNRevision.WORKING, true, {
-          file, prop ->
-            prop = SVNPropertyValue.getPropertyAsString(prop.getValue());
-            if (prop.startsWith('application/') || prop.startsWith('image/')) {
-              missingProps.remove(convertRelative(file));
-            }
-        } as ISVNPropertyHandler);
-
-        task.log('Scanning for files with svn:keywords property...', Project.MSG_INFO);
-        wcClient.doGetProperty(basedir, 'svn:keywords', SVNRevision.WORKING, SVNRevision.WORKING, true, {
-          file, prop -> withKeywords.add(convertRelative(file));
-        } as ISVNPropertyHandler);
-
-        def setProjectPropertyFromSet(prop, set) {
-          if (set) {
-            properties[prop] = '* ' + set.join(properties['line.separator'] + '* ');
-          }
-        };
-        setProjectPropertyFromSet('svn.checkprops.failed', missingProps);
-        setProjectPropertyFromSet('svn.keywords.failed', withKeywords);
-        setProjectPropertyFromSet('svn.unversioned.failed', unversioned);
-        setProjectPropertyFromSet('svn.changed.failed', changed);
-      ]]></groovy>
-      <fail if="svn.checkprops.failed"
-            message="The following files are missing svn:eol-style (or binary svn:mime-type):${line.separator}${svn.checkprops.failed}"/>
-      <fail if="svn.keywords.failed"
-            message="The following files have the svn:keywords property set:${line.separator}${svn.keywords.failed}"/>
-      <fail if="svn.unversioned.failed"
-            message="Source checkout is dirty after running tests!!! Offending files:${line.separator}${svn.unversioned.failed}"/>
-      <fail message="Source checkout is modified !!! Offending files:${line.separator}${svn.changed.failed}">
-        <condition>
-          <and>
-            <istrue value="@{failonmodifications}"/>
-            <isset property="svn.changed.failed"/>
-          </and>
-        </condition>
-      </fail>
-    </sequential>
-  </macrodef>
-
-  <target name="check-svn-working-copy" depends="ivy-availability-check,ivy-fail,ivy-configure,resolve-groovy">
-    <svn-checker failonmodifications="false"/>
+  <target name="check-working-copy">
+    <echo>This task is currently disabled due to migration to GIT</echo>
   </target>
 
   <target name="run-clover" description="Runs all tests to measure coverage and generates report (pass "ANT_OPTS=-Xmx1536M" as environment)" depends="clean">
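
Note: the commit only stubs out the check; it does not add a git-based replacement. For illustration only, a git-aware equivalent could shell out to git and fail on a dirty tree, roughly like the sketch below (target and property names are hypothetical, not from this commit):

  <target name="check-working-copy-git-sketch">
    <!-- "git status --porcelain" prints one line per modified or untracked file -->
    <exec executable="git" outputproperty="git.status.output" failonerror="false">
      <arg value="status"/>
      <arg value="--porcelain"/>
    </exec>
    <!-- fail whenever git reported anything at all -->
    <fail message="Source checkout is modified or contains unversioned files:${line.separator}${git.status.output}">
      <condition>
        <not>
          <equals arg1="${git.status.output}" arg2="" trim="true"/>
        </not>
      </condition>
    </fail>
  </target>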
@@ -754,8 +645,8 @@ File | Project Structure | Platform Settings | SDKs):
   </target>
 
   <!-- should only be called by jenkins, not precommit! -->
-  <target name="-check-after-regeneration" depends="ivy-availability-check,ivy-fail,ivy-configure,resolve-groovy">
-    <svn-checker failonmodifications="true"/>
+  <target name="-check-after-regeneration">
+    <!-- TODO -->
   </target>
 
   <!-- TODO: remove me when jenkins works -->

@@ -771,7 +662,7 @@ File | Project Structure | Platform Settings | SDKs):
   </target>
 
   <!-- Jenkins tasks -->
-  <target name="-jenkins-base" depends="-print-java-info,clean,test-with-heapdumps,validate,documentation-lint,jar-checksums,check-svn-working-copy"/>
+  <target name="-jenkins-base" depends="-print-java-info,clean,test-with-heapdumps,validate,documentation-lint,jar-checksums,check-working-copy"/>
 
   <target name="-print-java-info">
     <echo level="info" taskname="java-info">java version "${java.version}"
@@ -341,7 +341,7 @@
   <target name="package-tgz-src" depends="init-dist"
           description="--> Generates the Lucene source distribution as .tgz">
     <property name="source.package.file"
-              value="${dist.dir}/lucene-${version}-src.tgz"/>
+              location="${dist.dir}/lucene-${version}-src.tgz"/>
     <delete file="${source.package.file}"/>
     <export-source source.dir="."/>
 
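
Note: the only change in this hunk is <property value=.../> becoming <property location=.../>. In Ant, value stores the string verbatim, while location resolves a relative path against the project basedir and normalizes it to an absolute path, so ${source.package.file} points at the same file no matter which directory a nested task happens to run in; presumably a robustness fix alongside the packaging change below. A minimal standalone illustration (not from the build; names and paths are examples):

  <project name="value-vs-location-demo" basedir=".">
    <!-- kept verbatim: interpreted relative to whatever directory later consumers use -->
    <property name="rel.file" value="dist/lucene-src.tgz"/>
    <!-- resolved against ${basedir} and stored as an absolute, normalized path -->
    <property name="abs.file" location="dist/lucene-src.tgz"/>
    <echo message="value:    ${rel.file}"/>
    <echo message="location: ${abs.file}"/>
  </project>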
@@ -350,11 +350,14 @@
     <!-- Exclude clover license files incompatible with the ASL -->
     <delete dir="${src.export.dir}/tools/clover"/>
 
-    <build-changes changes.src.file="${src.export.dir}/CHANGES.txt"
-                   changes.target.dir="${src.export.dir}/docs/changes"
+    <!-- because we only package the "lucene/" folder, we have to adjust dir to work on: -->
+    <property name="local.src.export.dir" location="${src.export.dir}/lucene"/>
+
+    <build-changes changes.src.file="${local.src.export.dir}/CHANGES.txt"
+                   changes.target.dir="${local.src.export.dir}/docs/changes"
                    changes.product="LUCENE"/>
     <tar tarfile="${source.package.file}" compression="gzip" longfile="gnu">
-      <tarfileset prefix="lucene-${version}" dir="${src.export.dir}"/>
+      <tarfileset prefix="lucene-${version}" dir="${local.src.export.dir}"/>
     </tar>
     <make-checksums file="${source.package.file}"/>
   </target>
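
Note: with the new local.src.export.dir, both build-changes and the tarfileset operate on ${src.export.dir}/lucene, so the archive holds only the exported lucene/ folder, laid out under the lucene-${version}/ prefix; that is the actual src.tgz fix named in the commit title. As a generic illustration of how <tarfileset prefix=... dir=...> maps a directory into the archive (paths and version below are hypothetical):

  <tar tarfile="build/demo-src.tgz" compression="gzip" longfile="gnu">
    <!-- every file under build/export/lucene is stored as lucene-X.Y.Z/<relative path> -->
    <tarfileset prefix="lucene-X.Y.Z" dir="build/export/lucene"/>
  </tar>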
@@ -453,7 +453,7 @@
   <target name="package-src-tgz" depends="init-dist"
           description="Packages the Solr Source Distribution">
     <property name="source.package.file"
-              value="${package.dir}/${fullnamever}-src.tgz"/>
+              location="${package.dir}/${fullnamever}-src.tgz"/>
     <delete file="${source.package.file}" failonerror="false" />
     <export-source source.dir=".."/>
 