mirror of https://github.com/apache/lucene.git
SOLR-2747: Added 'ant changes-to-html' to Solr.
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1378452 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent 223b127eae
commit c8aa9c1f42
@ -7553,7 +7553,7 @@ Infrastructure
 11. Fixed bugs in GermanAnalyzer (gschwarz)
 
 
-1.2 RC2:
+1.2 RC2
  - added sources to distribution
  - removed broken build scripts and libraries from distribution
  - SegmentsReader: fixed potential race condition
@ -7568,7 +7568,8 @@ Infrastructure
  - JDK 1.1 compatibility fix: disabled lock files for JDK 1.1,
    since they rely on a feature added in JDK 1.2.
 
-1.2 RC1 (first Apache release):
+1.2 RC1
+ - first Apache release
  - packages renamed from com.lucene to org.apache.lucene
  - license switched from LGPL to Apache
  - ant-only build -- no more makefiles
@ -7579,7 +7580,8 @@ Infrastructure
  - Analyzers can choose tokenizer based on field name
  - misc bug fixes.
 
-1.01b (last Sourceforge release)
+1.01b
+ . last Sourceforge release
  . a few bug fixes
  . new Query Parser
  . new prefix query (search for "foo*" matches "food")
@ -470,24 +470,6 @@
     <modules-crawl target="test" failonerror="true"/>
   </target>
 
-  <!--
-   compile changes.txt into an html file
-   -->
-  <macrodef name="build-changes">
-    <attribute name="changes.src.dir" default="${changes.src.dir}"/>
-    <attribute name="changes.target.dir" default="${changes.target.dir}"/>
-    <sequential>
-      <mkdir dir="@{changes.target.dir}"/>
-      <exec executable="perl" input="CHANGES.txt" output="@{changes.target.dir}/Changes.html"
-            failonerror="true" logError="true">
-        <arg value="@{changes.src.dir}/changes2html.pl"/>
-      </exec>
-      <copy todir="@{changes.target.dir}">
-        <fileset dir="@{changes.src.dir}" includes="*.css"/>
-      </copy>
-    </sequential>
-  </macrodef>
-
   <target name="changes-to-html">
     <build-changes changes.src.dir="${changes.src.dir}" changes.target.dir="${changes.target.dir}" />
   </target>
@ -1681,4 +1681,22 @@ ${tests-output}/junit4-*.suites - per-JVM executed suites
     </sequential>
   </macrodef>
 
+  <!--
+   compile changes.txt into an html file
+   -->
+  <macrodef name="build-changes">
+    <attribute name="changes.src.dir" default="${changes.src.dir}"/>
+    <attribute name="changes.target.dir" default="${changes.target.dir}"/>
+    <sequential>
+      <mkdir dir="@{changes.target.dir}"/>
+      <exec executable="perl" input="CHANGES.txt" output="@{changes.target.dir}/Changes.html"
+            failonerror="true" logError="true">
+        <arg value="@{changes.src.dir}/changes2html.pl"/>
+      </exec>
+      <copy todir="@{changes.target.dir}">
+        <fileset dir="@{changes.src.dir}" includes="*.css"/>
+      </copy>
+    </sequential>
+  </macrodef>
+
 </project>
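For orientation, the build-changes macro added above amounts to running the changes2html.pl converter over CHANGES.txt and copying the stylesheet next to the output. A rough stand-alone equivalent in Perl is sketched below; the directory names are assumptions standing in for the Ant properties @{changes.src.dir} and @{changes.target.dir}, not paths taken from this commit.

```perl
#!/usr/bin/perl
# Sketch only: a stand-alone equivalent of the <mkdir>/<exec>/<copy> steps in the
# build-changes macro above. The paths are assumptions, not values from the build.
use strict;
use warnings;
use File::Path qw(make_path);
use File::Copy qw(copy);

my $src  = 'site/changes';          # stands in for @{changes.src.dir}
my $dest = 'build/docs/changes';    # stands in for @{changes.target.dir}

make_path($dest);                   # <mkdir dir="@{changes.target.dir}"/>

# <exec executable="perl" input="CHANGES.txt" output=".../Changes.html">
system("perl $src/changes2html.pl < CHANGES.txt > $dest/Changes.html") == 0
  or die "changes2html.pl failed: $?";

# <copy todir="..."><fileset includes="*.css"/></copy>
copy($_, $dest) or die "copy $_ failed: $!" for glob "$src/*.css";
```

With the macro wired into solr/build.xml later in this commit, the same conversion is available as 'ant changes-to-html' from the solr directory.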
@ -25,13 +25,12 @@ use strict;
 use warnings;
 
 # JIRA REST API documentation: <http://docs.atlassian.com/jira/REST/latest/>
-my $project_info_url = 'https://issues.apache.org/jira/rest/api/2/project/LUCENE';
+my $project_info_url = 'https://issues.apache.org/jira/rest/api/2/project';
 my $jira_url_prefix = 'http://issues.apache.org/jira/browse/';
 my $bugzilla_url_prefix = 'http://issues.apache.org/bugzilla/show_bug.cgi?id=';
-my %release_dates = &setup_release_dates;
 my $month_regex = &setup_month_regex;
 my %month_nums = &setup_month_nums;
-my %bugzilla_jira_map = &setup_bugzilla_jira_map;
+my %lucene_bugzilla_jira_map = &setup_lucene_bugzilla_jira_map;
 my $title = undef;
 my $release = undef;
 my $reldate = undef;
@ -44,20 +43,35 @@ my @releases = ();
 
 my @lines = <>;   # Get all input at once
 
+my $product = '';
+for my $line (@lines) {
+  ($product) = $line =~ /(Solr|Lucene)/i;
+  if ($product) {
+    $product = uc($product);
+    last;
+  }
+}
+my %release_dates = &setup_release_dates;
+my $in_major_component_versions_section = 0;
+
 #
 # Parse input and build hierarchical release structure in @releases
 #
 for (my $line_num = 0 ; $line_num <= $#lines ; ++$line_num) {
   $_ = $lines[$line_num];
-  next unless (/\S/);                  # Skip blank lines
+  unless (/\S/) {                      # Skip blank lines
+    $in_major_component_versions_section = 0;
+    next;
+  }
   next if (/^\s*\$Id(?::.*)?\$/);      # Skip $Id$ lines
+  next if (/^\s{0,4}-{5,}\s*$/);       # Skip Solr's section underlines
 
   unless ($title) {
     if (/\S/) {
-      s/^\s+//;                        # Trim leading whitespace
+      s/^[^\p{N}\p{L}]*//;   # Trim leading non-alphanum chars, including BOM chars, if any
       s/\s+$//;                        # Trim trailing whitespace
     }
-    s/^[^Ll]*//;   # Trim leading BOM characters if exists
     $title = $_;
     next;
   }
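As a quick illustration (the input line is hypothetical), the product-sniffing loop added above resolves the product name from the first line that mentions Lucene or Solr:

```perl
# Hypothetical input; the loop body is the one added in the hunk above.
my @lines = ("Apache Solr Release Notes\n", "...\n");
my $product = '';
for my $line (@lines) {
  ($product) = $line =~ /(Solr|Lucene)/i;
  if ($product) {
    $product = uc($product);
    last;
  }
}
print "$product\n";   # prints: SOLR
```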
@ -71,8 +85,11 @@ for (my $line_num = 0 ; $line_num <= $#lines ; ++$line_num) {
     ($reldate, $relinfo) = get_release_date($release, $relinfo);
     $sections = [];
     push @releases, [ $release, $reldate, $relinfo, $sections ];
-    ($first_relid = lc($release)) =~ s/\s+/_/g if ($#releases == 0);
-    ($second_relid = lc($release)) =~ s/\s+/_/g if ($#releases == 1);
+    ($first_relid = lc($release)) =~ s/\s+/_/g
+      if ($#releases == 0 or ($#releases == 1 and not ($releases[0][0])));
+    ($second_relid = lc($release)) =~ s/\s+/_/g
+      if ( ($#releases == 1 and $releases[0][0])
+           or ($#releases == 2 and not $releases[0][0]));
     $items = undef;
     next;
   }
@ -90,18 +107,43 @@ for (my $line_num = 0 ; $line_num <= $#lines ; ++$line_num) {
     next;
   }
 
+  if (m!^20\d\d[-/]\d{1,2}[-/]\d{1,2}!) {   # Collect dated postscripts
+    my $item = $_;
+    my $line = '';
+    while ($line_num < $#lines and ($line = $lines[++$line_num]) =~ /\S/) {
+      $line =~ s/^\s+//;   # Trim leading whitespace
+      $line =~ s/\s+$//;   # Trim trailing whitespace
+      $item .= "$line\n";
+    }
+    push @releases, [ $item, '', '', [] ];
+    next;
+  }
+
   # Section heading: no leading whitespace, initial word capitalized,
   # five words or less, and no trailing punctuation
-  if (/^([A-Z]\S*(?:\s+\S+){0,4})(?<![-.:;!()])\s*$/) {
+  if ( /^([A-Z]\S*(?:\s+\S+){0,4})(?<![-.:;!()])\s*$/
+       and not $in_major_component_versions_section) {
     my $heading = $1;
     $items = [];
+    unless (@releases) {
+      $sections = [];
+      # Make a fake release to hold pre-release sections
+      push @releases, [ undef, undef, undef, $sections ];
+    }
     push @$sections, [ $heading, $items ];
+    $in_major_component_versions_section
+      = ($heading =~ /Versions of Major Components/i);
     next;
   }
 
   # Handle earlier releases without sections - create a headless section
   unless ($items) {
     $items = [];
+    unless (@releases) {
+      $sections = [];
+      # Make a fake release to hold pre-release sections and items
+      push @releases, [ undef, undef, undef, $sections ];
+    }
     push @$sections, [ undef, $items ];
   }
 
@ -130,7 +172,7 @@ for (my $line_num = 0 ; $line_num <= $#lines ; ++$line_num) {
     }
     $item =~ s/\n+\Z/\n/;   # Trim trailing blank lines
     push @$items, $item;
-    --$line_num unless ($line_num == $#lines);
+    --$line_num unless ($line_num == $#lines && $lines[$line_num] !~ /^20/);
   } elsif ($type eq 'paragraph') {   # List item boundary is a blank line
     my $line;
     my $item = $_;
@ -139,13 +181,17 @@ for (my $line_num = 0 ; $line_num <= $#lines ; ++$line_num) {
     $item =~ s/\s+$//;   # Trim trailing whitespace
     $item .= "\n";
 
+    unless ($in_major_component_versions_section) {
     while ($line_num < $#lines and ($line = $lines[++$line_num]) =~ /\S/) {
       $line =~ s/^\s{$leading_ws_width}//;   # Trim leading whitespace
       $line =~ s/\s+$//;                     # Trim trailing whitespace
       $item .= "$line\n";
     }
+    } else {
+      ++$line_num;
+    }
     push @$items, $item;
-    --$line_num unless ($line_num == $#lines);
+    --$line_num unless ($line_num == $#lines && $lines[$line_num] !~ /^20/);
   } else {   # $type is one of the bulleted types
     # List item boundary is another bullet or a blank line
     my $line;
@ -162,7 +208,7 @@ for (my $line_num = 0 ; $line_num <= $#lines ; ++$line_num) {
       $item .= "$line\n";
     }
     push @$items, $item;
-    --$line_num unless ($line_num == $#lines);
+    --$line_num unless ($line_num == $#lines && $lines[$line_num] !~ /^20/);
   }
 }
 
@ -357,9 +403,12 @@ __HTML_HEADER__
 my $heading;
 my $relcnt = 0;
 my $header = 'h2';
+my $subheader = 'h3';
 
 for my $rel (@releases) {
-  if (++$relcnt == 3) {
+  if ($relcnt == 2) {
     $header = 'h3';
+    $subheader = 'h4';
     print "<h2><a id=\"older\" href=\"javascript:toggleList('older')\">";
     print "Older Releases";
     print "</a></h2>\n";
@ -371,37 +420,54 @@ for my $rel (@releases) {
   # The first section heading is undefined for the older sectionless releases
   my $has_release_sections = has_release_sections($sections);
 
-  (my $relid = lc($release)) =~ s/\s+/_/g;
-  print "<$header><a id=\"$relid\" href=\"javascript:toggleList('$relid')\">";
-  print "Release " unless ($release =~ /^trunk$/i);
+  my $relid = '';
+  if ($release) { # Pre-release sections have no release ID
+    ++$relcnt;
+    ($relid = lc($release)) =~ s/\s+/_/g;
+    print "<$header>";
+    print "<a id=\"$relid\" href=\"javascript:toggleList('$relid')\">"
+      unless ($release =~ /^20\d\d/);
+    print "Release " unless ($release =~ /^trunk$|^20\d\d/i);
     print "$release $relinfo";
-    print " [$reldate]" unless ($reldate eq 'unknown');
-    print "</a></$header>\n";
+    print " [$reldate]" unless ($reldate eq 'unknown' or not $reldate);
+    print "</a>" unless ($release =~ /^20\d\d/);
+    print "</$header>\n";
     print "<ul id=\"$relid.list\">\n"
       if ($has_release_sections);
+  }
 
   for my $section (@$sections) {
     ($heading, $items) = @$section;
     (my $sectid = lc($heading)) =~ s/\s+/_/g;
     my $numItemsStr = $#{$items} > 0 ? "($#{$items})" : "(none)";
 
+    my $list_item = "li";
+    if ($release) {
+      if ($heading and $heading eq 'Detailed Change List') {
+        print "  <$subheader>$heading</$subheader>\n";
+        next;
+      } elsif ($has_release_sections and $heading) {
         print "  <li><a id=\"$relid.$sectid\"",
               " href=\"javascript:toggleList('$relid.$sectid')\">",
               ($heading || ''), "</a> $numItemsStr\n"
-          if ($has_release_sections and $heading);
+      }
+    } else {
+      print "<h2>$heading</h2>\n" if ($heading);
+      $list_item = "p";
+    }
 
     my $list_type = $items->[0] || '';
     my $list = ($has_release_sections || $list_type eq 'numbered' ? 'ol' : 'ul');
     my $listid = $sectid ? "$relid.$sectid" : $relid;
     print "  <$list id=\"$listid.list\">\n"
-      unless ($has_release_sections and not $heading);
+      unless (not $release or ($has_release_sections and not $heading));
 
     for my $itemnum (1..$#{$items}) {
       my $item = $items->[$itemnum];
-      $item =~ s:&:&amp;:g;                        # Escape HTML metachars, but leave
-      $item =~ s:<(?!/?code>):&lt;:gi;             # <code> tags intact and add <pre>
-      $item =~ s:(?<!code)>:&gt;:gi;               # wrappers for non-inline sections
-      $item =~ s{((?:^|.*\n)\s*)<code>(?!</code>.+)(.+)</code>(?![ \t]*\S)}
+      $item =~ s:&:&amp;:g;                        # Escape HTML metachars, but leave <code> tags
+      $item =~ s~<(?!/?code>(?:[^,]|$))~&lt;~gi;   # intact - unless followed by a comma - and
+      $item =~ s:(?<!code)>:&gt;:gi;               # add <pre> wrappers for non-inline sections
+      $item =~ s{((?:^|.*\n)\s*)<code>(?!,)(?!</code>.+)(.+)</code>(?![ \t]*\S)}
       {
         my $prefix = $1;
         my $code = $2;
@ -409,62 +475,67 @@ for my $rel (@releases) {
         "$prefix<code><pre>$code</pre></code>"
       }gise;
 
-      # Put attributions on their own lines.
-      # Check for trailing parenthesized attribution with no following period.
-      # Exclude things like "(see #3 above)" and "(use the bug number instead of xxxx)"
-      unless ($item =~ s:\s*(\((?!see #|use the bug number)[^()"]+?\))\s*$:\n<br /><span class="attrib">$1</span>:) {
-        # If attribution is not found, then look for attribution with a
-        # trailing period, but try not to include trailing parenthesized things
-        # that are not attributions.
-        #
-        # Rule of thumb: if a trailing parenthesized expression with a following
-        # period does not contain "LUCENE-XXX", and it either has three or
-        # fewer words or it includes the word "via" or the phrase "updates from",
-        # then it is considered to be an attribution.
-
-        $item =~ s{(\s*(\((?!see \#|use the bug number)[^()"]+?\)))
-                   ((?:\.|(?i:\.?\s*Issue\s+\d{3,}|LUCENE-\d+)\.?)\s*)$}
-                  {
-                    my $subst = $1;   # default: no change
-                    my $parenthetical = $2;
-                    my $trailing_period_and_or_issue = $3;
-                    if ($parenthetical !~ /LUCENE-\d+/) {
-                      my ($no_parens) = $parenthetical =~ /^\((.*)\)$/s;
-                      my @words = grep {/\S/} split /\s+/, $no_parens;
-                      if ($no_parens =~ /\b(?:via|updates\s+from)\b/i || scalar(@words) <= 3) {
-                        $subst = "\n<br /><span class=\"attrib\">$parenthetical</span>";
-                      }
-                    }
-                    $subst . $trailing_period_and_or_issue;
-                  }ex;
-      }
+      $item = markup_trailing_attribution($item) unless ($item =~ /\n[ ]*-/);
 
       $item =~ s{(.*?)(<code><pre>.*?</pre></code>)|(.*)}
                 {
                   my $uncode = undef;
-                  if (defined($2)) {
-                    $uncode = $1 || '';
+                  my ($one,$two,$three) = ($1,$2,$3);
+                  if (defined($two)) {
+                    $uncode = $one || '';
+                    $uncode =~ s{^(.*?)(?=\n[ ]*-)}
+                                {
+                                  my $prefix = $1;
+                                  my ($primary,$additional_work) = $prefix =~ /^(.*?)((?:\s*Additional\s+Work:\s*)?)$/si;
+                                  my $result = markup_trailing_attribution($primary);
+                                  $result .= "<br />\n$additional_work<br />" if ($additional_work);
+                                  $result;
+                                }se;
                     $uncode =~ s{((?<=\n)[ ]*-.*\n(?:.*\n)*)}
                                 {
                                   my $bulleted_list = $1;
                                   $bulleted_list
                                     =~ s{(?:(?<=\n)|\A)[ ]*-[ ]*(.*(?:\n|\z)(?:[ ]+[^ -].*(?:\n|\z))*)}
-                                        {<li class="bulleted-list">\n$1</li>\n}g;
+                                        {
+                                          qq!<li class="bulleted-list">\n!
+                                          . markup_trailing_attribution($1)
+                                          . "</li>\n"
+                                        }ge;
                                   $bulleted_list
-                                    =~ s!(<li.*</li>\n)!<ul class="bulleted-list">\n$1</ul>\n!s;
+                                    =~ s{(<li.*</li>\n)(.*)}
+                                        {
+                                          qq!<ul class="bulleted-list">\n$1</ul>\n!
+                                          . markup_trailing_attribution($2 || '')
+                                        }se;
                                   $bulleted_list;
                                 }ge;
-                    "$uncode$2";
+                    "$uncode$two";
                   } else {
-                    $uncode = $3 || '';
+                    $uncode = $three || '';
+                    $uncode =~ s{^(.*?)(?=\n[ ]*-)}
+                                {
+                                  my $prefix = $1;
+                                  my ($primary,$additional_work) = $prefix =~ /^(.*?)((?:\s*Additional\s+Work:\s*)?)$/si;
+                                  my $result = markup_trailing_attribution($primary);
+                                  $result .= "<br />\n$additional_work<br />" if ($additional_work);
+                                  $result;
+                                }se;
                     $uncode =~ s{((?<=\n)[ ]*-.*\n(?:.*\n)*)}
                                 {
                                   my $bulleted_list = $1;
                                   $bulleted_list
                                     =~ s{(?:(?<=\n)|\A)[ ]*-[ ]*(.*(?:\n|\z)(?:[ ]+[^ -].*(?:\n|\z))*)}
-                                        {<li class="bulleted-list">\n$1</li>\n}g;
+                                        {
+                                          qq!<li class="bulleted-list">\n!
+                                          . markup_trailing_attribution($1)
+                                          . "</li>\n"
+                                        }ge;
                                   $bulleted_list
-                                    =~ s!(<li.*</li>\n)!<ul class="bulleted-list">\n$1</ul>\n!s;
+                                    =~ s{(<li.*</li>\n)(.*)}
+                                        {
+                                          qq!<ul class="bulleted-list">\n$1</ul>\n!
+                                          . markup_trailing_attribution($2 || '')
+                                        }se;
                                   $bulleted_list;
                                 }ge;
                     $uncode;
@ -480,10 +551,11 @@ for my $rel (@releases) {
       # Link Lucene XXX, SOLR XXX and INFRA XXX to JIRA
       $item =~ s{((LUCENE|SOLR|INFRA)\s+(\d{3,}))}
                 {<a href="${jira_url_prefix}\U$2\E-$3">$1</a>}gi;
+      if ($product eq 'LUCENE') {
       # Find single Bugzilla issues
       $item =~ s~((?i:bug|patch|issue)\s*\#?\s*(\d+))
                 ~ my $issue = $1;
-                  my $jira_issue_num = $bugzilla_jira_map{$2};   # Link to JIRA copies
+                  my $jira_issue_num = $lucene_bugzilla_jira_map{$2};   # Link to JIRA copies
                   $issue = qq!<a href="${jira_url_prefix}LUCENE-$jira_issue_num">!
                          . qq!$issue [LUCENE-$jira_issue_num]</a>!
                     if (defined($jira_issue_num));
@ -496,33 +568,102 @@ for my $rel (@releases) {
                   my $interlude = $3;
                   my $issue_num_2 = $4;
                   # Link to JIRA copies
-                  my $jira_issue_1 = $bugzilla_jira_map{$issue_num_1};
+                  my $jira_issue_1 = $lucene_bugzilla_jira_map{$issue_num_1};
                   my $issue1
                     = qq!<a href="${jira_url_prefix}LUCENE-$jira_issue_1">!
                     . qq!$issue_num_1 [LUCENE-$jira_issue_1]</a>!
                       if (defined($jira_issue_1));
-                  my $jira_issue_2 = $bugzilla_jira_map{$issue_num_2};
+                  my $jira_issue_2 = $lucene_bugzilla_jira_map{$issue_num_2};
                   my $issue2
                     = qq!<a href="${jira_url_prefix}LUCENE-$jira_issue_2">!
                     . qq!$issue_num_2 [LUCENE-$jira_issue_2]</a>!
                       if (defined($jira_issue_2));
                   $leading_whitespace . $issue1 . $interlude . $issue2;
                 ~gex;
+      }
 
       # Linkify URLs, except Bugzilla links, which don't work anymore
-      $item =~ s~(?<![">])(https?://(?!(?:nagoya|issues)\.apache\.org/bugzilla)\S+)~<a href="$1">$1</a>~g;
+      $item =~ s~(?<![">])(https?://(?!(?:nagoya|issues)\.apache\.org/bugzilla)[^\s\)]+)~<a href="$1">$1</a>~g;
 
-      print "  <li>$item</li>\n";
+      $item =~ s~</ul>\s+<p/>\s+<br\s*/>~</ul>~;
+
+      print "  <$list_item>$item</$list_item>\n";
     }
-    print "  </$list>\n" unless ($has_release_sections and not $heading);
-    print "  </li>\n" if ($has_release_sections);
+    print "  </$list>\n" unless (not $release or ($has_release_sections and not $heading));
+    print "  </li>\n" if ($release and $has_release_sections);
   }
-  print "</ul>\n" if ($has_release_sections);
+  print "</ul>\n" if ($release and $has_release_sections);
 }
 print "</ul>\n" if ($relcnt > 3);
 print "</body>\n</html>\n";
 
 
+# Subroutine: markup_trailing_attribution
+#
+# Takes one parameter:
+#
+#  - text possibly containing a trailing parenthesized attribution
+#
+# Returns one scalar:
+#
+#  - text with the the trailing attribution, if any, marked up with the color green
+#
+sub markup_trailing_attribution {
+  my $item = shift;
+
+  # Put attributions on their own lines.
+  # Check for trailing parenthesized attribution with no following period.
+  # Exclude things like "(see #3 above)" and "(use the bug number instead of xxxx)"
+  unless ($item =~ s{\s*(\((?![Ss]ee )
+                         (?!spans\b)
+                         (?!mainly\ )
+                         (?!LUCENE-\d+\))
+                         (?!and\ )
+                         (?!backported\ )
+                         (?!in\ )
+                         (?!inverse\ )
+                         (?![Tt]he\ )
+                         (?!use\ the\ bug\ number)
+                         [^()"]+?\))\s*$}
+                    {\n<br /><span class="attrib">$1</span>}x) {
+    # If attribution is not found, then look for attribution with a
+    # trailing period, but try not to include trailing parenthesized things
+    # that are not attributions.
+    #
+    # Rule of thumb: if a trailing parenthesized expression with a following
+    # period does not contain "LUCENE-XXX", and it either has three or
+    # fewer words or it includes the word "via" or the phrase "updates from",
+    # then it is considered to be an attribution.
+
+    $item =~ s{(\s*(\((?![Ss]ee\ )
+                       (?!spans\b)
+                       (?!mainly\ )
+                       (?!LUCENE-\d+\))
+                       (?!and\ )
+                       (?!backported\ )
+                       (?!in\ )
+                       (?!inverse\ )
+                       (?![Tt]he\ )
+                       (?!use\ the\ bug\ number)
+                       [^()"]+?\)))
+               ((?:\.|(?i:\.?\s*Issue\s+\d{3,}|LUCENE-\d+)\.?)\s*)$}
+              {
+                my $subst = $1;   # default: no change
+                my $parenthetical = $2;
+                my $trailing_period_and_or_issue = $3;
+                if ($parenthetical !~ /LUCENE-\d+/) {
+                  my ($no_parens) = $parenthetical =~ /^\((.*)\)$/s;
+                  my @words = grep {/\S/} split /\s+/, $no_parens;
+                  if ($no_parens =~ /\b(?:via|updates\s+from)\b/i || scalar(@words) <= 4) {
+                    $subst = "\n<br /><span class=\"attrib\">$parenthetical</span>";
+                  }
+                }
+                $subst . $trailing_period_and_or_issue;
+              }ex;
+  }
+  return $item;
+}
+
 #
 # Subroutine: has_release_sections
 #
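To make the new helper concrete, here is a hypothetical call; the input string is invented, and the expected result follows from the first substitution in the subroutine added above.

```perl
# Hypothetical input; markup_trailing_attribution() is the subroutine added above.
my $text = markup_trailing_attribution(
  "Fix foo in the bar analyzer (John Doe via committer)");
# $text is now:
#   "Fix foo in the bar analyzer\n<br /><span class=\"attrib\">(John Doe via committer)</span>"
```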
@ -636,6 +777,8 @@ sub get_release_date {
   # Handle '1.2 RC6', which should be '1.2 final'
   $release = '1.2 final' if ($release eq '1.2 RC6');
 
+  $release =~ s/\.0\.0/\.0/;
+
   $reldate = ( exists($release_dates{$release})
                ? $release_dates{$release}
                : 'unknown');
@ -658,7 +801,9 @@ sub get_release_date {
 # as well as those named "final" are included below.
 #
 sub setup_release_dates {
-  my %release_dates
+  my %release_dates;
+  if (uc($product) eq 'LUCENE') {
+    %release_dates
       = ( '0.01' => '2000-03-30', '0.04' => '2000-04-19',
           '1.0' => '2000-10-04', '1.01b' => '2001-06-02',
           '1.2 RC1' => '2001-10-02', '1.2 RC2' => '2001-10-19',
@ -677,8 +822,10 @@ sub setup_release_dates {
           '2.4.0' => '2008-10-06', '2.4.1' => '2009-03-09',
           '2.9.0' => '2009-09-23', '2.9.1' => '2009-11-06',
           '3.0.0' => '2009-11-25');
+  }
 
-  my $project_info_json = get_url_contents($project_info_url);
+  print STDERR "Retrieving $project_info_url/$product ...\n";
+  my $project_info_json = get_url_contents("$project_info_url/$product");
 
   my $project_info = json2perl($project_info_json);
   for my $version (@{$project_info->{versions}}) {
@ -750,12 +897,12 @@ sub setup_month_nums {
 
 
 #
-# setup_bugzilla_jira_map
+# setup_lucene_bugzilla_jira_map
 #
 # Returns a list of alternating Bugzilla bug IDs and LUCENE-* JIRA issue
-# numbers, for use in populating the %bugzilla_jira_map hash
+# numbers, for use in populating the %lucene_bugzilla_jira_map hash
 #
-sub setup_bugzilla_jira_map {
+sub setup_lucene_bugzilla_jira_map {
   return ( 4049 => 1, 4102 => 2, 4105 => 3, 4254 => 4,
            4555 => 5, 4568 => 6, 4754 => 7, 5313 => 8,
            5456 => 9, 6078 => 10, 6091 => 11, 6140 => 12,
@ -879,7 +1026,14 @@ sub json2perl {
   my $json_string = shift;
   $json_string =~ s/(:\s*)(true|false)/$1"$2"/g;
   $json_string =~ s/":/",/g;
-  return eval $json_string;
+  $json_string =~ s/\'/\\'/g;
+  $json_string =~ s/\"/\'/g;
+  my $project_info = eval $json_string;
+  die "ERROR eval'ing munged JSON string ||$json_string||: $@\n"
+    if ($@);
+  die "ERROR empty value after eval'ing JSON string ||$json_string||\n"
+    unless $project_info;
+  return $project_info;
 }
 
 1;
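The reworked json2perl() above still avoids a JSON module: it rewrites the JSON text into Perl literal syntax and evals it, now with error checking. A minimal sketch of that munging on a made-up response (the JSON content is an assumption, not actual JIRA output):

```perl
#!/usr/bin/perl
# Sketch of the munging approach used by json2perl() above; the input is made up.
use strict;
use warnings;

my $json_string = '{"name": "LUCENE", "archived": false, "versions": [{"name": "4.0"}]}';

$json_string =~ s/(:\s*)(true|false)/$1"$2"/g;   # quote bare booleans
$json_string =~ s/":/",/g;                       # "key": value  ->  "key", value
$json_string =~ s/\'/\\'/g;                      # escape any existing single quotes
$json_string =~ s/\"/\'/g;                       # double -> single quotes for Perl eval

my $data = eval $json_string;   # now a Perl hash-ref constructor
die "eval failed: $@" if $@;

print $data->{name}, "\n";                  # LUCENE
print $data->{versions}[0]{name}, "\n";     # 4.0
```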
solr/CHANGES.txt (112 lines changed)
@ -103,6 +103,10 @@ Other Changes
 
 * SOLR-3707: Upgrade Solr to Tika 1.2 (janhoy)
 
+* SOLR-2747: Updated changes2html.pl to handle Solr's CHANGES.txt; added
+  target 'changes-to-html' to solr/build.xml.
+  (Steve Rowe, Robert Muir)
+
 ==================  4.0.0-BETA ===================
 
 
@ -311,6 +315,7 @@ Other Changes
   Also, the configuration itself can be passed using the "dataConfig" parameter rather than
   using a file (this previously worked in debug mode only).  When configuration errors are
   encountered, the error message is returned in XML format. (James Dyer)
 
 * SOLR-3439: Make SolrCell easier to use out of the box.  Also improves "/browse" to display
   rich-text documents correctly, along with facets for author and content_type.
   With the new "content" field, highlighting of body is supported. See also SOLR-3672 for
@ -424,25 +429,25 @@ New Features
   built-in load balancing, and distributed indexing.
   (Jamie Johnson, Sami Siren, Ted Dunning, yonik, Mark Miller)
   Additional Work:
-  SOLR-2324: SolrCloud solr.xml parameters are not persisted by CoreContainer.
+  - SOLR-2324: SolrCloud solr.xml parameters are not persisted by CoreContainer.
   (Massimo Schiavon, Mark Miller)
-  SOLR-2287: Allow users to query by multiple, compatible collections with SolrCloud.
+  - SOLR-2287: Allow users to query by multiple, compatible collections with SolrCloud.
   (Soheb Mahmood, Alex Cowell, Mark Miller)
-  SOLR-2622: ShowFileRequestHandler does not work in SolrCloud mode.
+  - SOLR-2622: ShowFileRequestHandler does not work in SolrCloud mode.
   (Stefan Matheis, Mark Miller)
-  SOLR-3108: Error in SolrCloud's replica lookup code when replica's are hosted in same Solr instance.
+  - SOLR-3108: Error in SolrCloud's replica lookup code when replica's are hosted in same Solr instance.
   (Bruno Dumon, Sami Siren, Mark Miller)
-  SOLR-3080: Remove shard info from zookeeper when SolrCore is explicitly unloaded.
+  - SOLR-3080: Remove shard info from zookeeper when SolrCore is explicitly unloaded.
   (yonik, Mark Miller, siren)
-  SOLR-3437: Recovery issues a spurious commit to the cluster. (Trym R. Møller via Mark Miller)
-  SOLR-2822: Skip update processors already run on other nodes (hossman)
+  - SOLR-3437: Recovery issues a spurious commit to the cluster. (Trym R. Møller via Mark Miller)
+  - SOLR-2822: Skip update processors already run on other nodes (hossman)
 
 * SOLR-1566: Transforming documents in the ResponseWriters. This will allow
   for more complex results in responses and open the door for function queries
   as results.
   (ryan with patches from grant, noble, cmale, yonik, Jan Høydahl,
   Arul Kalaipandian, Luca Cavanna, hossman)
-  SOLR-2037: Thanks to SOLR-1566, documents boosted by the QueryElevationComponent
+  - SOLR-2037: Thanks to SOLR-1566, documents boosted by the QueryElevationComponent
   can be marked as boosted. (gsingers, ryan, yonik)
 
 * SOLR-2396: Add CollationField, which is much more efficient than
@ -459,9 +464,9 @@ New Features
   (Jan Høydahl via yonik)
 
 * SOLR-2272: Pseudo-join queries / filters.  Examples:
-  To restrict to the set of parents with at least one blue-eyed child:
+  - To restrict to the set of parents with at least one blue-eyed child:
     fq={!join from=parent to=name}eyes:blue
-  To restrict to the set of children with at least one blue-eyed parent:
+  - To restrict to the set of children with at least one blue-eyed parent:
     fq={!join from=name to=parent}eyes:blue
   (yonik)
 
@ -509,11 +514,11 @@ New Features
 * SOLR-2703: Added support for Lucene's "surround" query parser. (Simon Rosenthal, ehatcher)
 
 * SOLR-2754: Added factories for several ranking algorithms:
-  BM25SimilarityFactory: Okapi BM25
-  DFRSimilarityFactory: Divergence from Randomness models
-  IBSimilarityFactory: Information-based models
-  LMDirichletSimilarity: LM with Dirichlet smoothing
-  LMJelinekMercerSimilarity: LM with Jelinek-Mercer smoothing
+  - BM25SimilarityFactory: Okapi BM25
+  - DFRSimilarityFactory: Divergence from Randomness models
+  - IBSimilarityFactory: Information-based models
+  - LMDirichletSimilarity: LM with Dirichlet smoothing
+  - LMJelinekMercerSimilarity: LM with Jelinek-Mercer smoothing
   (David Mark Nemeskey, Robert Muir)
 
 * SOLR-2134 Trie* fields should support sortMissingLast=true, and deprecate Sortable* Field Types
@ -537,18 +542,18 @@ New Features
 * SOLR-2802: New FieldMutatingUpdateProcessor and Factory to simplify the
   development of UpdateProcessors that modify field values of documents as
   they are indexed. Also includes several useful new implementations:
-  RemoveBlankFieldUpdateProcessorFactory
-  TrimFieldUpdateProcessorFactory
-  HTMLStripFieldUpdateProcessorFactory
-  RegexReplaceProcessorFactory
-  FieldLengthUpdateProcessorFactory
-  ConcatFieldUpdateProcessorFactory
-  FirstFieldValueUpdateProcessorFactory
-  LastFieldValueUpdateProcessorFactory
-  MinFieldValueUpdateProcessorFactory
-  MaxFieldValueUpdateProcessorFactory
-  TruncateFieldUpdateProcessorFactory
-  IgnoreFieldUpdateProcessorFactory
+  - RemoveBlankFieldUpdateProcessorFactory
+  - TrimFieldUpdateProcessorFactory
+  - HTMLStripFieldUpdateProcessorFactory
+  - RegexReplaceProcessorFactory
+  - FieldLengthUpdateProcessorFactory
+  - ConcatFieldUpdateProcessorFactory
+  - FirstFieldValueUpdateProcessorFactory
+  - LastFieldValueUpdateProcessorFactory
+  - MinFieldValueUpdateProcessorFactory
+  - MaxFieldValueUpdateProcessorFactory
+  - TruncateFieldUpdateProcessorFactory
+  - IgnoreFieldUpdateProcessorFactory
   (hossman, janhoy)
 
 * SOLR-3120: Optional post filtering for spatial queries bbox and geofilt
@ -562,7 +567,7 @@ New Features
 
 * SOLR-2898: Support grouped faceting. (Martijn van Groningen)
   Additional Work:
-  SOLR-3406: Extended grouped faceting support to facet.query and facet.range parameters.
+  - SOLR-3406: Extended grouped faceting support to facet.query and facet.range parameters.
   (David Boychuck, Martijn van Groningen)
 
 * SOLR-2949: QueryElevationComponent is now supported with distributed search.
@ -646,9 +651,9 @@ New Features
 
 * SOLR-3508: Simplify JSON update format for deletes as well as allow
   version specification for optimistic locking.  Examples:
-  {"delete":"myid"}
-  {"delete":["id1","id2","id3"]}
-  {"delete":{"id":"myid", "_version_":123456789}}
+  - {"delete":"myid"}
+  - {"delete":["id1","id2","id3"]}
+  - {"delete":{"id":"myid", "_version_":123456789}}
   (yonik)
 
 * SOLR-3211: Allow parameter overrides in conjunction with "spellcheck.maxCollationTries".
@ -692,7 +697,7 @@ Optimizations
   works with SolrCore to provide faster 'soft' commits, and has an improved API
   that requires less instanceof special casing. (Mark Miller, Robert Muir)
   Additional Work:
-  SOLR-2697: commit and autocommit operations don't reset
+  - SOLR-2697: commit and autocommit operations don't reset
   DirectUpdateHandler2.numDocsPending stats attribute.
   (Alexey Serba, Mark Miller)
 
@ -739,7 +744,7 @@ Bug Fixes
 * SOLR-2193, SOLR-2565, SOLR-2651: SolrCores now properly share IndexWriters across SolrCore reloads.
   (Mark Miller, Robert Muir)
   Additional Work:
-  SOLR-2705: On reload, IndexWriterProvider holds onto the initial SolrCore it was created with.
+  - SOLR-2705: On reload, IndexWriterProvider holds onto the initial SolrCore it was created with.
   (Yury Kats, Mark Miller)
 
 * SOLR-2682: Remove addException() in SimpleFacet. FacetComponent no longer catches and embeds
@ -919,7 +924,7 @@ Other Changes
 * SOLR-2607: Removed deprecated client/ruby directory, which included solr-ruby and flare.
   (ehatcher)
 
-* Solr-3032: logOnce from SolrException logOnce and all the supporting
+* SOLR-3032: logOnce from SolrException logOnce and all the supporting
   structure is gone. abortOnConfugrationError is also gone as it is no longer referenced.
   Errors should be caught and logged at the top-most level or logged and NOT propagated up the
   chain. (Erick Erickson)
@ -999,7 +1004,7 @@ More information about this release, including any errata related to the
 release notes, upgrade instructions, or other changes may be found online at:
    https://wiki.apache.org/solr/Solr3.6.1
 
-Bug Fixes:
+Bug Fixes
 
 * LUCENE-3969: Throw IAE on bad arguments that could cause confusing errors in
   PatternTokenizer. CommonGrams populates PositionLengthAttribute correctly.
@ -1174,7 +1179,7 @@ New Features
 * SOLR-2001: The query component will substitute an empty query that matches
   no documents if the query parser returns null. This also prevents an
   exception from being thrown by the default parser if "q" is missing. (yonik)
-  SOLR-435: if q is "" then it's also acceptable. (dsmiley, hoss)
+  - SOLR-435: if q is "" then it's also acceptable. (dsmiley, hoss)
 
 * SOLR-2919: Added parametric tailoring options to ICUCollationKeyFilterFactory.
   These can be used to customize range query/sort behavior, for example to
@ -2913,10 +2918,10 @@ Use of the "charset" option when configuring the following Analysis
 Factories has been deprecated and will cause a warning to be logged.
 In future versions of Solr attempting to use this option will cause an
 error. See SOLR-1410 for more information.
- * GreekLowerCaseFilterFactory
- * RussianStemFilterFactory
- * RussianLowerCaseFilterFactory
- * RussianLetterTokenizerFactory
+ - GreekLowerCaseFilterFactory
+ - RussianStemFilterFactory
+ - RussianLowerCaseFilterFactory
+ - RussianLetterTokenizerFactory
 
 DIH: Evaluator API has been changed in a non back-compatible way. Users who
 have developed custom Evaluators will need to change their code according to
@ -2993,7 +2998,7 @@ New Features
  7. SOLR-680: Add StatsComponent. This gets simple statistics on matched numeric fields,
     including: min, max, mean, median, stddev. (koji, ryan)
 
-    7.1 SOLR-1380: Added support for multi-valued fields (Harish Agarwal via gsingers)
+    - SOLR-1380: Added support for multi-valued fields (Harish Agarwal via gsingers)
 
  8. SOLR-561: Added Replication implemented in Java as a request handler. Supports index replication
     as well as configuration replication and exposes detailed statistics and progress information
@ -3514,11 +3519,12 @@ Bug Fixes
 28. SOLR-1008: Fix stats.jsp XML encoding for <stat> item entries with ampersands in their names. (ehatcher)
 
 29. SOLR-976: deleteByQuery is ignored when deleteById is placed prior to deleteByQuery in a <delete>.
-    Now both delete by id and delete by query can be specified at the same time as follows. (koji)
+    Now both delete by id and delete by query can be specified at the same time as follows.
     <delete>
       <id>05991</id><id>06000</id>
       <query>office:Bridgewater</query><query>office:Osaka</query>
     </delete>
+    (koji)
 
 30. SOLR-1016: HTTP 503 error changes 500 in SolrCore (koji)
 
@ -3777,7 +3783,7 @@ Other Changes
  8. SOLR-875: Upgraded to Lucene 2.9-dev (r723985) and consolidated the BitSet implementations (Michael Busch, gsingers)
 
  9. SOLR-819: Upgraded to Lucene 2.9-dev (r724059) to get access to Arabic public constructors (gsingers)
-    and
+
 10. SOLR-900: Moved solrj into /src/solrj. The contents of solr-common.jar is now included
     in the solr-solrj.jar. (ryan)
 
@ -3861,10 +3867,10 @@ Other Changes
     (hossman)
 
 42. Upgraded to Lucene 2.9-dev r794238. Other changes include:
-    LUCENE-1614 - Use Lucene's DocIdSetIterator.NO_MORE_DOCS as the sentinel value.
-    LUCENE-1630 - Add acceptsDocsOutOfOrder method to Collector implementations.
-    LUCENE-1673, LUCENE-1701 - Trie has moved to Lucene core and renamed to NumericRangeQuery.
-    LUCENE-1662, LUCENE-1687 - Replace usage of ExtendedFieldCache by FieldCache.
+    - LUCENE-1614 - Use Lucene's DocIdSetIterator.NO_MORE_DOCS as the sentinel value.
+    - LUCENE-1630 - Add acceptsDocsOutOfOrder method to Collector implementations.
+    - LUCENE-1673, LUCENE-1701 - Trie has moved to Lucene core and renamed to NumericRangeQuery.
+    - LUCENE-1662, LUCENE-1687 - Replace usage of ExtendedFieldCache by FieldCache.
     (shalin)
 
 42. SOLR-1241: Solr's CharFilter has been moved to Lucene. Remove CharFilter and related classes
@ -3874,7 +3880,7 @@ Other Changes
 
 44. Upgraded to Lucene 2.9-dev r801856 (Mark Miller)
 
-45. SOLR1276: Added StatsComponentTest (Rafa<EFBFBD>ł Ku<EFBFBD>ć, gsingers)
+45. SOLR-1276: Added StatsComponentTest (Rafał Kuć, gsingers)
 
 46. SOLR-1377: The TokenizerFactory API has changed to explicitly return a Tokenizer
     rather then a TokenStream (that may be or may not be a Tokenizer). This change
@ -3971,7 +3977,7 @@ Build
 
 Documentation
 ----------------------
-1. SOLR-789: The javadoc of RandomSortField is not readable (Nicolas Lalev<EFBFBD>Á<EFBFBD>e via koji)
+1. SOLR-789: The javadoc of RandomSortField is not readable (Nicolas Lalevée via koji)
 
 2. SOLR-962: Note about null handling in ModifiableSolrParams.add javadoc
    (Kay Kay via hossman)
@ -4016,8 +4022,8 @@ example solrconfig.xml) for more details...
 In Solr 1.2, DateField did not enforce the canonical representation of
 the ISO 8601 format when parsing incoming data, and did not generation
 the canonical format when generating dates from "Date Math" strings
-(particularly as it pertains to milliseconds ending in trailing zeros)
--- As a result equivalent dates could not always be compared properly.
+(particularly as it pertains to milliseconds ending in trailing zeros).
+As a result equivalent dates could not always be compared properly.
 This problem is corrected in Solr 1.3, but DateField users that might
 have been affected by indexing inconsistent formats of equivilent
 dates (ie: 1995-12-31T23:59:59Z vs 1995-12-31T23:59:59.000Z) may want
@ -4419,7 +4425,7 @@ Bug Fixes
  9. SOLR-294: Logging of elapsed time broken on Solaris because the date command
     there does not support the %s output format. (bill)
 
-10. SOLR-136: Snappuller - "date -d" and locales don't mix. (J<EFBFBD>Á<EFBFBD>rgen Hermann via bill)
+10. SOLR-136: Snappuller - "date -d" and locales don't mix. (Jürgen Hermann via bill)
 
 11. SOLR-333: Changed distributiondump.jsp to use Solr HOME instead of CWD to set path.
 
@ -4470,7 +4476,7 @@ Bug Fixes
     messages to be output by the SolrCore via a NamedList toLog member variable.
     (Will Johnson, yseeley, gsingers)
 
-    SOLR-267: Removed adding values to the HTTP headers in SolrDispatchFilter (gsingers)
+    - SOLR-267: Removed adding values to the HTTP headers in SolrDispatchFilter (gsingers)
 
 28. SOLR-509: Moved firstSearcher event notification to the end of the SolrCore constructor
     (Koji Sekiguchi via gsingers)
@ -320,6 +320,9 @@
     <!-- Exclude clover license files incompatible with the ASL -->
     <delete dir="${svn.export.dir}/lucene/tools/clover"/>
 
+    <build-changes changes.src.dir="${svn.export.dir}/site/changes"
+                   changes.target.dir="${svn.export.dir}/docs/changes"/>
+
     <tar destfile="${source.package.file}" compression="gzip" longfile="gnu">
       <tarfileset dir="${svn.export.dir}"
                   prefix="${fullnamever}"
@ -368,7 +371,7 @@
 
   <target name="create-package"
           description="Packages the Solr Binary Distribution"
-          depends="init-dist, dist, example, javadocs">
+          depends="init-dist, dist, example, javadocs, changes-to-html">
     <mkdir dir="${dest}/${fullnamever}"/>
     <delete includeemptydirs="true">
       <fileset dir="${example}/solr-webapp" includes="**/*"/>
@ -445,6 +448,10 @@
     <make-checksums file="${package.dir}/${fullnamever}.zip"/>
   </target>
 
+  <target name="changes-to-html">
+    <build-changes changes.src.dir="${changes.src.dir}" changes.target.dir="${changes.target.dir}" />
+  </target>
+
   <target name="sign-artifacts">
     <sign-artifacts-macro artifacts.dir="${package.dir}"/>
   </target>
@ -49,6 +49,8 @@
   <property name="tests.loggingfile" value="${common-solr.dir}/testlogging.properties"/>
   <property name="tests.cleanthreads.sysprop" value="perClass"/>
 
+  <property name="changes.target.dir" value="${dest}/docs/changes"/>
+
   <import file="${common-solr.dir}/../lucene/module-build.xml"/>
 
   <path id="additional.dependencies">