LUCENE-3877: Lucene should not call System.out.println

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1360240 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Robert Muir 2012-07-11 15:52:40 +00:00
parent aa87124205
commit 124edc221c
32 changed files with 225 additions and 246 deletions

View File

@@ -18,6 +18,7 @@
package org.apache.lucene.analysis.compound.hyphenation;
import java.io.File;
import java.io.PrintStream;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.HashMap;
@@ -463,10 +464,10 @@ public class HyphenationTree extends TernaryTree implements PatternConsumer {
}
@Override
public void printStats() {
System.out.println("Value space size = "
public void printStats(PrintStream out) {
out.println("Value space size = "
+ Integer.toString(vspace.length()));
super.printStats();
super.printStats(out);
}
}

View File

@@ -40,7 +40,7 @@ import javax.xml.parsers.SAXParserFactory;
*
* This class has been taken from the Apache FOP project (http://xmlgraphics.apache.org/fop/). They have been slightly modified.
*/
public class PatternParser extends DefaultHandler implements PatternConsumer {
public class PatternParser extends DefaultHandler {
XMLReader parser;
@@ -402,25 +402,4 @@ public class PatternParser extends DefaultHandler implements PatternConsumer {
return str.toString();
} // getLocationString(SAXParseException):String
// PatternConsumer implementation for testing purposes
public void addClass(String c) {
System.out.println("class: " + c);
}
public void addException(String w, ArrayList<Object> e) {
System.out.println("exception: " + w + " : " + e.toString());
}
public void addPattern(String p, String v) {
System.out.println("pattern: " + p + " : " + v);
}
public static void main(String[] args) throws Exception {
if (args.length > 0) {
PatternParser pp = new PatternParser();
pp.setConsumer(pp);
pp.parse(args[0]);
}
}
}

View File

@@ -17,6 +17,7 @@
package org.apache.lucene.analysis.compound.hyphenation;
import java.io.PrintStream;
import java.util.Enumeration;
import java.util.Stack;
@@ -633,11 +634,11 @@ public class TernaryTree implements Cloneable {
}
public void printStats() {
System.out.println("Number of keys = " + Integer.toString(length));
System.out.println("Node count = " + Integer.toString(freenode));
public void printStats(PrintStream out) {
out.println("Number of keys = " + Integer.toString(length));
out.println("Node count = " + Integer.toString(freenode));
// System.out.println("Array length = " + Integer.toString(eq.length));
System.out.println("Key Array length = " + Integer.toString(kv.length()));
out.println("Key Array length = " + Integer.toString(kv.length()));
/*
* for(int i=0; i<kv.length(); i++) if ( kv.get(i) != 0 )
@@ -647,7 +648,7 @@ public class TernaryTree implements Cloneable {
*/
}
/*
public static void main(String[] args) {
TernaryTree tt = new TernaryTree();
tt.insert("Carlos", 'C');
@@ -658,7 +659,8 @@ public class TernaryTree implements Cloneable {
System.out.println((char) tt.find("Car"));
System.out.println((char) tt.find("Carlos"));
System.out.println((char) tt.find("alto"));
tt.printStats();
tt.printStats(System.out);
}
*/
}

View File

@@ -289,7 +289,7 @@ public class KStemmer {
entry = new DictEntry(exceptionWords[i], true);
d.put(exceptionWords[i], entry);
} else {
System.out.println("Warning: Entry [" + exceptionWords[i]
throw new RuntimeException("Warning: Entry [" + exceptionWords[i]
+ "] already in dictionary 1");
}
}
@@ -299,7 +299,7 @@ public class KStemmer {
entry = new DictEntry(directConflations[i][1], false);
d.put(directConflations[i][0], entry);
} else {
System.out.println("Warning: Entry [" + directConflations[i][0]
throw new RuntimeException("Warning: Entry [" + directConflations[i][0]
+ "] already in dictionary 2");
}
}
@@ -309,7 +309,7 @@ public class KStemmer {
entry = new DictEntry(countryNationality[i][1], false);
d.put(countryNationality[i][0], entry);
} else {
System.out.println("Warning: Entry [" + countryNationality[i][0]
throw new RuntimeException("Warning: Entry [" + countryNationality[i][0]
+ "] already in dictionary 3");
}
}
@@ -323,7 +323,7 @@ public class KStemmer {
if (!d.containsKey(array[i])) {
d.put(array[i], defaultEntry);
} else {
System.out.println("Warning: Entry [" + array[i]
throw new RuntimeException("Warning: Entry [" + array[i]
+ "] already in dictionary 4");
}
}
@@ -333,7 +333,7 @@ public class KStemmer {
if (!d.containsKey(array[i])) {
d.put(array[i], defaultEntry);
} else {
System.out.println("Warning: Entry [" + array[i]
throw new RuntimeException("Warning: Entry [" + array[i]
+ "] already in dictionary 4");
}
}
@@ -343,7 +343,7 @@ public class KStemmer {
if (!d.containsKey(array[i])) {
d.put(array[i], defaultEntry);
} else {
System.out.println("Warning: Entry [" + array[i]
throw new RuntimeException("Warning: Entry [" + array[i]
+ "] already in dictionary 4");
}
}
@@ -353,7 +353,7 @@ public class KStemmer {
if (!d.containsKey(array[i])) {
d.put(array[i], defaultEntry);
} else {
System.out.println("Warning: Entry [" + array[i]
throw new RuntimeException("Warning: Entry [" + array[i]
+ "] already in dictionary 4");
}
}
@@ -363,7 +363,7 @@ public class KStemmer {
if (!d.containsKey(array[i])) {
d.put(array[i], defaultEntry);
} else {
System.out.println("Warning: Entry [" + array[i]
throw new RuntimeException("Warning: Entry [" + array[i]
+ "] already in dictionary 4");
}
}
@@ -373,7 +373,7 @@ public class KStemmer {
if (!d.containsKey(array[i])) {
d.put(array[i], defaultEntry);
} else {
System.out.println("Warning: Entry [" + array[i]
throw new RuntimeException("Warning: Entry [" + array[i]
+ "] already in dictionary 4");
}
}
@@ -383,7 +383,7 @@ public class KStemmer {
if (!d.containsKey(array[i])) {
d.put(array[i], defaultEntry);
} else {
System.out.println("Warning: Entry [" + array[i]
throw new RuntimeException("Warning: Entry [" + array[i]
+ "] already in dictionary 4");
}
}
@@ -392,7 +392,7 @@ public class KStemmer {
if (!d.containsKey(KStemData8.data[i])) {
d.put(KStemData8.data[i], defaultEntry);
} else {
System.out.println("Warning: Entry [" + KStemData8.data[i]
throw new RuntimeException("Warning: Entry [" + KStemData8.data[i]
+ "] already in dictionary 4");
}
}
@@ -401,7 +401,7 @@ public class KStemmer {
if (!d.containsKey(supplementDict[i])) {
d.put(supplementDict[i], defaultEntry);
} else {
System.out.println("Warning: Entry [" + supplementDict[i]
throw new RuntimeException("Warning: Entry [" + supplementDict[i]
+ "] already in dictionary 5");
}
}
@@ -410,7 +410,7 @@ public class KStemmer {
if (!d.containsKey(properNouns[i])) {
d.put(properNouns[i], defaultEntry);
} else {
System.out.println("Warning: Entry [" + properNouns[i]
throw new RuntimeException("Warning: Entry [" + properNouns[i]
+ "] already in dictionary 6");
}
}

View File

@@ -492,10 +492,9 @@ class PorterStemmer
return dirty;
}
/** Test program for demonstrating the Stemmer. It reads a file and
/* Test program for demonstrating the Stemmer. It reads a file and
* stems each word, writing the result to standard out.
* Usage: Stemmer file-name
*/
public static void main(String[] args) {
PorterStemmer s = new PorterStemmer();
@@ -542,6 +541,6 @@ class PorterStemmer
System.out.println("error reading " + args[i]);
}
}
}
}*/
}

View File

@@ -299,13 +299,12 @@ public class HunspellStemmer {
// ================================================= Entry Point ===================================================
/**
/*
* HunspellStemmer entry point. Accepts two arguments: location of affix file and location of dic file
*
* @param args Program arguments. Should contain location of affix file and location of dic file
* @throws IOException Can be thrown while reading from the files
* @throws ParseException Can be thrown while parsing the files
*/
public static void main(String[] args) throws IOException, ParseException {
boolean ignoreCase = false;
int offset = 0;
@@ -347,12 +346,10 @@ public class HunspellStemmer {
}
}
/**
* Prints the results of the stemming of a word
*
* @param originalWord Word that has been stemmed
* @param stems Stems of the word
*/
private static void printStemResults(String originalWord, List<Stem> stems) {
StringBuilder builder = new StringBuilder().append("stem(").append(originalWord).append(")").append("\n");
@@ -382,13 +379,12 @@ public class HunspellStemmer {
System.out.println(builder);
}
/**
* Simple utility to check if the given String has any text
*
* @param str String to check if it has any text
* @return {@code true} if the String has text, {@code false} otherwise
*/
private static boolean hasText(String str) {
return str != null && str.length() > 0;
}
*/
}

View File

@@ -132,7 +132,7 @@ public abstract class RSLPStemmerBase {
super(suffix, min, replacement);
for (int i = 0; i < exceptions.length; i++) {
if (!exceptions[i].endsWith(suffix))
System.err.println("warning: useless exception '" + exceptions[i] + "' does not end with '" + suffix + "'");
throw new RuntimeException("useless exception '" + exceptions[i] + "' does not end with '" + suffix + "'");
}
this.exceptions = new CharArraySet(Version.LUCENE_50,
Arrays.asList(exceptions), false);
@@ -156,7 +156,7 @@ public abstract class RSLPStemmerBase {
super(suffix, min, replacement);
for (int i = 0; i < exceptions.length; i++) {
if (!exceptions[i].endsWith(suffix))
System.err.println("warning: useless exception '" + exceptions[i] + "' does not end with '" + suffix + "'");
throw new RuntimeException("warning: useless exception '" + exceptions[i] + "' does not end with '" + suffix + "'");
}
this.exceptions = new char[exceptions.length][];
for (int i = 0; i < exceptions.length; i++)

View File

@@ -435,7 +435,7 @@ public abstract class SnowballProgram {
bra > ket ||
ket > limit)
{
System.err.println("faulty slice operation");
throw new IllegalArgumentException("faulty slice operation: bra=" + bra + ",ket=" + ket + ",limit=" + limit);
// FIXME: report error somehow.
/*
fprintf(stderr, "faulty slice operation:\n");

View File

@@ -66,12 +66,9 @@ public class AnalyzerProfile {
if (ANALYSIS_DATA_DIR.length() == 0) {
// Dictionary directory cannot be found.
System.err
.println("WARNING: Can not find lexical dictionary directory!");
System.err
.println("WARNING: This will cause unpredictable exceptions in your application!");
System.err
.println("WARNING: Please refer to the manual to download the dictionaries.");
throw new RuntimeException("WARNING: Can not find lexical dictionary directory!"
+ " This will cause unpredictable exceptions in your application!"
+ " Please refer to the manual to download the dictionaries.");
}
}

View File

@@ -118,9 +118,8 @@ abstract class AbstractDictionary {
// Therefore, each code page only has 16*6-2=94 characters.
return (short) (b0 * 94 + b1);
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
throw new RuntimeException(e);
}
return -1;
}
/**

View File

@@ -79,14 +79,9 @@ class BigramDictionary extends AbstractDictionary {
try {
loadFromInputStream(new FileInputStream(serialObj));
return true;
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} catch (ClassNotFoundException e) {
e.printStackTrace();
} catch (Exception e) {
throw new RuntimeException(e);
}
return false;
}
private void loadFromInputStream(InputStream serialObjectInputStream)

View File

@@ -145,14 +145,9 @@ class WordDictionary extends AbstractDictionary {
try {
loadFromObjectInputStream(new FileInputStream(serialObj));
return true;
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} catch (ClassNotFoundException e) {
e.printStackTrace();
} catch (Exception e) {
throw new RuntimeException(e);
}
return false;
}
private void loadFromObjectInputStream(InputStream serialObjectInputStream)

View File

@@ -85,7 +85,7 @@ public class Compile {
*
* @param args the command line arguments
*/
public static void main(java.lang.String[] args) {
public static void main(java.lang.String[] args) throws Exception {
if (args.length < 1) {
return;
}
@@ -117,82 +117,75 @@ public class Compile {
LineNumberReader in;
// System.out.println("[" + args[i] + "]");
Diff diff = new Diff();
try {
int stems = 0;
int words = 0;
allocTrie();
System.out.println(args[i]);
in = new LineNumberReader(new BufferedReader(new InputStreamReader(
new FileInputStream(args[i]), charset)));
for (String line = in.readLine(); line != null; line = in.readLine()) {
try {
line = line.toLowerCase(Locale.ROOT);
StringTokenizer st = new StringTokenizer(line);
String stem = st.nextToken();
if (storeorig) {
trie.add(stem, "-a");
int stems = 0;
int words = 0;
allocTrie();
System.out.println(args[i]);
in = new LineNumberReader(new BufferedReader(new InputStreamReader(
new FileInputStream(args[i]), charset)));
for (String line = in.readLine(); line != null; line = in.readLine()) {
try {
line = line.toLowerCase(Locale.ROOT);
StringTokenizer st = new StringTokenizer(line);
String stem = st.nextToken();
if (storeorig) {
trie.add(stem, "-a");
words++;
}
while (st.hasMoreTokens()) {
String token = st.nextToken();
if (token.equals(stem) == false) {
trie.add(token, diff.exec(token, stem));
words++;
}
while (st.hasMoreTokens()) {
String token = st.nextToken();
if (token.equals(stem) == false) {
trie.add(token, diff.exec(token, stem));
words++;
}
}
} catch (java.util.NoSuchElementException x) {
// no base token (stem) on a line
}
} catch (java.util.NoSuchElementException x) {
// no base token (stem) on a line
}
Optimizer o = new Optimizer();
Optimizer2 o2 = new Optimizer2();
Lift l = new Lift(true);
Lift e = new Lift(false);
Gener g = new Gener();
for (int j = 0; j < optimizer.length; j++) {
String prefix;
switch (optimizer[j]) {
case 'G':
trie = trie.reduce(g);
prefix = "G: ";
break;
case 'L':
trie = trie.reduce(l);
prefix = "L: ";
break;
case 'E':
trie = trie.reduce(e);
prefix = "E: ";
break;
case '2':
trie = trie.reduce(o2);
prefix = "2: ";
break;
case '1':
trie = trie.reduce(o);
prefix = "1: ";
break;
default:
continue;
}
trie.printInfo(prefix + " ");
}
DataOutputStream os = new DataOutputStream(new BufferedOutputStream(
new FileOutputStream(args[i] + ".out")));
os.writeUTF(args[0]);
trie.store(os);
os.close();
} catch (FileNotFoundException x) {
x.printStackTrace();
} catch (IOException x) {
x.printStackTrace();
}
Optimizer o = new Optimizer();
Optimizer2 o2 = new Optimizer2();
Lift l = new Lift(true);
Lift e = new Lift(false);
Gener g = new Gener();
for (int j = 0; j < optimizer.length; j++) {
String prefix;
switch (optimizer[j]) {
case 'G':
trie = trie.reduce(g);
prefix = "G: ";
break;
case 'L':
trie = trie.reduce(l);
prefix = "L: ";
break;
case 'E':
trie = trie.reduce(e);
prefix = "E: ";
break;
case '2':
trie = trie.reduce(o2);
prefix = "2: ";
break;
case '1':
trie = trie.reduce(o);
prefix = "1: ";
break;
default:
continue;
}
trie.printInfo(System.out, prefix + " ");
}
DataOutputStream os = new DataOutputStream(new BufferedOutputStream(
new FileOutputStream(args[i] + ".out")));
os.writeUTF(args[0]);
trie.store(os);
os.close();
}
}

View File

@@ -56,7 +56,6 @@ package org.egothor.stemmer;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.LineNumberReader;
import java.util.Locale;
@@ -85,7 +84,7 @@ public class DiffIt {
*
* @param args the path to a file containing a stemmer table
*/
public static void main(java.lang.String[] args) {
public static void main(java.lang.String[] args) throws Exception {
int ins = get(0, args[0]);
int del = get(1, args[0]);
@@ -96,28 +95,23 @@ public class DiffIt {
LineNumberReader in;
// System.out.println("[" + args[i] + "]");
Diff diff = new Diff(ins, del, rep, nop);
try {
String charset = System.getProperty("egothor.stemmer.charset", "UTF-8");
in = new LineNumberReader(new BufferedReader(new InputStreamReader(new FileInputStream(args[i]), charset)));
for (String line = in.readLine(); line != null; line = in.readLine()) {
try {
line = line.toLowerCase(Locale.ROOT);
StringTokenizer st = new StringTokenizer(line);
String stem = st.nextToken();
System.out.println(stem + " -a");
while (st.hasMoreTokens()) {
String token = st.nextToken();
if (token.equals(stem) == false) {
System.out.println(stem + " " + diff.exec(token, stem));
}
String charset = System.getProperty("egothor.stemmer.charset", "UTF-8");
in = new LineNumberReader(new BufferedReader(new InputStreamReader(new FileInputStream(args[i]), charset)));
for (String line = in.readLine(); line != null; line = in.readLine()) {
try {
line = line.toLowerCase(Locale.ROOT);
StringTokenizer st = new StringTokenizer(line);
String stem = st.nextToken();
System.out.println(stem + " -a");
while (st.hasMoreTokens()) {
String token = st.nextToken();
if (token.equals(stem) == false) {
System.out.println(stem + " " + diff.exec(token, stem));
}
} catch (java.util.NoSuchElementException x) {
// no base token (stem) on a line
}
} catch (java.util.NoSuchElementException x) {
// no base token (stem) on a line
}
} catch (IOException x) {
x.printStackTrace();
}
}
}

View File

@@ -57,6 +57,7 @@ package org.egothor.stemmer;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.List;
@@ -200,9 +201,9 @@ public class MultiTrie extends Trie {
* @param prefix the desired prefix
*/
@Override
public void printInfo(CharSequence prefix) {
public void printInfo(PrintStream out, CharSequence prefix) {
int c = 0;
for (Trie trie : tries)
trie.printInfo(prefix + "[" + (++c) + "] ");
trie.printInfo(out, prefix + "[" + (++c) + "] ");
}
}

View File

@@ -57,6 +57,7 @@ package org.egothor.stemmer;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.PrintStream;
import java.util.Iterator;
import java.util.TreeMap;
@@ -292,15 +293,15 @@ public class Row {
}
/**
* Write the contents of this Row to stdout.
* Write the contents of this Row to the printstream.
*/
public void print() {
public void print(PrintStream out) {
for (Iterator<Character> i = cells.keySet().iterator(); i.hasNext();) {
Character ch = i.next();
Cell c = at(ch);
System.out.print("[" + ch + ":" + c + "]");
out.print("[" + ch + ":" + c + "]");
}
System.out.println();
out.println();
}
Cell at(Character index) {

View File

@@ -57,6 +57,7 @@ package org.egothor.stemmer;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.List;
@@ -375,8 +376,8 @@ public class Trie {
return by.optimize(this);
}
public void printInfo(CharSequence prefix) {
System.out.println(prefix + "nds " + rows.size() + " cmds " + cmds.size()
public void printInfo(PrintStream out, CharSequence prefix) {
out.println(prefix + "nds " + rows.size() + " cmds " + cmds.size()
+ " cells " + getCells() + " valcells " + getCellsVal() + " pntcells "
+ getCellsPnt());
}

View File

@@ -170,7 +170,7 @@
</target>
<!-- Validation (license/notice/api checks). -->
<target name="validate" depends="check-licenses,check-forbidden-apis" description="Validate stuff." />
<target name="validate" depends="check-licenses,check-forbidden-apis,check-system-out" description="Validate stuff." />
<target name="check-licenses" depends="compile-tools,resolve,load-custom-tasks" description="Validate license stuff.">
<license-check-macro dir="${basedir}" />
@@ -186,6 +186,41 @@
</forbidden-apis>
</target>
<target name="check-system-out" depends="compile-tools,compile-test,load-custom-tasks" description="Check for use of System.out, etc in non-test code.">
<forbidden-apis>
<apiFileSet dir="${custom-tasks.dir}/forbiddenApis">
<include name="system-out.txt" />
</apiFileSet>
<fileset dir="${basedir}/build">
<include name="**/classes/java/**/*.class"/>
<!-- this is basically tests -->
<exclude name="test-framework/**"/>
<!-- exclude command line tools -->
<exclude name="core/classes/java/org/apache/lucene/index/CheckIndex.class"/>
<exclude name="core/classes/java/org/apache/lucene/index/IndexUpgrader.class"/>
<exclude name="core/classes/java/org/apache/lucene/store/LockVerifyServer.class"/>
<exclude name="core/classes/java/org/apache/lucene/store/LockStressTest.class"/>
<exclude name="analysis/stempel/classes/java/org/egothor/stemmer/Compile.class"/>
<exclude name="analysis/stempel/classes/java/org/egothor/stemmer/DiffIt.class"/>
<exclude name="benchmark/**"/>
<exclude name="demo/classes/java/org/apache/lucene/demo/IndexFiles.class"/>
<exclude name="demo/classes/java/org/apache/lucene/demo/SearchFiles.class"/>
<exclude name="misc/classes/java/org/apache/lucene/index/CompoundFileExtractor.class"/>
<exclude name="misc/classes/java/org/apache/lucene/index/IndexSplitter.class"/>
<exclude name="misc/classes/java/org/apache/lucene/index/MultiPassIndexSplitter.class"/>
<exclude name="misc/classes/java/org/apache/lucene/misc/GetTermInfo.class"/>
<exclude name="misc/classes/java/org/apache/lucene/misc/HighFreqTerms.class"/>
<exclude name="misc/classes/java/org/apache/lucene/misc/IndexMergeTool.class"/>
<!-- not printing, just checking if its a system stream -->
<exclude name="core/classes/java/org/apache/lucene/util/PrintStreamInfoStream.class"/>
<!-- not printing, just generated code from javacc -->
<exclude name="queryparser/classes/java/org/apache/lucene/queryparser/classic/QueryParserTokenManager.class"/>
<exclude name="queryparser/classes/java/org/apache/lucene/queryparser/flexible/standard/parser/StandardSyntaxParserTokenManager.class"/>
<exclude name="queryparser/classes/java/org/apache/lucene/queryparser/surround/parser/QueryParserTokenManager.class"/>
</fileset>
</forbidden-apis>
</target>
<target name="resolve">
<sequential>
<ant dir="test-framework" target="resolve" inheritall="false">

View File

@@ -1958,11 +1958,11 @@ public class BlockTreeTermsReader extends FieldsProducer {
}
@SuppressWarnings("unused")
private void printSeekState() throws IOException {
private void printSeekState(PrintStream out) throws IOException {
if (currentFrame == staticFrame) {
System.out.println(" no prior seek");
out.println(" no prior seek");
} else {
System.out.println(" prior seek state:");
out.println(" prior seek state:");
int ord = 0;
boolean isSeekFrame = true;
while(true) {
@@ -1970,26 +1970,26 @@ public class BlockTreeTermsReader extends FieldsProducer {
assert f != null;
final BytesRef prefix = new BytesRef(term.bytes, 0, f.prefix);
if (f.nextEnt == -1) {
System.out.println(" frame " + (isSeekFrame ? "(seek)" : "(next)") + " ord=" + ord + " fp=" + f.fp + (f.isFloor ? (" (fpOrig=" + f.fpOrig + ")") : "") + " prefixLen=" + f.prefix + " prefix=" + prefix + (f.nextEnt == -1 ? "" : (" (of " + f.entCount + ")")) + " hasTerms=" + f.hasTerms + " isFloor=" + f.isFloor + " code=" + ((f.fp<<BlockTreeTermsWriter.OUTPUT_FLAGS_NUM_BITS) + (f.hasTerms ? BlockTreeTermsWriter.OUTPUT_FLAG_HAS_TERMS:0) + (f.isFloor ? BlockTreeTermsWriter.OUTPUT_FLAG_IS_FLOOR:0)) + " isLastInFloor=" + f.isLastInFloor + " mdUpto=" + f.metaDataUpto + " tbOrd=" + f.getTermBlockOrd());
out.println(" frame " + (isSeekFrame ? "(seek)" : "(next)") + " ord=" + ord + " fp=" + f.fp + (f.isFloor ? (" (fpOrig=" + f.fpOrig + ")") : "") + " prefixLen=" + f.prefix + " prefix=" + prefix + (f.nextEnt == -1 ? "" : (" (of " + f.entCount + ")")) + " hasTerms=" + f.hasTerms + " isFloor=" + f.isFloor + " code=" + ((f.fp<<BlockTreeTermsWriter.OUTPUT_FLAGS_NUM_BITS) + (f.hasTerms ? BlockTreeTermsWriter.OUTPUT_FLAG_HAS_TERMS:0) + (f.isFloor ? BlockTreeTermsWriter.OUTPUT_FLAG_IS_FLOOR:0)) + " isLastInFloor=" + f.isLastInFloor + " mdUpto=" + f.metaDataUpto + " tbOrd=" + f.getTermBlockOrd());
} else {
System.out.println(" frame " + (isSeekFrame ? "(seek, loaded)" : "(next, loaded)") + " ord=" + ord + " fp=" + f.fp + (f.isFloor ? (" (fpOrig=" + f.fpOrig + ")") : "") + " prefixLen=" + f.prefix + " prefix=" + prefix + " nextEnt=" + f.nextEnt + (f.nextEnt == -1 ? "" : (" (of " + f.entCount + ")")) + " hasTerms=" + f.hasTerms + " isFloor=" + f.isFloor + " code=" + ((f.fp<<BlockTreeTermsWriter.OUTPUT_FLAGS_NUM_BITS) + (f.hasTerms ? BlockTreeTermsWriter.OUTPUT_FLAG_HAS_TERMS:0) + (f.isFloor ? BlockTreeTermsWriter.OUTPUT_FLAG_IS_FLOOR:0)) + " lastSubFP=" + f.lastSubFP + " isLastInFloor=" + f.isLastInFloor + " mdUpto=" + f.metaDataUpto + " tbOrd=" + f.getTermBlockOrd());
out.println(" frame " + (isSeekFrame ? "(seek, loaded)" : "(next, loaded)") + " ord=" + ord + " fp=" + f.fp + (f.isFloor ? (" (fpOrig=" + f.fpOrig + ")") : "") + " prefixLen=" + f.prefix + " prefix=" + prefix + " nextEnt=" + f.nextEnt + (f.nextEnt == -1 ? "" : (" (of " + f.entCount + ")")) + " hasTerms=" + f.hasTerms + " isFloor=" + f.isFloor + " code=" + ((f.fp<<BlockTreeTermsWriter.OUTPUT_FLAGS_NUM_BITS) + (f.hasTerms ? BlockTreeTermsWriter.OUTPUT_FLAG_HAS_TERMS:0) + (f.isFloor ? BlockTreeTermsWriter.OUTPUT_FLAG_IS_FLOOR:0)) + " lastSubFP=" + f.lastSubFP + " isLastInFloor=" + f.isLastInFloor + " mdUpto=" + f.metaDataUpto + " tbOrd=" + f.getTermBlockOrd());
}
if (index != null) {
assert !isSeekFrame || f.arc != null: "isSeekFrame=" + isSeekFrame + " f.arc=" + f.arc;
if (f.prefix > 0 && isSeekFrame && f.arc.label != (term.bytes[f.prefix-1]&0xFF)) {
System.out.println(" broken seek state: arc.label=" + (char) f.arc.label + " vs term byte=" + (char) (term.bytes[f.prefix-1]&0xFF));
out.println(" broken seek state: arc.label=" + (char) f.arc.label + " vs term byte=" + (char) (term.bytes[f.prefix-1]&0xFF));
throw new RuntimeException("seek state is broken");
}
BytesRef output = Util.get(index, prefix);
if (output == null) {
System.out.println(" broken seek state: prefix is not final in index");
out.println(" broken seek state: prefix is not final in index");
throw new RuntimeException("seek state is broken");
} else if (isSeekFrame && !f.isFloor) {
final ByteArrayDataInput reader = new ByteArrayDataInput(output.bytes, output.offset, output.length);
final long codeOrig = reader.readVLong();
final long code = (f.fp << BlockTreeTermsWriter.OUTPUT_FLAGS_NUM_BITS) | (f.hasTerms ? BlockTreeTermsWriter.OUTPUT_FLAG_HAS_TERMS:0) | (f.isFloor ? BlockTreeTermsWriter.OUTPUT_FLAG_IS_FLOOR:0);
if (codeOrig != code) {
System.out.println(" broken seek state: output code=" + codeOrig + " doesn't match frame code=" + code);
out.println(" broken seek state: output code=" + codeOrig + " doesn't match frame code=" + code);
throw new RuntimeException("seek state is broken");
}
}

View File

@@ -113,7 +113,7 @@ public final class PulsingPostingsWriter extends PostingsWriterBase {
@Override
public void startTerm() {
if (DEBUG) System.out.println("PW startTerm");
//if (DEBUG) System.out.println("PW startTerm");
assert pendingCount == 0;
}
@@ -125,7 +125,7 @@ public final class PulsingPostingsWriter extends PostingsWriterBase {
@Override
public void setField(FieldInfo fieldInfo) {
this.indexOptions = fieldInfo.getIndexOptions();
if (DEBUG) System.out.println("PW field=" + fieldInfo.name + " indexOptions=" + indexOptions);
//if (DEBUG) System.out.println("PW field=" + fieldInfo.name + " indexOptions=" + indexOptions);
storePayloads = fieldInfo.hasPayloads();
wrappedPostingsWriter.setField(fieldInfo);
//DEBUG = BlockTreeTermsWriter.DEBUG;
@@ -147,11 +147,11 @@ public final class PulsingPostingsWriter extends PostingsWriterBase {
}
*/
if (DEBUG) System.out.println("PW doc=" + docID);
//if (DEBUG) System.out.println("PW doc=" + docID);
if (pendingCount == pending.length) {
push();
if (DEBUG) System.out.println("PW: wrapped.finishDoc");
//if (DEBUG) System.out.println("PW: wrapped.finishDoc");
wrappedPostingsWriter.finishDoc();
}
@@ -177,7 +177,7 @@ public final class PulsingPostingsWriter extends PostingsWriterBase {
@Override
public void addPosition(int position, BytesRef payload, int startOffset, int endOffset) throws IOException {
if (DEBUG) System.out.println("PW pos=" + position + " payload=" + (payload == null ? "null" : payload.length + " bytes"));
//if (DEBUG) System.out.println("PW pos=" + position + " payload=" + (payload == null ? "null" : payload.length + " bytes"));
if (pendingCount == pending.length) {
push();
}
@@ -207,7 +207,7 @@ public final class PulsingPostingsWriter extends PostingsWriterBase {
@Override
public void finishDoc() throws IOException {
if (DEBUG) System.out.println("PW finishDoc");
// if (DEBUG) System.out.println("PW finishDoc");
if (pendingCount == -1) {
wrappedPostingsWriter.finishDoc();
}
@@ -220,7 +220,7 @@ public final class PulsingPostingsWriter extends PostingsWriterBase {
/** Called when we are done adding docs to this term */
@Override
public void finishTerm(TermStats stats) throws IOException {
if (DEBUG) System.out.println("PW finishTerm docCount=" + stats.docFreq + " pendingCount=" + pendingCount + " pendingTerms.size()=" + pendingTerms.size());
// if (DEBUG) System.out.println("PW finishTerm docCount=" + stats.docFreq + " pendingCount=" + pendingCount + " pendingTerms.size()=" + pendingTerms.size());
assert pendingCount > 0 || pendingCount == -1;
@@ -252,7 +252,7 @@ public final class PulsingPostingsWriter extends PostingsWriterBase {
final int delta = doc.docID - lastDocID;
lastDocID = doc.docID;
if (DEBUG) System.out.println(" write doc=" + doc.docID + " freq=" + doc.termFreq);
// if (DEBUG) System.out.println(" write doc=" + doc.docID + " freq=" + doc.termFreq);
if (doc.termFreq == 1) {
buffer.writeVInt((delta<<1)|1);
@@ -268,7 +268,7 @@ public final class PulsingPostingsWriter extends PostingsWriterBase {
assert pos.docID == doc.docID;
final int posDelta = pos.pos - lastPos;
lastPos = pos.pos;
if (DEBUG) System.out.println(" write pos=" + pos.pos);
// if (DEBUG) System.out.println(" write pos=" + pos.pos);
final int payloadLength = pos.payload == null ? 0 : pos.payload.length;
if (storePayloads) {
if (payloadLength != lastPayloadLength) {
@@ -341,7 +341,7 @@ public final class PulsingPostingsWriter extends PostingsWriterBase {
@Override
public void flushTermsBlock(int start, int count) throws IOException {
if (DEBUG) System.out.println("PW: flushTermsBlock start=" + start + " count=" + count + " pendingTerms.size()=" + pendingTerms.size());
// if (DEBUG) System.out.println("PW: flushTermsBlock start=" + start + " count=" + count + " pendingTerms.size()=" + pendingTerms.size());
int wrappedCount = 0;
assert buffer.getFilePointer() == 0;
assert start >= count;
@@ -375,7 +375,7 @@ public final class PulsingPostingsWriter extends PostingsWriterBase {
// Remove the terms we just wrote:
pendingTerms.subList(pendingTerms.size()-start, limit).clear();
if (DEBUG) System.out.println("PW: len=" + buffer.getFilePointer() + " fp=" + termsOut.getFilePointer() + " futureWrappedCount=" + futureWrappedCount + " wrappedCount=" + wrappedCount);
// if (DEBUG) System.out.println("PW: len=" + buffer.getFilePointer() + " fp=" + termsOut.getFilePointer() + " futureWrappedCount=" + futureWrappedCount + " wrappedCount=" + wrappedCount);
// TODO: can we avoid calling this if all terms
// were inlined...? Eg for a "primary key" field, the
// wrapped codec is never invoked...
@@ -384,7 +384,7 @@ public final class PulsingPostingsWriter extends PostingsWriterBase {
// Pushes pending positions to the wrapped codec
private void push() throws IOException {
if (DEBUG) System.out.println("PW now push @ " + pendingCount + " wrapped=" + wrappedPostingsWriter);
// if (DEBUG) System.out.println("PW now push @ " + pendingCount + " wrapped=" + wrappedPostingsWriter);
assert pendingCount == pending.length;
wrappedPostingsWriter.startTerm();
@@ -395,17 +395,17 @@ public final class PulsingPostingsWriter extends PostingsWriterBase {
for(Position pos : pending) {
if (doc == null) {
doc = pos;
if (DEBUG) System.out.println("PW: wrapped.startDoc docID=" + doc.docID + " tf=" + doc.termFreq);
// if (DEBUG) System.out.println("PW: wrapped.startDoc docID=" + doc.docID + " tf=" + doc.termFreq);
wrappedPostingsWriter.startDoc(doc.docID, doc.termFreq);
} else if (doc.docID != pos.docID) {
assert pos.docID > doc.docID;
if (DEBUG) System.out.println("PW: wrapped.finishDoc");
// if (DEBUG) System.out.println("PW: wrapped.finishDoc");
wrappedPostingsWriter.finishDoc();
doc = pos;
if (DEBUG) System.out.println("PW: wrapped.startDoc docID=" + doc.docID + " tf=" + doc.termFreq);
// if (DEBUG) System.out.println("PW: wrapped.startDoc docID=" + doc.docID + " tf=" + doc.termFreq);
wrappedPostingsWriter.startDoc(doc.docID, doc.termFreq);
}
if (DEBUG) System.out.println("PW: wrapped.addPos pos=" + pos.pos);
// if (DEBUG) System.out.println("PW: wrapped.addPos pos=" + pos.pos);
wrappedPostingsWriter.addPosition(pos.pos, pos.payload, pos.startOffset, pos.endOffset);
}
//wrappedPostingsWriter.finishDoc();

View File

@ -389,10 +389,8 @@ public class ArrayHashMap<K,V> implements Iterable<V> {
/** Prints the baseHash array, used for debugging purposes. */
@SuppressWarnings("unused")
private void printBaseHash() {
for (int i : baseHash) {
System.out.println(i + ".\t" + i);
}
private String getBaseHashAsString() {
return Arrays.toString(this.baseHash);
}
/**

View File

@ -462,10 +462,8 @@ public class FloatToObjectMap<T> implements Iterable<T> {
* Prints the baseHash array, used for DEBUG purposes.
*/
@SuppressWarnings("unused")
private void printBaseHash() {
for (int i = 0; i < this.baseHash.length; i++) {
System.out.println(i + ".\t" + baseHash[i]);
}
private String getBaseHashAsString() {
return Arrays.toString(this.baseHash);
}
/**

View File

@ -389,12 +389,9 @@ public class IntHashSet {
/**
* Prints the baseHash array, used for debug purposes.
*/
public void printBaseHash() {
for (int i = 0; i < this.baseHash.length; i++) {
if (baseHash[i] != 0) {
System.out.println(i + ".\t" + baseHash[i]);
}
}
@SuppressWarnings("unused")
private String getBaseHashAsString() {
return Arrays.toString(this.baseHash);
}
/**

View File

@ -461,10 +461,8 @@ public class IntToDoubleMap {
* Prints the baseHash array, used for debug purposes.
*/
@SuppressWarnings("unused")
private void printBaseHash() {
for (int i = 0; i < this.baseHash.length; i++) {
System.out.println(i + ".\t" + baseHash[i]);
}
private String getBaseHashAsString() {
return Arrays.toString(this.baseHash);
}
/**

View File

@ -458,10 +458,8 @@ public class IntToIntMap {
* Prints the baseHash array, used for debug purposes.
*/
@SuppressWarnings("unused")
private void printBaseHash() {
for (int i = 0; i < this.baseHash.length; i++) {
System.out.println(i + ".\t" + baseHash[i]);
}
private String getBaseHashAsString() {
return Arrays.toString(this.baseHash);
}
/**

View File

@ -462,10 +462,8 @@ public class IntToObjectMap<T> implements Iterable<T> {
* Prints the baseHash array, used for debug purposes.
*/
@SuppressWarnings("unused")
private void printBaseHash() {
for (int i = 0; i < this.baseHash.length; i++) {
System.out.println(i + ".\t" + baseHash[i]);
}
private String getBaseHashAsString() {
return Arrays.toString(baseHash);
}
/**

View File

@ -463,10 +463,8 @@ public class ObjectToFloatMap<K> {
* Prints the baseHash array, used for debug purposes.
*/
@SuppressWarnings("unused")
private void printBaseHash() {
for (int i = 0; i < this.baseHash.length; i++) {
System.out.println(i + ".\t" + baseHash[i]);
}
private String getBaseHashAsString() {
return Arrays.toString(baseHash);
}
/**

View File

@ -462,10 +462,8 @@ public class ObjectToIntMap<K> {
* Prints the baseHash array, used for debug purposes.
*/
@SuppressWarnings("unused")
private void printBaseHash() {
for (int i = 0; i < this.baseHash.length; i++) {
System.out.println(i + ".\t" + baseHash[i]);
}
private String getBaseHashAsString() {
return Arrays.toString(baseHash);
}
/**

View File

@ -174,13 +174,13 @@ public class NLS {
Locale.getDefault());
if (resourceBundle != null) {
Object obj = resourceBundle.getObject(key);
if (obj == null)
System.err.println("WARN: Message with key:" + key + " and locale: "
+ Locale.getDefault() + " not found.");
//if (obj == null)
// System.err.println("WARN: Message with key:" + key + " and locale: "
// + Locale.getDefault() + " not found.");
}
} catch (MissingResourceException e) {
System.err.println("WARN: Message with key:" + key + " and locale: "
+ Locale.getDefault() + " not found.");
//System.err.println("WARN: Message with key:" + key + " and locale: "
// + Locale.getDefault() + " not found.");
} catch (Throwable e) {
// ignore all other errors and exceptions
// since this code is just a test to see if the message is present on the

View File

@ -60,7 +60,7 @@ public class FieldsQuery extends SrndQuery { /* mostly untested */
OrQuery oq = new OrQuery(queries,
true /* infix OR for field names */,
OrOperatorName);
System.out.println(getClass().toString() + ", fields expanded: " + oq.toString()); /* needs testing */
// System.out.println(getClass().toString() + ", fields expanded: " + oq.toString()); /* needs testing */
return oq.makeLuceneQueryField(null, qf);
}
}

View File

@ -24,6 +24,7 @@ import com.spatial4j.core.shape.Shape;
import com.spatial4j.core.shape.SpatialRelation;
import com.spatial4j.core.shape.simple.PointImpl;
import java.io.PrintStream;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Collection;
@ -107,14 +108,14 @@ public class QuadPrefixTree extends SpatialPrefixTree {
this(ctx, ctx.getWorldBounds(), maxLevels);
}
public void printInfo() {
public void printInfo(PrintStream out) {
NumberFormat nf = NumberFormat.getNumberInstance(Locale.ROOT);
nf.setMaximumFractionDigits(5);
nf.setMinimumFractionDigits(5);
nf.setMinimumIntegerDigits(3);
for (int i = 0; i < maxLevels; i++) {
System.out.println(i + "]\t" + nf.format(levelW[i]) + "\t" + nf.format(levelH[i]) + "\t" +
out.println(i + "]\t" + nf.format(levelW[i]) + "\t" + nf.format(levelH[i]) + "\t" +
levelS[i] + "\t" + (levelS[i] * levelS[i]));
}
}

View File

@ -0,0 +1,7 @@
# Don't allow System.out/System.err usage in non-test Lucene code
java.lang.System#out
java.lang.System#err
java.lang.Thread#dumpStack()
# eclipse autogenerated stubs!
java.lang.Throwable#printStackTrace()