Clean up imports of forbidden stuff. Fix small bug with empty token streams.

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1651363 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Uwe Schindler 2015-01-13 14:47:50 +00:00
parent 847cd8aa59
commit 8c86381eab
2 changed files with 1 additions and 6 deletions

View File

@ -17,11 +17,8 @@ package org.apache.lucene.analysis.util;
* limitations under the License.
*/
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
@ -29,8 +26,6 @@ import java.nio.file.Path;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.TestUtil;
public class TestFilesystemResourceLoader extends LuceneTestCase {

View File

@ -260,7 +260,7 @@ public abstract class BaseTokenStreamTestCase extends LuceneTestCase {
}
if (ts.incrementToken()) {
fail("TokenStream has more tokens than expected (expected count=" + output.length + "); extra token=" + termAtt.toString());
fail("TokenStream has more tokens than expected (expected count=" + output.length + "); extra token=" + termAtt);
}
// repeat our extra safety checks for end()