missing svn:eol-style

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1097216 13f79535-47bb-0310-9956-ffa450edef68
Robert Muir 2011-04-27 19:40:18 +00:00
parent 9accd70058
commit 1f67321074
11 changed files with 687 additions and 496 deletions

OffsetLimitTokenFilter.java

@@ -1 +1,57 @@
package org.apache.lucene.search.highlight;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;

import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
/**
 * This TokenFilter limits the number of tokens emitted while indexing by
 * adding up the length of each token's offset span (end offset minus start
 * offset); once the running total reaches the configured limit, no further
 * tokens are produced.
 */
public final class OffsetLimitTokenFilter extends TokenFilter {

  private int offsetCount;
  private OffsetAttribute offsetAttrib = getAttribute(OffsetAttribute.class);
  private int offsetLimit;

  public OffsetLimitTokenFilter(TokenStream input, int offsetLimit) {
    super(input);
    this.offsetLimit = offsetLimit;
  }

  @Override
  public boolean incrementToken() throws IOException {
    // the limit is checked before consuming, so the token that crosses the
    // limit is still returned; only subsequent tokens are suppressed
    if (offsetCount < offsetLimit && input.incrementToken()) {
      int offsetLength = offsetAttrib.endOffset() - offsetAttrib.startOffset();
      offsetCount += offsetLength;
      return true;
    }
    return false;
  }

  @Override
  public void reset() throws IOException {
    super.reset();
    offsetCount = 0;
  }
}
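
A hedged usage sketch, not part of the commit: wrapping a tokenizer with the new filter and draining it. MockTokenizer is the test-only tokenizer used in the test below; the class name OffsetLimitUsageSketch, the input string, and the limit of 20 are all illustrative, and CharTermAttribute is simply one way to read the emitted terms.

import java.io.StringReader;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

public class OffsetLimitUsageSketch {
  public static void main(String[] args) throws Exception {
    TokenStream stream = new MockTokenizer(new StringReader(
        "some reasonably long input text"), MockTokenizer.WHITESPACE, false);
    TokenStream limited = new OffsetLimitTokenFilter(stream, 20);
    // attributes are shared along the filter chain, so the tokenizer's
    // term attribute is visible on the wrapping filter
    CharTermAttribute term = limited.addAttribute(CharTermAttribute.class);
    while (limited.incrementToken()) {
      System.out.println(term.toString());
    }
    limited.end();
    limited.close();
  }
}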

OffsetLimitTokenFilterTest.java

@@ -1 +1,60 @@
package org.apache.lucene.search.highlight;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.Reader;
import java.io.StringReader;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.TokenStream;

public class OffsetLimitTokenFilterTest extends BaseTokenStreamTestCase {

  public void testFilter() throws Exception {
    TokenStream stream = new MockTokenizer(new StringReader(
        "short toolong evenmuchlongertext a ab toolong foo"),
        MockTokenizer.WHITESPACE, false);
    OffsetLimitTokenFilter filter = new OffsetLimitTokenFilter(stream, 10);
    assertTokenStreamContents(filter, new String[] {"short", "toolong"});

    stream = new MockTokenizer(new StringReader(
        "short toolong evenmuchlongertext a ab toolong foo"),
        MockTokenizer.WHITESPACE, false);
    filter = new OffsetLimitTokenFilter(stream, 12);
    assertTokenStreamContents(filter, new String[] {"short", "toolong"});

    stream = new MockTokenizer(new StringReader(
        "short toolong evenmuchlongertext a ab toolong foo"),
        MockTokenizer.WHITESPACE, false);
    filter = new OffsetLimitTokenFilter(stream, 30);
    assertTokenStreamContents(filter, new String[] {"short", "toolong",
        "evenmuchlongertext"});

    checkOneTermReuse(new Analyzer() {
      @Override
      public TokenStream tokenStream(String fieldName, Reader reader) {
        return new OffsetLimitTokenFilter(new MockTokenizer(reader,
            MockTokenizer.WHITESPACE, false), 10);
      }
    }, "llenges", "llenges");
  }
}
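
Why the limit of 10 admits both "short" and "toolong" in the first assertion: incrementToken checks offsetCount before consuming each token, so the token that crosses the limit is still emitted. Tracing the whitespace tokens of the test input:

token               start  end  length  check before consume  result
short                   0    5       5  0 < 10                emitted, offsetCount = 5
toolong                 6   13       7  5 < 10                emitted, offsetCount = 12
evenmuchlongertext     14   32      18  12 < 10 fails         stream stops

The limit of 12 yields the same two tokens (12 < 12 fails), while the limit of 30 also admits "evenmuchlongertext" because 12 < 30 still holds when that token is consumed.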

UpdateParamsTest.java

@@ -1 +1,77 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.solr.update;

import java.util.HashMap;

import org.apache.solr.common.params.MapSolrParams;
import org.apache.solr.common.params.UpdateParams;
import org.apache.solr.core.*;
import org.apache.solr.handler.XmlUpdateRequestHandler;
import org.apache.solr.request.SolrQueryRequestBase;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.util.AbstractSolrTestCase;
public class UpdateParamsTest extends AbstractSolrTestCase {

  @Override
  public String getSchemaFile() { return "schema.xml"; }

  @Override
  public String getSolrConfigFile() { return "solrconfig.xml"; }

  /**
   * Tests that both update.chain and update.processor work.
   * NOTE: This test will fail when support for update.processor is removed
   * and should then be removed as well.
   */
  public void testUpdateProcessorParamDeprecation() throws Exception {
    SolrCore core = h.getCore();

    XmlUpdateRequestHandler handler = new XmlUpdateRequestHandler();
    handler.init(null);

    MapSolrParams params = new MapSolrParams(new HashMap<String, String>());
    params.getMap().put(UpdateParams.UPDATE_CHAIN_DEPRECATED, "nonexistant");

    // Prepare a request/response pair against the core
    SolrQueryResponse rsp = new SolrQueryResponse();
    SolrQueryRequestBase req = new SolrQueryRequestBase(core, params) {};

    // First check that the deprecated update.processor param is still honored
    try {
      handler.handleRequestBody(req, rsp);
      fail("Faulty update.processor parameter (deprecated but should work) did not cause an error - i.e. it is not detected");
    } catch (Exception e) {
      assertEquals("Got wrong exception while testing update.processor",
          "unknown UpdateRequestProcessorChain: nonexistant", e.getMessage());
    }

    // Then check that the new update.chain param behaves the same way
    params.getMap().remove(UpdateParams.UPDATE_CHAIN_DEPRECATED);
    params.getMap().put(UpdateParams.UPDATE_CHAIN, "nonexistant");
    req.setParams(params);
    try {
      handler.handleRequestBody(req, rsp);
      fail("Faulty update.chain parameter did not cause an error - i.e. it is not detected");
    } catch (Exception e) {
      assertEquals("Got wrong exception while testing update.chain",
          "unknown UpdateRequestProcessorChain: nonexistant", e.getMessage());
    }
  }
}
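
A sketch of the parameter resolution this test exercises, illustrative only and not the handler's actual source: the deprecated update.processor value is consulted only when update.chain is absent, and an unknown chain name is rejected by SolrCore, producing the exception message both catch blocks compare against. The snippet reuses the params and core variables from the test above; UpdateRequestProcessorChain lives in org.apache.solr.update.processor.

// illustrative resolution logic, not copied from XmlUpdateRequestHandler
String chainName = params.get(UpdateParams.UPDATE_CHAIN);
if (chainName == null) {
  // fall back to the deprecated update.processor parameter
  chainName = params.get(UpdateParams.UPDATE_CHAIN_DEPRECATED);
}
// throws "unknown UpdateRequestProcessorChain: <name>" for unknown names
UpdateRequestProcessorChain chain = core.getUpdateProcessingChain(chainName);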