lucene/contrib/benchmark/conf/tokenize.alg

#/**
# * Licensed to the Apache Software Foundation (ASF) under one or more
# * contributor license agreements. See the NOTICE file distributed with
# * this work for additional information regarding copyright ownership.
# * The ASF licenses this file to You under the Apache License, Version 2.0
# * (the "License"); you may not use this file except in compliance with
# * the License. You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */
# -------------------------------------------------------------------------------------
#
# This alg reads all tokens out of a document but does not index them.
# This is useful for benchmarking tokenizers.
#
# To use this, cd to contrib/benchmark and then run:
#
# ant run-task -Dtask.alg=conf/tokenize.alg
#
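# The two properties below choose the document feed: ReutersContentSource
# reads the Reuters-21578 articles prepared by the benchmark build, and
# content.source.forever=false ends the feed once every document has been
# read, so the exhaustive repeat ("*") below can terminate.
#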
content.source=org.apache.lucene.benchmark.byTask.feeds.ReutersContentSource
content.source.forever=false
#
# -------------------------------------------------------------------------------------
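# Read every token of every document from the content source: "*" repeats
# ReadTokens until the source is exhausted, and closing the sequence with
# '>' instead of '}' keeps the individual ReadTokens executions from each
# storing statistics.  RepSumByName then prints a summary report grouped
# by task name.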
{ ReadTokens > : *
RepSumByName