mirror of https://github.com/apache/lucene.git

SOLR-3619: Rename 'example' dir to 'server'

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1635666 13f79535-47bb-0310-9956-ffa450edef68

parent: eca3d5335e
commit: 05ad610074
@@ -28,10 +28,59 @@ http://lucene.apache.org/solr

Getting Started
---------------

See the "example" directory for an example Solr setup.  A tutorial
using the example setup can be found at

To start Solr for the first time after installation, simply do:

  bin/solr start -f

This will launch a Solr server in the foreground of your shell, bound
to port 8983. Alternatively, you can launch Solr in "cloud" mode,
which allows you to scale out using sharding and replication. To
launch Solr in cloud mode, do:

  bin/solr start -f -cloud

To see all available options for starting Solr, please do:

  bin/solr start -help

After starting Solr, direct your Web browser to the Solr Admin Console at:

  http://localhost:8983/solr/

To add documents to the index, use the post.jar (or post.sh script) in
the example/exampledocs subdirectory (while Solr is running), for example:

  cd example/exampledocs
  java -jar post.jar *.xml
  Or:  sh post.sh *.xml

For more information about Solr examples please read...

 * example/solr/README.txt
   For more information about the "Solr Home" and Solr specific configuration
 * http://lucene.apache.org/solr/tutorial.html
   For a Tutorial using this example configuration
 * http://wiki.apache.org/solr/SolrResources
   For a list of other tutorials and introductory articles.

In addition, Solr ships with several example configurations that
help you learn about Solr. To run one of the examples, you would do:

  bin/solr -e <EXAMPLE>   where <EXAMPLE> is one of:

    cloud        : SolrCloud example
    dih          : Data Import Handler (rdbms, mail, rss, tika)
    schemaless   : Schema-less example (schema is inferred from data during indexing)
    techproducts : Kitchen sink example providing comprehensive examples of Solr features

A tutorial is available at:

  http://lucene.apache.org/solr/tutorial.html

or linked from "docs/index.html" in a binary distribution.

Also, there are Solr clients for many programming languages, see
  http://wiki.apache.org/solr/IntegratingSolr
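As a compact end-to-end sketch of the workflow described above (the core name "demo" is purely illustrative, and this assumes the create_core command added to bin/solr by this change is available):

  bin/solr start                    # start a standalone node in the background on port 8983
  bin/solr create_core -n demo      # create a core using the default data_driven_schema_configs configset
  cd example/exampledocs
  java -Durl=http://localhost:8983/solr/demo/update -jar post.jar *.xml
  bin/solr stop -p 8983             # shut the node down again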
@@ -39,11 +88,15 @@ Also, there are Solr clients for many programming languages, see

Files included in an Apache Solr binary distribution
----------------------------------------------------

server/
  A self-contained Solr instance, complete with a sample
  configuration and documents to index. Please see: bin/solr start -help
  for more information about starting a Solr server.

example/
  A self-contained example Solr instance, complete with a sample
  configuration and documents to index.
  Please see example/README.txt for information about running this
  example.
  Contains example documents and an alternative Solr home
  directory containing examples of how to use the Data Import Handler,
  see example/example-DIH/README.txt for more information.

dist/solr-<component>-XX.jar
  The Apache Solr libraries. To compile Apache Solr Plugins,
@@ -85,7 +138,7 @@ Instructions for Building Apache Solr from Source

NOTE:
To see Solr in action, you may want to use the "ant example" command to build
and package Solr into the example/webapps directory. See also example/README.txt.
and package Solr into the server/webapps directory. See also server/README.txt.


Export control
solr/bin/solr
@@ -124,7 +124,7 @@ function print_usage() {
  if [ "$CMD" == "" ]; then
    echo ""
    echo "Usage: solr COMMAND OPTIONS"
    echo " where COMMAND is one of: start, stop, restart, healthcheck"
    echo " where COMMAND is one of: start, stop, restart, healthcheck, create_core, create_collection"
    echo ""
    echo " Standalone server example (start Solr running in the background on port 8984):"
    echo ""
@@ -168,10 +168,9 @@ function print_usage() {
    echo ""
    echo " -e <example> Name of the example to run; available examples:"
    echo "     cloud:        SolrCloud example"
    echo "     default:      Solr default example"
    echo "     techproducts: Comprehensive example illustrating many of Solr's core capabilities"
    echo "     dih:          Data Import Handler"
    echo "     schemaless:   Schema-less example"
    echo "     multicore:    Multicore"
    echo ""
    echo " -a Additional parameters to pass to the JVM when starting Solr, such as to setup"
    echo "    Java debug options. For example, to enable a Java debugger to attach to the Solr JVM"
@@ -204,6 +203,37 @@ function print_usage() {
    echo ""
    echo " -z <zkHost> ZooKeeper connection string; default is localhost:9983"
    echo ""
  elif [ "$CMD" == "create_core" ]; then
    echo ""
    echo "Usage: solr create_core [-n name] [-c configset]"
    echo ""
    echo " -n <name> Name of core to create"
    echo ""
    echo " -c <configset> Name of configuration directory to use, valid options are:"
    echo "     basic_configs: Minimal Solr configuration"
    echo "     data_driven_schema_configs: Managed schema with field-guessing support enabled"
    echo "     sample_techproducts_configs: Example configuration with many optional features enabled to"
    echo "         demonstrate the full power of Solr"
    echo "     If not specified, default is: data_driven_schema_configs"
    echo ""
  elif [ "$CMD" == "create_collection" ]; then
    echo ""
    echo "Usage: solr create_collection [-n name] [-c configset] [-shards #] [-replicationFactor #]"
    echo ""
    echo " -n <name> Name of collection to create"
    echo ""
    echo " -c <configset> Name of configuration directory to use, valid options are:"
    echo "     basic_configs: Minimal Solr configuration"
    echo "     data_driven_schema_configs: Managed schema with field-guessing support enabled"
    echo "     sample_techproducts_configs: Example configuration with many optional features enabled to"
    echo "         demonstrate the full power of Solr"
    echo "     If not specified, default is: data_driven_schema_configs"
    echo ""
    echo " -shards <#> Number of shards to split the collection into"
    echo ""
    echo " -replicationFactor <#> Number of copies of each document in the collection"
    echo ""
    echo ""
  fi
} # end print_usage
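For reference, invocations matching the usage text above might look like the following (the core and collection names are illustrative):

  bin/solr create_core -n mycore -c basic_configs
  bin/solr create_collection -n mycollection -c data_driven_schema_configs -shards 2 -replicationFactor 2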
@@ -428,6 +458,123 @@ if [ "$SCRIPT_CMD" == "healthcheck" ]; then
  exit $?
fi

# create a core or collection
if [[ "$SCRIPT_CMD" == "create_core" || "$SCRIPT_CMD" == "create_collection" ]]; then

  CREATE_TYPE=collection
  CREATE_NUM_SHARDS=1
  CREATE_REPFACT=1
  if [ "$SCRIPT_CMD" == "create_core" ]; then
    CREATE_TYPE=core
  fi

  if [ $# -gt 0 ]; then
    while true; do
      case $1 in
        -n|-name)
          if [ "${2:0:1}" == "-" ]; then
            print_usage "$SCRIPT_CMD" "Expected $CREATE_TYPE name but found $2 instead!"
            exit 1
          fi
          CREATE_NAME=$2
          shift 2
        ;;
        -c|-configset)
          if [ "${2:0:1}" == "-" ]; then
            print_usage "$SCRIPT_CMD" "Expected configset name but found $2 instead!"
            exit 1
          fi
          CREATE_CONFIGSET="$2"
          shift 2
        ;;
        -shards)
          if [ "${2:0:1}" == "-" ]; then
            print_usage "$SCRIPT_CMD" "Expected shard count but found $2 instead!"
            exit 1
          fi
          CREATE_NUM_SHARDS="$2"
          shift 2
        ;;
        -replicationFactor)
          if [ "${2:0:1}" == "-" ]; then
            print_usage "$SCRIPT_CMD" "Expected replication factor but found $2 instead!"
            exit 1
          fi
          CREATE_REPFACT="$2"
          shift 2
        ;;
        -help|-usage)
          print_usage "$SCRIPT_CMD"
          exit 0
        ;;
        --)
          shift
          break
        ;;
        *)
          if [ "$1" != "" ]; then
            print_usage "$SCRIPT_CMD" "Unrecognized or misplaced argument: $1!"
            exit 1
          else
            break # out-of-args, stop looping
          fi
        ;;
      esac
    done
  fi

  if [ "$CREATE_CONFIGSET" == "" ]; then
    CREATE_CONFIGSET=data_driven_schema_configs
  fi

  if [ "$CREATE_NAME" == "" ]; then
    echo "$CREATE_TYPE name is required!"
    print_usage "$SCRIPT_CMD"
    exit 1
  fi

  for ID in `ps waux | grep java | grep start.jar | awk '{print $2}' | sort -r`
  do
    port=`jetty_port "$ID"`
    if [ "$port" != "" ]; then
      CREATE_PORT=$port
      break
    fi
  done

  if [ "$CREATE_PORT" == "" ]; then
    echo "Failed to determine the port of a local Solr instance, cannot create $CREATE_TYPE!"
    exit 1
  fi

  STATUS_INFO=`run_tool status -solr http://localhost:$CREATE_PORT/solr | tr -d ' '`
  IS_CLOUD=`echo $STATUS_INFO | grep $'"cloud":' | tr -d ' '`
  echo $IS_CLOUD
  if [ "$IS_CLOUD" != "" ]; then
    if [ "$SCRIPT_CMD" == "create_core" ]; then
      echo -e "\nERROR: Solr running on port $CREATE_PORT is running in SolrCloud mode, please use create_collection command instead.\n"
      exit 1
    fi
  else
    if [ "$SCRIPT_CMD" == "create_collection" ]; then
      echo -e "\nERROR: Solr running on port $CREATE_PORT is running in standalone server mode, please use the create_core command instead\ncreate_collection can only be used when running in SolrCloud mode.\n"
      exit 1
    fi
  fi

  if [ "$SCRIPT_CMD" == "create_collection" ]; then
    run_tool create_collection -name $CREATE_NAME -shards $CREATE_NUM_SHARDS -replicationFactor $CREATE_REPFACT \
      -config $CREATE_CONFIGSET -configsetsDir $SOLR_TIP/server/solr/configsets -solrUrl http://localhost:$CREATE_PORT/solr
  else
    CREATE_URL="http://localhost:$CREATE_PORT/solr/admin/cores?action=CREATE&name=$CREATE_NAME&configSet=$CREATE_CONFIGSET"
    echo -e "Creating new core using command:\n$CREATE_URL\n"
    run_tool api -get $CREATE_URL
  fi

  exit $?
fi


# verify the command given is supported
if [ "$SCRIPT_CMD" != "stop" ] && [ "$SCRIPT_CMD" != "start" ] && [ "$SCRIPT_CMD" != "restart" ]; then
  print_usage "" "$SCRIPT_CMD not supported!"
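The cloud-vs-standalone branch above keys off whether the status output contains a "cloud" section; a rough manual equivalent, assuming curl is available and a node is listening on the default port, would be:

  # "mode":"solrcloud" indicates SolrCloud; "mode":"std" indicates a standalone server
  curl -s "http://localhost:8983/solr/admin/info/system?wt=json" | grep '"mode"'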
@@ -652,8 +799,9 @@ if [ "$EXAMPLE" != "" ]; then
        SOLR_PORT=${CLOUD_PORTS[0]}
        shift
      ;;
      default)
        SOLR_HOME="$SOLR_TIP/example/solr"
      techproducts)
        SOLR_HOME="$SOLR_TIP/server/solr"
        EXAMPLE_CONFIGSET=sample_techproducts_configs
        shift
      ;;
      dih)
@@ -661,15 +809,12 @@ if [ "$EXAMPLE" != "" ]; then
        shift
      ;;
      schemaless)
        SOLR_HOME="$SOLR_TIP/example/example-schemaless/solr"
        shift
      ;;
      multicore)
        SOLR_HOME="$SOLR_TIP/example/multicore"
        SOLR_HOME="$SOLR_TIP/server/solr"
        EXAMPLE_CONFIGSET=data_driven_schema_configs
        shift
      ;;
      *)
        print_usage "start" "Unsupported example $EXAMPLE! Please choose one of: cloud, dih, schemaless, multicore, or default"
        print_usage "start" "Unsupported example $EXAMPLE! Please choose one of: cloud, dih, schemaless, or techproducts"
        exit 1
      ;;
    esac
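With this mapping the schemaless example now runs out of server/solr and gets its core from the data_driven_schema_configs configset; a quick smoke test (the field name is illustrative, since schemaless guesses fields at index time) might be:

  bin/solr -e schemaless
  curl "http://localhost:8983/solr/schemaless/update?commit=true" \
       -H "Content-Type: application/json" -d '[{"id":"1","name":"test doc"}]'
  curl "http://localhost:8983/solr/schemaless/select?q=id:1&wt=json"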
@@ -926,6 +1071,25 @@ $SOLR_HOST_ARG -Djetty.port=$SOLR_PORT \

if [ "$EXAMPLE" != "cloud" ]; then
  launch_solr "$FG" "$ADDITIONAL_CMD_OPTS"

  # create the core/collection for the requested example after launching Solr
  if [[ "$EXAMPLE" == "schemaless" || "$EXAMPLE" == "techproducts" ]]; then
    if [ "$SOLR_MODE" == "solrcloud" ]; then
      run_tool create_collection -name $EXAMPLE -shards 1 -replicationFactor 1 \
        -config $EXAMPLE_CONFIGSET -configsetsDir $SOLR_TIP/server/solr/configsets -solrUrl http://localhost:$SOLR_PORT/solr
    else
      CREATE_URL="http://localhost:$SOLR_PORT/solr/admin/cores?action=CREATE&name=$EXAMPLE&configSet=$EXAMPLE_CONFIGSET"
      echo -e "Creating new core using command:\n$CREATE_URL\n"
      run_tool api -get $CREATE_URL
    fi

    if [ "$EXAMPLE" == "techproducts" ]; then
      echo "Indexing tech product example docs from $SOLR_TIP/example/exampledocs"
      "$JAVA" -Durl=http://localhost:$SOLR_PORT/solr/$EXAMPLE/update -jar $SOLR_TIP/example/exampledocs/post.jar $SOLR_TIP/example/exampledocs/*.xml
    fi

    echo -e "\nSolr $EXAMPLE launched successfully. Direct your Web browser to http://localhost:$SOLR_PORT/solr to visit the Solr Admin UI\n"
  fi
else
  #
  # SolrCloud example is a bit involved so needs special handling here
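Putting the pieces together, the techproducts example started by this block can be exercised roughly as follows (assuming the default port and the core name chosen above):

  bin/solr -e techproducts
  curl "http://localhost:8983/solr/techproducts/select?q=*:*&rows=0&wt=json"   # should report numFound > 0 once the example docs are indexed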
@@ -952,8 +1116,6 @@ else
  # can't launch this node in the foreground else we can't run anymore commands
  launch_solr "false" "$ADDITIONAL_CMD_OPTS"

  sleep 5

  # if user did not define a specific -z parameter, assume embedded in first cloud node we launched above
  zk_host=$ZK_HOST
  if [ "$zk_host" == "" ]; then
@@ -977,16 +1139,7 @@ else
  if $noprompt ; then
    CLOUD_NUM_SHARDS=2
    CLOUD_REPFACT=2
    # if the new default config directory is available, then use it,
    # otherwise, use the legacy collection1 example
    # TODO: this will need to change when SOLR-3619 is resolved
    if [ -d "$SOLR_TIP/server/solr/configsets/schemaless" ]; then
      CLOUD_CONFIG_DIR=$SOLR_TIP/server/solr/configsets/schemaless
      CLOUD_CONFIG=schemaless
    else
      CLOUD_CONFIG_DIR=$SOLR_TIP/example/solr/collection1/conf
      CLOUD_CONFIG=default
    fi
    CLOUD_CONFIG=data_driven_schema_configs
  else
    echo -e "\nNow let's create a new collection for indexing documents in your $CLOUD_NUM_NODES-node cluster.\n"
    read -e -p "Please provide a name for your new collection: [gettingstarted] " USER_INPUT
@@ -1022,47 +1175,18 @@ else
    echo $CLOUD_REPFACT

    USER_INPUT=
    read -e -p "Please choose a configuration for the $CLOUD_COLLECTION collection, available options are: default or schemaless [default] " USER_INPUT
    read -e -p "Please choose a configuration for the $CLOUD_COLLECTION collection, available options are:\nbasic_configs, data_driven_schema_configs, or sample_techproducts_configs [data_driven_schema_configs] " USER_INPUT
    # trim whitespace out of the user input
    CLOUD_CONFIG=`echo $USER_INPUT | tr -d ' '`

    # handle the default selection or empty input
    if [ "$CLOUD_CONFIG" == "" ]; then
      CLOUD_CONFIG=default
    fi
    echo $CLOUD_CONFIG

    if [ "$CLOUD_CONFIG" == "schemaless" ]; then
      if [ -d "$SOLR_TIP/server/solr/configsets/schemaless" ]; then
        CLOUD_CONFIG_DIR=$SOLR_TIP/server/solr/configsets/schemaless
      else
        CLOUD_CONFIG_DIR=$SOLR_TIP/example/example-schemaless/solr/collection1/conf
      fi
    else
      CLOUD_CONFIG_DIR=$SOLR_TIP/example/solr/collection1/conf
      CLOUD_CONFIG=data_driven_schema_configs
    fi
  fi

  echo -e "\nDeploying default Solr configuration files to embedded ZooKeeper using command:\n"
  echo -e "$DEFAULT_SERVER_DIR/scripts/cloud-scripts/zkcli.sh -zkhost $zk_host -cmd upconfig -confdir $CLOUD_CONFIG_DIR -confname $CLOUD_CONFIG\n"
  # upload the config directory to ZooKeeper
  # Extract the solr.war if it hasn't been done already (so we can access the SolrCLI class)
  if [ ! -d "$DEFAULT_SERVER_DIR/solr-webapp/webapp" ]; then
    (mkdir -p $DEFAULT_SERVER_DIR/solr-webapp/webapp && cd $DEFAULT_SERVER_DIR/solr-webapp/webapp && jar xf $DEFAULT_SERVER_DIR/webapps/solr.war)
  fi
  $JAVA -Dlog4j.configuration=file:$DEFAULT_SERVER_DIR/scripts/cloud-scripts/log4j.properties \
    -classpath "$DEFAULT_SERVER_DIR/solr-webapp/webapp/WEB-INF/lib/*:$DEFAULT_SERVER_DIR/lib/ext/*" \
    org.apache.solr.cloud.ZkCLI -zkhost $zk_host -cmd upconfig -confdir $CLOUD_CONFIG_DIR -confname $CLOUD_CONFIG > /dev/null 2>&1
  echo -e "Successfully deployed the $CLOUD_CONFIG_DIR configuration directory to ZooKeeper as $CLOUD_CONFIG\n"

  # note use of ceiling logic in case of remainder
  MAX_SHARDS_PER_NODE=$((($CLOUD_NUM_SHARDS*$CLOUD_REPFACT+$CLOUD_NUM_NODES-1)/$CLOUD_NUM_NODES))

  COLLECTIONS_API=http://localhost:$SOLR_PORT/solr/admin/collections

  CLOUD_CREATE_COLLECTION_CMD="$COLLECTIONS_API?action=CREATE&name=$CLOUD_COLLECTION&replicationFactor=$CLOUD_REPFACT&numShards=$CLOUD_NUM_SHARDS&collection.configName=$CLOUD_CONFIG&maxShardsPerNode=$MAX_SHARDS_PER_NODE&wt=json&indent=2"
  echo -e "\n\nCreating new collection $CLOUD_COLLECTION with $CLOUD_NUM_SHARDS shards and replication factor $CLOUD_REPFACT using Collections API command:\n\n$CLOUD_CREATE_COLLECTION_CMD\n\nFor more information about the Collections API, please see: https://cwiki.apache.org/confluence/display/solr/Collections+API\n"
  curl "$CLOUD_CREATE_COLLECTION_CMD"
  run_tool create_collection -name $CLOUD_COLLECTION -shards $CLOUD_NUM_SHARDS -replicationFactor $CLOUD_REPFACT \
    -config $CLOUD_CONFIG -configsetsDir $SOLR_TIP/server/solr/configsets -solrUrl http://localhost:$SOLR_PORT/solr
  echo -e "\n\nSolrCloud example running, please visit http://localhost:$SOLR_PORT/solr \n\n"
fi
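For comparison, the raw Collections API request that the run_tool create_collection call replaces here would look roughly like this with the no-prompt defaults (gettingstarted, 2 shards, replication factor 2):

  curl "http://localhost:8983/solr/admin/collections?action=CREATE&name=gettingstarted&numShards=2&replicationFactor=2&maxShardsPerNode=2&collection.configName=data_driven_schema_configs&wt=json&indent=2"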
@ -68,9 +68,20 @@ IF "%1"=="stop" goto set_script_cmd
|
|||
IF "%1"=="restart" goto set_script_cmd
|
||||
IF "%1"=="healthcheck" (
|
||||
REM healthcheck uses different arg parsing strategy
|
||||
set SCRIPT_CMD=healthcheck
|
||||
SHIFT
|
||||
goto parse_healthcheck_args
|
||||
)
|
||||
IF "%1"=="create_core" (
|
||||
set SCRIPT_CMD=create_core
|
||||
SHIFT
|
||||
goto parse_create_args
|
||||
)
|
||||
IF "%1"=="create_collection" (
|
||||
set SCRIPT_CMD=create_collection
|
||||
SHIFT
|
||||
goto parse_create_args
|
||||
)
|
||||
goto parse_args
|
||||
|
||||
:usage
|
||||
|
@ -82,12 +93,15 @@ IF "%FIRST_ARG%"=="/?" goto script_usage
|
|||
IF "%SCRIPT_CMD%"=="start" goto start_usage
|
||||
IF "%SCRIPT_CMD%"=="restart" goto start_usage
|
||||
IF "%SCRIPT_CMD%"=="stop" goto stop_usage
|
||||
IF "%SCRIPT_CMD%"=="healthcheck" goto healthcheck_usage
|
||||
IF "%SCRIPT_CMD%"=="create_core" goto create_core_usage
|
||||
IF "%SCRIPT_CMD%"=="create_collection" goto create_collection_usage
|
||||
goto done
|
||||
|
||||
:script_usage
|
||||
@echo.
|
||||
@echo Usage: solr COMMAND OPTIONS
|
||||
@echo where COMMAND is one of: start, stop, restart, healthcheck
|
||||
@echo where COMMAND is one of: start, stop, restart, healthcheck, create_core, create_collection
|
||||
@echo.
|
||||
@echo Standalone server example (start Solr running in the background on port 8984):
|
||||
@echo.
|
||||
|
@ -133,10 +147,9 @@ goto done
|
|||
@echo.
|
||||
@echo -e example Name of the example to run; available examples:
|
||||
@echo cloud: SolrCloud example
|
||||
@echo default: Solr default example
|
||||
@echo techproducts: Comprehensive example illustrating many of Solr's core capabilities
|
||||
@echo dih: Data Import Handler
|
||||
@echo schemaless: Schema-less example
|
||||
@echo multicore: Multicore
|
||||
@echo.
|
||||
@echo -a opts Additional parameters to pass to the JVM when starting Solr, such as to setup
|
||||
@echo Java debug options. For example, to enable a Java debugger to attach to the Solr JVM
|
||||
|
@ -157,9 +170,9 @@ goto done
|
|||
@echo.
|
||||
@echo -p port Specify the port to start the Solr HTTP listener on; default is 8983
|
||||
@echo.
|
||||
@echo -V Verbose messages from this script
|
||||
@echo -all Find and stop all running Solr servers on this host
|
||||
@echo.
|
||||
@echo NOTE: If port is not specified, then all running Solr servers are stopped.
|
||||
@echo -V Verbose messages from this script
|
||||
@echo.
|
||||
goto done
|
||||
|
||||
|
@ -173,6 +186,40 @@ goto done
|
|||
@echo.
|
||||
goto done
|
||||
|
||||
:create_core_usage
|
||||
echo.
|
||||
echo Usage: solr create_core [-n name] [-c configset]
|
||||
echo.
|
||||
echo -n name Name of core to create
|
||||
echo.
|
||||
echo -c configset Name of configuration directory to use, valid options are:
|
||||
echo basic_configs: Minimal Solr configuration
|
||||
echo data_driven_schema_configs: Managed schema with field-guessing support enabled
|
||||
echo sample_techproducts_configs: Example configuration with many optional features enabled to
|
||||
echo demonstrate the full power of Solr
|
||||
echo If not specified, default is: data_driven_schema_configs
|
||||
echo.
|
||||
goto done
|
||||
|
||||
:create_collection_usage
|
||||
echo.
|
||||
echo Usage: solr create_collection [-n name] [-c configset] [-shards #] [-replicationFactor #]
|
||||
echo.
|
||||
echo -n name Name of collection to create
|
||||
echo.
|
||||
echo -c configset Name of configuration directory to use, valid options are:
|
||||
echo basic_configs: Minimal Solr configuration
|
||||
echo data_driven_schema_configs: Managed schema with field-guessing support enabled
|
||||
echo sample_techproducts_configs: Example configuration with many optional features enabled to
|
||||
echo demonstrate the full power of Solr
|
||||
echo If not specified, default is: data_driven_schema_configs
|
||||
echo.
|
||||
echo -shards # Number of shards to split the collection into
|
||||
echo.
|
||||
echo -replicationFactor # Number of copies of each document in the collection
|
||||
echo.
|
||||
goto done
|
||||
|
||||
REM Really basic command-line arg parsing
|
||||
:parse_args
|
||||
IF "%SCRIPT_CMD%"=="" set SCRIPT_CMD=start
|
||||
|
@ -205,6 +252,7 @@ IF "%1"=="-addlopts" goto set_addl_opts
|
|||
IF "%1"=="-noprompt" goto set_noprompt
|
||||
IF "%1"=="-k" goto set_stop_key
|
||||
IF "%1"=="-key" goto set_stop_key
|
||||
IF "%1"=="-all" goto set_stop_all
|
||||
IF NOT "%1"=="" goto invalid_cmd_line
|
||||
|
||||
:set_script_cmd
|
||||
|
@ -320,12 +368,16 @@ IF "%firstChar%"=="-" (
|
|||
set SCRIPT_ERROR=Expected port but found %2 instead!
|
||||
goto invalid_cmd_line
|
||||
)
|
||||
|
||||
set STOP_KEY=%~2
|
||||
SHIFT
|
||||
SHIFT
|
||||
goto parse_args
|
||||
|
||||
:set_stop_all
|
||||
set STOP_ALL=1
|
||||
SHIFT
|
||||
goto parse_args
|
||||
|
||||
:set_zookeeper
|
||||
|
||||
set "arg=%~2"
|
||||
|
@ -377,17 +429,15 @@ IF NOT EXIST "%SOLR_SERVER_DIR%" (
|
|||
|
||||
IF "%EXAMPLE%"=="" (
|
||||
REM SOLR_HOME just becomes serverDir/solr
|
||||
) ELSE IF "%EXAMPLE%"=="default" (
|
||||
set "SOLR_HOME=%SOLR_TIP%\example\solr"
|
||||
) ELSE IF "%EXAMPLE%"=="techproducts" (
|
||||
set "SOLR_HOME=%SOLR_TIP%\server\solr"
|
||||
) ELSE IF "%EXAMPLE%"=="cloud" (
|
||||
set SOLR_MODE=solrcloud
|
||||
goto cloud_example_start
|
||||
) ELSE IF "%EXAMPLE%"=="dih" (
|
||||
set "SOLR_HOME=%SOLR_TIP%\example\example-DIH\solr"
|
||||
) ELSE IF "%EXAMPLE%"=="schemaless" (
|
||||
set "SOLR_HOME=%SOLR_TIP%\example\example-schemaless\solr"
|
||||
) ELSE IF "%EXAMPLE%"=="multicore" (
|
||||
set "SOLR_HOME=%SOLR_TIP%\example\multicore"
|
||||
set "SOLR_HOME=%SOLR_TIP%\server\solr"
|
||||
) ELSE (
|
||||
@echo.
|
||||
@echo 'Unrecognized example %EXAMPLE%!'
|
||||
|
@ -413,17 +463,62 @@ IF NOT EXIST "%SOLR_HOME%\solr.xml" (
|
|||
|
||||
IF "%STOP_KEY%"=="" set STOP_KEY=solrrocks
|
||||
|
||||
REM TODO stop all if no port specified as Windows doesn't seem to have a
|
||||
REM tool that does: ps waux | grep start.jar
|
||||
@REM stop logic here
|
||||
IF "%SCRIPT_CMD%"=="stop" (
|
||||
IF "%SOLR_PORT%"=="" (
|
||||
set SCRIPT_ERROR=Must specify the port when trying to stop Solr!
|
||||
IF "%STOP_ALL%"=="1" (
|
||||
for /f "usebackq" %%i in (`dir /b %SOLR_TIP\bin% ^| findstr /i "^solr-.*\.port$"`) do (
|
||||
set SOME_SOLR_PORT=
|
||||
For /F "Delims=" %%J In (%SOLR_TIP%\bin\%%i) do set SOME_SOLR_PORT=%%~J
|
||||
if NOT "!SOME_SOLR_PORT!"=="" (
|
||||
for /f "tokens=2,5" %%j in ('netstat -aon ^| find /i "listening" ^| find ":!SOME_SOLR_PORT!"') do (
|
||||
@echo Stopping Solr running on port !SOME_SOLR_PORT!
|
||||
set /A STOP_PORT=!SOME_SOLR_PORT! - 1000
|
||||
"%JAVA%" -jar "%SOLR_SERVER_DIR%\start.jar" STOP.PORT=!STOP_PORT! STOP.KEY=%STOP_KEY% --stop
|
||||
del %SOLR_TIP%\bin\solr-!SOME_SOLR_PORT!.port
|
||||
timeout /T 5
|
||||
REM Kill it if it is still running after the graceful shutdown
|
||||
For /f "tokens=5" %%M in ('netstat -nao ^| find /i "listening" ^| find ":!SOME_SOLR_PORT!"') do (taskkill /f /PID %%M)
|
||||
)
|
||||
)
|
||||
)
|
||||
) ELSE (
|
||||
set SCRIPT_ERROR=Must specify the port when trying to stop Solr, or use -all to stop all running nodes on this host.
|
||||
goto err
|
||||
)
|
||||
) ELSE (
|
||||
set found_it=0
|
||||
For /f "tokens=5" %%M in ('netstat -nao ^| find /i "listening" ^| find ":%SOLR_PORT%"') do (
|
||||
set found_it=1
|
||||
@echo Stopping Solr running on port %SOLR_PORT%
|
||||
set /A STOP_PORT=%SOLR_PORT% - 1000
|
||||
"%JAVA%" -jar "%SOLR_SERVER_DIR%\start.jar" STOP.PORT=!STOP_PORT! STOP.KEY=%STOP_KEY% --stop
|
||||
del %SOLR_TIP%\bin\solr-%SOLR_PORT%.port
|
||||
timeout /T 5
|
||||
REM Kill it if it is still running after the graceful shutdown
|
||||
For /f "tokens=5" %%j in ('netstat -nao ^| find /i "listening" ^| find ":%SOLR_PORT%"') do (taskkill /f /PID %%j)
|
||||
|
||||
REM backup log files (use current timestamp for backup name)
|
||||
For /f "tokens=2-4 delims=/ " %%a in ('date /t') do (set mydate=%%c-%%a-%%b)
|
||||
For /f "tokens=1-2 delims=/:" %%a in ("%TIME%") do (set mytime=%%a%%b)
|
||||
set now_ts=!mydate!_!mytime!
|
||||
IF EXIST "%SOLR_SERVER_DIR%\logs\solr.log" (
|
||||
echo Backing up %SOLR_SERVER_DIR%\logs\solr.log
|
||||
move /Y "%SOLR_SERVER_DIR%\logs\solr.log" "%SOLR_SERVER_DIR%\logs\solr_log_!now_ts!"
|
||||
)
|
||||
|
||||
IF EXIST "%SOLR_SERVER_DIR%\logs\solr_gc.log" (
|
||||
echo Backing up %SOLR_SERVER_DIR%\logs\solr_gc.log
|
||||
move /Y "%SOLR_SERVER_DIR%\logs\solr_gc.log" "%SOLR_SERVER_DIR%\logs\solr_gc_log_!now_ts!"
|
||||
)
|
||||
)
|
||||
if "!found_it!"=="0" echo No Solr found running on port %SOLR_PORT%
|
||||
)
|
||||
goto done
|
||||
)
|
||||
|
||||
IF "%SOLR_PORT%"=="" set SOLR_PORT=8983
|
||||
IF "%STOP_PORT%"=="" set STOP_PORT=79%SOLR_PORT:~-2,2%
|
||||
IF "%STOP_PORT%"=="" set /A STOP_PORT=%SOLR_PORT% - 1000
|
||||
|
||||
IF "%SCRIPT_CMD%"=="start" (
|
||||
REM see if Solr is already running using netstat
|
||||
|
@ -431,31 +526,8 @@ IF "%SCRIPT_CMD%"=="start" (
|
|||
set "SCRIPT_ERROR=Process %%j is already listening on port %SOLR_PORT%. If this is Solr, please stop it first before starting (or use restart). If this is not Solr, then please choose a different port using -p PORT"
|
||||
goto err
|
||||
)
|
||||
) ELSE (
|
||||
@echo Stopping Solr running on port %SOLR_PORT%
|
||||
"%JAVA%" -jar "%SOLR_SERVER_DIR%\start.jar" STOP.PORT=%STOP_PORT% STOP.KEY=%STOP_KEY% --stop
|
||||
timeout /T 5
|
||||
)
|
||||
|
||||
REM Kill it if it is still running after the graceful shutdown
|
||||
For /f "tokens=5" %%j in ('netstat -nao ^| find /i "listening" ^| find ":%SOLR_PORT%"') do (taskkill /f /PID %%j)
|
||||
|
||||
REM backup log files (use current timestamp for backup name)
|
||||
For /f "tokens=2-4 delims=/ " %%a in ('date /t') do (set mydate=%%c-%%a-%%b)
|
||||
For /f "tokens=1-2 delims=/:" %%a in ("%TIME%") do (set mytime=%%a%%b)
|
||||
set now_ts=%mydate%_%mytime%
|
||||
IF EXIST "%SOLR_SERVER_DIR%\logs\solr.log" (
|
||||
echo Backing up %SOLR_SERVER_DIR%\logs\solr.log
|
||||
move /Y "%SOLR_SERVER_DIR%\logs\solr.log" "%SOLR_SERVER_DIR%\logs\solr_log_!now_ts!"
|
||||
)
|
||||
|
||||
IF EXIST "%SOLR_SERVER_DIR%\logs\solr_gc.log" (
|
||||
echo Backing up %SOLR_SERVER_DIR%\logs\solr_gc.log
|
||||
move /Y "%SOLR_SERVER_DIR%\logs\solr_gc.log" "%SOLR_SERVER_DIR%\logs\solr_gc_log_!now_ts!"
|
||||
)
|
||||
|
||||
IF "%SCRIPT_CMD%"=="stop" goto done
|
||||
|
||||
REM if verbose gc logging enabled, setup the location of the log file
|
||||
IF NOT "%GC_LOG_OPTS%"=="" set GC_LOG_OPTS=%GC_LOG_OPTS% -Xloggc:"%SOLR_SERVER_DIR%/logs/solr_gc.log"
|
||||
|
||||
|
@ -521,13 +593,55 @@ cd "%SOLR_SERVER_DIR%"
|
|||
@echo.
|
||||
IF "%FG%"=="1" (
|
||||
REM run solr in the foreground
|
||||
"%JAVA%" -server -Xss256k %SOLR_JAVA_MEM% %START_OPTS% -DSTOP.PORT=%STOP_PORT% -DSTOP.KEY=%STOP_KEY% ^
|
||||
title "Solr-%SOLR_PORT%"
|
||||
echo %SOLR_PORT%>%SOLR_TIP%\bin\solr-%SOLR_PORT%.port
|
||||
"%JAVA%" -server -Xss256k %SOLR_JAVA_MEM% %START_OPTS% -DSTOP.PORT=!STOP_PORT! -DSTOP.KEY=%STOP_KEY% ^
|
||||
-Djetty.port=%SOLR_PORT% -Dsolr.solr.home="%SOLR_HOME%" -Dsolr.install.dir="%SOLR_TIP%" -jar start.jar
|
||||
) ELSE (
|
||||
START "" "%JAVA%" -server -Xss256k %SOLR_JAVA_MEM% %START_OPTS% -DSTOP.PORT=%STOP_PORT% -DSTOP.KEY=%STOP_KEY% ^
|
||||
START "Solr-%SOLR_PORT%" "%JAVA%" -server -Xss256k %SOLR_JAVA_MEM% %START_OPTS% -DSTOP.PORT=!STOP_PORT! -DSTOP.KEY=%STOP_KEY% ^
|
||||
-Djetty.port=%SOLR_PORT% -Dsolr.solr.home="%SOLR_HOME%" -Dsolr.install.dir="%SOLR_TIP%" -jar start.jar > "%SOLR_SERVER_DIR%\logs\solr-%SOLR_PORT%-console.log"
|
||||
echo %SOLR_PORT%>%SOLR_TIP%\bin\solr-%SOLR_PORT%.port
|
||||
)
|
||||
|
||||
|
||||
set CREATE_EXAMPLE_CONFIG=
|
||||
IF "%EXAMPLE%"=="schemaless" (
|
||||
set CREATE_EXAMPLE_CONFIG=data_driven_schema_configs
|
||||
)
|
||||
IF "%EXAMPLE%"=="techproducts" (
|
||||
set CREATE_EXAMPLE_CONFIG=sample_techproducts_configs
|
||||
)
|
||||
|
||||
IF NOT "!CREATE_EXAMPLE_CONFIG!"=="" (
|
||||
timeout /T 5
|
||||
IF "%SOLR_MODE%"=="solrcloud" (
|
||||
"%JAVA%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
|
||||
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
|
||||
org.apache.solr.util.SolrCLI create_collection -name !EXAMPLE! -shards 1 -replicationFactor 1 ^
|
||||
-config !CREATE_EXAMPLE_CONFIG! -configsetsDir "%SOLR_SERVER_DIR%\solr\configsets" -solrUrl http://localhost:%SOLR_PORT%/solr
|
||||
) ELSE (
|
||||
set "CREATE_URL=http://localhost:%SOLR_PORT%/solr/admin/cores?action=CREATE&name=%EXAMPLE%&configSet=!CREATE_EXAMPLE_CONFIG!"
|
||||
@echo.
|
||||
@echo Creating new core using command:
|
||||
@echo !CREATE_URL!
|
||||
@echo.
|
||||
"%JAVA%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
|
||||
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
|
||||
org.apache.solr.util.SolrCLI api -get "!CREATE_URL!"
|
||||
)
|
||||
)
|
||||
|
||||
IF "%EXAMPLE%"=="techproducts" (
|
||||
@echo.
|
||||
@echo Indexing tech product example docs from %SOLR_TIP%\example\exampledocs
|
||||
"%JAVA%" -Durl=http://localhost:%SOLR_PORT%/solr/%EXAMPLE%/update -jar %SOLR_TIP%/example/exampledocs/post.jar %SOLR_TIP%/example/exampledocs/*.xml
|
||||
)
|
||||
|
||||
@echo.
|
||||
@echo Solr %EXAMPLE% launched successfully.
|
||||
@echo Direct your Web browser to http://localhost:%SOLR_PORT%/solr to visit the Solr Admin UI
|
||||
@echo.
|
||||
|
||||
goto done
|
||||
|
||||
:cloud_example_start
|
||||
|
@ -608,8 +722,9 @@ for /l %%x in (1, 1, !CLOUD_NUM_NODES!) do (
|
|||
)
|
||||
@echo Starting node1 on port !NODE_PORT! using command:
|
||||
@echo solr -cloud -p !NODE_PORT! -d node1 !DASHZ! !DASHM!
|
||||
START "" "%SDIR%\solr" -f -cloud -p !NODE_PORT! -d node1 !DASHZ! !DASHM!
|
||||
START "Solr-!NODE_PORT!" "%SDIR%\solr" -f -cloud -p !NODE_PORT! -d node1 !DASHZ! !DASHM!
|
||||
set NODE1_PORT=!NODE_PORT!
|
||||
echo !NODE_PORT!>%SOLR_TIP%\bin\solr-!NODE_PORT!.port
|
||||
) ELSE (
|
||||
IF "!ZK_HOST!"=="" (
|
||||
set /A ZK_PORT=!NODE1_PORT!+1000
|
||||
|
@ -617,7 +732,8 @@ for /l %%x in (1, 1, !CLOUD_NUM_NODES!) do (
|
|||
)
|
||||
@echo Starting node%%x on port !NODE_PORT! using command:
|
||||
@echo solr -cloud -p !NODE_PORT! -d node%%x -z !ZK_HOST! !DASHM!
|
||||
START "" "%SDIR%\solr" -f -cloud -p !NODE_PORT! -d node%%x -z !ZK_HOST! !DASHM!
|
||||
START "Solr-!NODE_PORT!" "%SDIR%\solr" -f -cloud -p !NODE_PORT! -d node%%x -z !ZK_HOST! !DASHM!
|
||||
echo !NODE_PORT!>%SOLR_TIP%\bin\solr-!NODE_PORT!.port
|
||||
)
|
||||
|
||||
timeout /T 10
|
||||
|
@ -630,8 +746,7 @@ IF "%NO_USER_PROMPT%"=="1" (
|
|||
set CLOUD_COLLECTION=gettingstarted
|
||||
set CLOUD_NUM_SHARDS=2
|
||||
set CLOUD_REPFACT=2
|
||||
set CLOUD_CONFIG=default
|
||||
set "CLOUD_CONFIG_DIR=%SOLR_TIP%\example\solr\collection1\conf"
|
||||
set CLOUD_CONFIG=data_driven_schema_configs
|
||||
goto create_collection
|
||||
) ELSE (
|
||||
goto get_create_collection_params
|
||||
|
@ -656,43 +771,17 @@ set CLOUD_REPFACT=!USER_INPUT!
|
|||
echo !CLOUD_REPFACT!
|
||||
set USER_INPUT=
|
||||
echo.
|
||||
set /P "USER_INPUT=Please choose a configuration for the !CLOUD_COLLECTION! collection, available options are: default or schemaless [default] "
|
||||
IF "!USER_INPUT!"=="" set USER_INPUT=default
|
||||
set /P "USER_INPUT=Please choose a configuration for the !CLOUD_COLLECTION! collection, available options are: basic_configs, data_driven_schema_configs, or sample_techproducts_configs [data_driven_schema_configs]"
|
||||
IF "!USER_INPUT!"=="" set USER_INPUT=data_driven_schema_configs
|
||||
set CLOUD_CONFIG=!USER_INPUT!
|
||||
echo !CLOUD_CONFIG!
|
||||
|
||||
IF "!CLOUD_CONFIG!"=="schemaless" (
|
||||
IF EXIST "%SOLR_TIP%\server\solr\configsets\schemaless" set "CLOUD_CONFIG_DIR=%SOLR_TIP%\server\solr\configsets\schemaless"
|
||||
IF NOT EXIST "%SOLR_TIP%\server\solr\configsets\schemaless" set "CLOUD_CONFIG_DIR=%SOLR_TIP%\example\example-schemaless\solr\collection1\conf"
|
||||
) ELSE (
|
||||
set "CLOUD_CONFIG_DIR=%SOLR_TIP%\example\solr\collection1\conf"
|
||||
)
|
||||
|
||||
goto create_collection
|
||||
|
||||
:create_collection
|
||||
set /A MAX_SHARDS_PER_NODE=((!CLOUD_NUM_SHARDS!*!CLOUD_REPFACT!)/!CLOUD_NUM_NODES!)+1
|
||||
|
||||
echo.
|
||||
echo Deploying default Solr configuration files to embedded ZooKeeper
|
||||
echo.
|
||||
"%JAVA%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
|
||||
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
|
||||
org.apache.solr.cloud.ZkCLI -zkhost %zk_host% -cmd upconfig -confdir "!CLOUD_CONFIG_DIR!" -confname !CLOUD_CONFIG!
|
||||
|
||||
set COLLECTIONS_API=http://localhost:!NODE1_PORT!/solr/admin/collections
|
||||
|
||||
set "CLOUD_CREATE_COLLECTION_CMD=%COLLECTIONS_API%?action=CREATE&name=%CLOUD_COLLECTION%&replicationFactor=%CLOUD_REPFACT%&numShards=%CLOUD_NUM_SHARDS%&collection.configName=!CLOUD_CONFIG!&maxShardsPerNode=%MAX_SHARDS_PER_NODE%&wt=json&indent=2"
|
||||
echo Creating new collection %CLOUD_COLLECTION% with %CLOUD_NUM_SHARDS% shards and replication factor %CLOUD_REPFACT% using Collections API command:
|
||||
echo.
|
||||
@echo "%CLOUD_CREATE_COLLECTION_CMD%"
|
||||
echo.
|
||||
echo For more information about the Collections API, please see: https://cwiki.apache.org/confluence/display/solr/Collections+API
|
||||
echo.
|
||||
|
||||
"%JAVA%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
|
||||
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
|
||||
org.apache.solr.util.SolrCLI api -get "%CLOUD_CREATE_COLLECTION_CMD%"
|
||||
org.apache.solr.util.SolrCLI create_collection -name !CLOUD_COLLECTION! -shards !CLOUD_NUM_SHARDS! -replicationFactor !CLOUD_REPFACT! ^
|
||||
-config !CLOUD_CONFIG! -configsetsDir "%SOLR_SERVER_DIR%\solr\configsets" -zkHost %zk_host%
|
||||
|
||||
echo.
|
||||
echo SolrCloud example is running, please visit http://localhost:%NODE1_PORT%/solr"
|
||||
|
@ -705,23 +794,22 @@ goto done
|
|||
:get_info
|
||||
REM Find all Java processes, correlate with those listening on a port
|
||||
REM and then try to contact via that port using the status tool
|
||||
for /f "tokens=2" %%a in ('tasklist ^| find "java.exe"') do (
|
||||
for /f "tokens=2,5" %%j in ('netstat -aon ^| find /i "listening"') do (
|
||||
if "%%k" EQU "%%a" (
|
||||
for /f "usebackq" %%i in (`dir /b %SOLR_TIP\bin% ^| findstr /i "^solr-.*\.port$"`) do (
|
||||
set SOME_SOLR_PORT=
|
||||
For /F "Delims=" %%J In (%SOLR_TIP%\bin\%%i) do set SOME_SOLR_PORT=%%~J
|
||||
if NOT "!SOME_SOLR_PORT!"=="" (
|
||||
for /f "tokens=2,5" %%j in ('netstat -aon ^| find /i "listening" ^| find /i "!SOME_SOLR_PORT!"') do (
|
||||
for /f "delims=: tokens=1,2" %%x IN ("%%j") do (
|
||||
if "0.0.0.0" EQU "%%x" (
|
||||
@echo.
|
||||
set has_info=1
|
||||
echo Found Solr process %%k running on port %%y
|
||||
echo Found Solr process %%k running on port !SOME_SOLR_PORT!
|
||||
"%JAVA%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
|
||||
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
|
||||
org.apache.solr.util.SolrCLI status -solr http://localhost:%%y/solr
|
||||
|
||||
org.apache.solr.util.SolrCLI status -solr http://localhost:!SOME_SOLR_PORT!/solr
|
||||
@echo.
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
if NOT "!has_info!"=="1" echo No running Solr nodes found.
|
||||
set has_info=
|
||||
|
@ -758,6 +846,111 @@ IF NOT DEFINED HEALTHCHECK_ZK_HOST set "HEALTHCHECK_ZK_HOST=localhost:9983"
|
|||
org.apache.solr.util.SolrCLI healthcheck -collection !HEALTHCHECK_COLLECTION! -zkHost !HEALTHCHECK_ZK_HOST!
|
||||
goto done
|
||||
|
||||
:parse_create_args
|
||||
IF [%1]==[] goto run_create
|
||||
IF "%1"=="-c" goto set_create_config
|
||||
IF "%1"=="-configset" goto set_create_config
|
||||
IF "%1"=="-n" goto set_create_name
|
||||
IF "%1"=="-name" goto set_create_name
|
||||
IF "%1"=="-shards" goto set_create_shards
|
||||
IF "%1"=="-replicationFactor" goto set_create_rf
|
||||
IF "%1"=="-help" goto usage
|
||||
IF "%1"=="-usage" goto usage
|
||||
IF "%1"=="/?" goto usage
|
||||
goto run_create
|
||||
|
||||
:set_create_configset
|
||||
set CREATE_CONFIGSET=%~2
|
||||
SHIFT
|
||||
SHIFT
|
||||
goto parse_create_args
|
||||
|
||||
:set_create_name
|
||||
set CREATE_NAME=%~2
|
||||
SHIFT
|
||||
SHIFT
|
||||
goto parse_create_args
|
||||
|
||||
:set_create_shards
|
||||
set CREATE_NUM_SHARDS=%~2
|
||||
SHIFT
|
||||
SHIFT
|
||||
goto parse_create_args
|
||||
|
||||
:set_create_rf
|
||||
set CREATE_REPFACT=%~2
|
||||
SHIFT
|
||||
SHIFT
|
||||
goto parse_create_args
|
||||
|
||||
:run_create
|
||||
IF "!CREATE_NAME!"=="" (
|
||||
set "SCRIPT_ERROR=Name (-n) is a required parameter for $SCRIPT_CMD"
|
||||
goto invalid_cmd_line
|
||||
)
|
||||
IF "!CREATE_CONFIGSET!"=="" set CREATE_CONFIGSET=data_driven_schema_configs
|
||||
IF "!CREATE_NUM_SHARDS!"=="" set CREATE_NUM_SHARDS=1
|
||||
IF "!CREATE_REPFACT!"=="" set CREATE_REPFACT=1
|
||||
|
||||
REM Find a port that Solr is running on
|
||||
set CREATE_PORT=0
|
||||
for /f "usebackq" %%i in (`dir /b %SOLR_TIP\bin% ^| findstr /i "^solr-.*\.port$"`) do (
|
||||
set SOME_SOLR_PORT=
|
||||
For /F "Delims=" %%J In (%SOLR_TIP%\bin\%%i) do set SOME_SOLR_PORT=%%~J
|
||||
if NOT "!SOME_SOLR_PORT!"=="" (
|
||||
for /f "tokens=2,5" %%j in ('netstat -aon ^| find /i "listening" ^| find /i "!SOME_SOLR_PORT!"') do (
|
||||
set CREATE_PORT=!SOME_SOLR_PORT!
|
||||
)
|
||||
)
|
||||
)
|
||||
if "!CREATE_PORT!" EQU "0" (
|
||||
set "SCRIPT_ERROR=Could not find a running Solr instance on this host!"
|
||||
goto err
|
||||
)
|
||||
|
||||
@echo Found Solr node running on port !CREATE_PORT!
|
||||
|
||||
@REM Determine if the Solr node is in cloud or standalone server mode
|
||||
"%JAVA%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
|
||||
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
|
||||
org.apache.solr.util.SolrCLI status -solr http://localhost:!CREATE_PORT!/solr > solr_status.txt
|
||||
set /p VAR=<solr_status.txt
|
||||
del solr_status.txt
|
||||
if "!VAR!"=="!VAR:ZooKeeper=!" (
|
||||
@REM Not in CloudMode - create core
|
||||
if "%SCRIPT_CMD%"=="create_core" (
|
||||
set "CREATE_URL=http://localhost:!CREATE_PORT!/solr/admin/cores?action=CREATE&name=!CREATE_NAME!&configSet=!CREATE_CONFIGSET!"
|
||||
@echo.
|
||||
@echo Creating new core using command:
|
||||
@echo !CREATE_URL!
|
||||
@echo.
|
||||
"%JAVA%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
|
||||
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
|
||||
org.apache.solr.util.SolrCLI api -get "!CREATE_URL!"
|
||||
) else (
|
||||
@echo.
|
||||
@echo ERROR: Solr running on port !CREATE_PORT! is running in standalone server mode, please use the create_core command instead.
|
||||
@echo %SCRIPT_CMD% can only be used when running in SolrCloud mode.
|
||||
@echo.
|
||||
goto done
|
||||
)
|
||||
) ELSE (
|
||||
@REM In CloudMode - create collection
|
||||
if "%SCRIPT_CMD%"=="create_collection" (
|
||||
"%JAVA%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
|
||||
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
|
||||
org.apache.solr.util.SolrCLI create_collection -name !CREATE_NAME! -shards !CREATE_NUM_SHARDS! -replicationFactor !CREATE_REPFACT! ^
|
||||
-config !CREATE_CONFIGSET! -configsetsDir "%SOLR_TIP%\server\solr\configsets" -solrUrl http://localhost:!CREATE_PORT!/solr
|
||||
) else (
|
||||
@echo.
|
||||
@echo ERROR: Solr running on port !CREATE_PORT! is running in SolrCloud mode, please use the create_collection command instead.
|
||||
@echo %SCRIPT_CMD% can only be used when running in standalone server mode.
|
||||
@echo.
|
||||
goto done
|
||||
)
|
||||
)
|
||||
goto done
|
||||
|
||||
:invalid_cmd_line
|
||||
@echo.
|
||||
IF "!SCRIPT_ERROR!"=="" (
|
||||
|
@ -776,6 +969,10 @@ IF "%FIRST_ARG%"=="start" (
|
|||
goto stop_usage
|
||||
) ELSE IF "%FIRST_ARG%"=="healthcheck" (
|
||||
goto healthcheck_usage
|
||||
) ELSE IF "%FIRST_ARG%"=="create_core" (
|
||||
goto create_core_usage
|
||||
) ELSE IF "%FIRST_ARG%"=="create_collection" (
|
||||
goto create_collection_usage
|
||||
) ELSE (
|
||||
goto script_usage
|
||||
)
|
||||
|
|
|
@@ -42,7 +42,7 @@
  <target name="example" description="Creates a runnable example configuration."
          depends="dist-contrib,dist-war">
    <copy file="${dist}/${fullnamever}.war"
          tofile="${example}/webapps/${ant.project.name}.war"/>
          tofile="${server.dir}/webapps/${ant.project.name}.war"/>
    <jar destfile="${example}/exampledocs/post.jar"
         basedir="${dest}/solr-core/classes/java"
         includes="org/apache/solr/util/SimplePostTool*.class">
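In a source checkout this keeps the familiar developer loop intact, just rooted in server/ instead of example/; roughly (assuming ant is set up and the bin/solr script from this change is used):

  cd solr
  ant example         # builds the war and copies it into server/webapps/solr.war
  bin/solr start -f   # runs the freshly packaged server in the foreground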
@@ -51,7 +51,7 @@
      </manifest>
    </jar>
    <delete includeemptydirs="true">
      <fileset dir="${example}/solr-webapp" includes="**/*"/>
      <fileset dir="${server.dir}/solr-webapp" includes="**/*"/>
    </delete>
    <echo>See ${example}/README.txt for how to run the Solr example configuration.</echo>
  </target>

@@ -66,7 +66,7 @@
    </condition>
    <property name="example.jvm.line" value=""/>
    <property name="example.heap.size" value="512M"/>
    <java jar="${example}/start.jar" fork="true" dir="${example}" maxmemory="${example.heap.size}">
    <java jar="${server.dir}/start.jar" fork="true" dir="${server.dir}" maxmemory="${example.heap.size}">
      <jvmarg line="${example.jvm.line}"/>
      <sysproperty key="solr.solr.home" file="${example.solr.home}"/>
      <sysproperty key="jetty.port" value="${example.jetty.port}"/>

@@ -254,9 +254,9 @@
    <license-check-macro dir="${basedir}" licensedir="${common-solr.dir}/licenses">
      <additional-excludes>
        <!-- Exclude start.jar only (it'd be weird to have a license file there?) -->
        <exclude name="example/start.jar" />
        <exclude name="server/start.jar" />
        <exclude name="example/exampledocs/post.jar" />
        <exclude name="example/solr-webapp/**" />
        <exclude name="server/solr-webapp/**" />
        <exclude name="package/**"/>
      </additional-excludes>
      <additional-filters>

@@ -295,10 +295,18 @@
    <delete dir="build" />
    <delete dir="dist" />
    <delete dir="package" />
    <delete dir="server/solr/lib" />
    <delete dir="example/solr/lib" />
    <delete includeemptydirs="true">
      <fileset dir="example">
        <include name="**/data/**/*" />
        <exclude name="**/.gitignore" />
      </fileset>
      <fileset dir="server">
        <include name="**/data/**/*" />
        <include name="solr/zoo_data" />
        <include name="start.jar" />
        <include name="logs/*" />
        <include name="webapps/**/*" />
        <include name="solr-webapp/**/*" />
        <exclude name="**/.gitignore" />

@@ -416,7 +424,7 @@
    <tarfileset dir="." prefix="${fullnamever}" includes="LICENSE.txt NOTICE.txt"/>
    <tarfileset dir="." prefix="${fullnamever}/solr"
                excludes="build/** ${package.dir.rel}/** ${dist.rel}/**
                          example/webapps/*.war example/lib/**
                          server/webapps/*.war example/lib/**
                          **/*.jar
                          lib/README.committers.txt **/data/ **/logs/*
                          **/*.sh **/bin/ scripts/

@@ -443,7 +451,7 @@
    </antcall>
    <mkdir dir="${dest}/${fullnamever}"/>
    <delete includeemptydirs="true">
      <fileset dir="${example}/solr-webapp" includes="**/*"/>
      <fileset dir="${server.dir}/solr-webapp" includes="**/*"/>
      <fileset dir="${dest}/${fullnamever}" includes="**/*"/>
    </delete>

@@ -461,7 +469,7 @@
    <tarfileset dir="."
                prefix="${fullnamever}"
                includes="LICENSE.txt NOTICE.txt CHANGES.txt README.txt SYSTEM_REQUIREMENTS.txt
                          example/** contrib/**/lib/** contrib/**/README.txt
                          bin/** server/** example/** contrib/**/lib/** contrib/**/README.txt
                          licenses/**"
                excludes="licenses/README.committers.txt **/data/ **/logs/*
                          **/classes/ **/*.sh **/ivy.xml **/build.xml

@@ -473,7 +481,7 @@
    <tarfileset dir="."
                filemode="755"
                prefix="${fullnamever}"
                includes="bin/* example/**/*.sh example/**/bin/" />
                includes="bin/* server/**/*.sh example/**/*.sh example/**/bin/" />
    <tarfileset dir="."
                prefix="${fullnamever}"
                includes="dist/*.jar

@@ -527,7 +535,7 @@
    <sign-artifacts-macro artifacts.dir="${package.dir}"/>
  </target>

  <target name="resolve" depends="resolve-example">
  <target name="resolve" depends="resolve-example,resolve-server">
    <sequential>
      <ant dir="core" target="resolve" inheritall="false">
        <propertyset refid="uptodate.and.compiled.properties"/>
@@ -37,6 +37,7 @@
  <property name="maven.dist.dir" location="${package.dir}/maven"/>
  <property name="lucene-libs" location="${dest}/lucene-libs" />
  <property name="tests.userdir" location="src/test-files"/>
  <property name="server.dir" location="${common-solr.dir}/server" />
  <property name="example" location="${common-solr.dir}/example" />
  <property name="javadoc.dir" location="${dest}/docs"/>
  <property name="tests.cleanthreads.sysprop" value="perClass"/>

@@ -70,7 +71,7 @@
  <path id="additional.dependencies">
    <fileset dir="${common-solr.dir}/core/lib" excludes="${common.classpath.excludes}"/>
    <fileset dir="${common-solr.dir}/solrj/lib" excludes="${common.classpath.excludes}"/>
    <fileset dir="${common-solr.dir}/example/lib" excludes="${common.classpath.excludes}"/>
    <fileset dir="${common-solr.dir}/server/lib" excludes="${common.classpath.excludes}"/>
    <fileset dir="${common-solr.dir}/example/example-DIH/solr/db/lib" excludes="${common.classpath.excludes}"/>
    <fileset dir="lib" excludes="${common.classpath.excludes}" erroronmissingdir="false"/>
  </path>

@@ -224,7 +225,7 @@
    <solr-jarify/>
  </target>

  <target name="compile-core" depends="prep-lucene-jars,resolve-example,common.compile-core"/>
  <target name="compile-core" depends="prep-lucene-jars,resolve-example,resolve-server,common.compile-core"/>
  <target name="compile-test" depends="compile-solr-test-framework,common.compile-test"/>

  <target name="dist" depends="jar-core">

@@ -424,15 +425,20 @@

  <!-- resolve dependencies in the example (relied upon by compile/tests) -->
  <target name="resolve-example" unless="example.libs.uptodate">
    <ant dir="${common-solr.dir}/example" target="resolve" inheritAll="false">
      <propertyset refid="uptodate.and.compiled.properties"/>
    </ant>
    <ant dir="${common-solr.dir}/example/example-DIH" target="resolve" inheritAll="false">
      <propertyset refid="uptodate.and.compiled.properties"/>
    </ant>
    <property name="example.libs.uptodate" value="true"/>
  </target>

  <!-- resolve dependencies in the server directory (relied upon by compile/tests) -->
  <target name="resolve-server" unless="server.libs.uptodate">
    <ant dir="${common-solr.dir}/server" target="resolve" inheritAll="false">
      <propertyset refid="uptodate.and.compiled.properties"/>
    </ant>
    <property name="server.libs.uptodate" value="true"/>
  </target>

  <macrodef name="contrib-crawl">
    <attribute name="target" default=""/>
    <attribute name="failonerror" default="true"/>
|
@ -17,6 +17,9 @@ package org.apache.solr.util;
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.io.PrintStream;
|
||||
import java.net.ConnectException;
|
||||
import java.net.SocketException;
|
||||
|
@ -40,12 +43,19 @@ import org.apache.commons.cli.Option;
|
|||
import org.apache.commons.cli.OptionBuilder;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.http.HttpEntity;
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.NoHttpResponseException;
|
||||
import org.apache.http.StatusLine;
|
||||
import org.apache.http.client.ClientProtocolException;
|
||||
import org.apache.http.client.HttpClient;
|
||||
import org.apache.http.client.HttpResponseException;
|
||||
import org.apache.http.client.ResponseHandler;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.client.utils.URIBuilder;
|
||||
import org.apache.http.conn.ConnectTimeoutException;
|
||||
import org.apache.http.impl.client.BasicResponseHandler;
|
||||
import org.apache.http.util.EntityUtils;
|
||||
import org.apache.log4j.Level;
|
||||
import org.apache.log4j.LogManager;
|
||||
import org.apache.log4j.Logger;
|
||||
|
@ -55,6 +65,7 @@ import org.apache.solr.client.solrj.impl.CloudSolrServer;
|
|||
import org.apache.solr.client.solrj.impl.HttpClientUtil;
|
||||
import org.apache.solr.client.solrj.impl.HttpSolrServer;
|
||||
import org.apache.solr.client.solrj.response.QueryResponse;
|
||||
import org.apache.solr.cloud.ZkController;
|
||||
import org.apache.solr.common.SolrException;
|
||||
import org.apache.solr.common.cloud.ClusterState;
|
||||
import org.apache.solr.common.cloud.Replica;
|
||||
|
@ -191,6 +202,8 @@ public class SolrCLI {
|
|||
return new StatusTool();
|
||||
else if ("api".equals(toolType))
|
||||
return new ApiTool();
|
||||
else if ("create_collection".equals(toolType))
|
||||
return new CreateCollectionTool();
|
||||
|
||||
// If you add a built-in tool to this class, add it here to avoid
|
||||
// classpath scanning
|
||||
|
@ -209,6 +222,7 @@ public class SolrCLI {
|
|||
formatter.printHelp("healthcheck", getToolOptions(new HealthcheckTool()));
|
||||
formatter.printHelp("status", getToolOptions(new StatusTool()));
|
||||
formatter.printHelp("api", getToolOptions(new ApiTool()));
|
||||
formatter.printHelp("create_collection", getToolOptions(new CreateCollectionTool()));
|
||||
|
||||
List<Class<Tool>> toolClasses = findToolClassesInPackage("org.apache.solr.util");
|
||||
for (Class<Tool> next : toolClasses) {
|
||||
|
@ -417,41 +431,44 @@ public class SolrCLI {
|
|||
return json;
|
||||
}
|
||||
|
||||
private static class SolrResponseHandler implements ResponseHandler<Map<String,Object>> {
|
||||
public Map<String,Object> handleResponse(HttpResponse response) throws ClientProtocolException, IOException {
|
||||
HttpEntity entity = response.getEntity();
|
||||
if (entity != null) {
|
||||
Object resp = ObjectBuilder.getVal(new JSONParser(EntityUtils.toString(entity)));
|
||||
if (resp != null && resp instanceof Map) {
|
||||
return (Map<String,Object>)resp;
|
||||
} else {
|
||||
throw new ClientProtocolException("Expected JSON object in response but received "+ resp);
|
||||
}
|
||||
} else {
|
||||
StatusLine statusLine = response.getStatusLine();
|
||||
throw new HttpResponseException(statusLine.getStatusCode(), statusLine.getReasonPhrase());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Utility function for sending HTTP GET request to Solr and then doing some
|
||||
* validation of the response.
|
||||
*/
|
||||
@SuppressWarnings({"unchecked"})
|
||||
public static Map<String,Object> getJson(HttpClient httpClient, String getUrl) throws Exception {
|
||||
Map<String,Object> json = null;
|
||||
|
||||
// ensure we're requesting JSON back from Solr
|
||||
HttpGet httpGet = new HttpGet(new URIBuilder(getUrl).setParameter("wt", "json").build());
|
||||
|
||||
//Will throw HttpResponseException if a non-ok response
|
||||
String content = httpClient.execute(httpGet, new BasicResponseHandler());
|
||||
|
||||
Object resp = ObjectBuilder.getVal(new JSONParser(content));
|
||||
if (resp != null && resp instanceof Map) {
|
||||
json = (Map<String,Object>)resp;
|
||||
} else {
|
||||
throw new SolrServerException("Expected JSON object in response from "+
|
||||
getUrl+" but received "+ resp);
|
||||
}
|
||||
|
||||
// lastly check the response JSON from Solr to see if it is an error
|
||||
// make the request and get back a parsed JSON object
|
||||
Map<String,Object> json = httpClient.execute(httpGet, new SolrResponseHandler());
|
||||
// check the response JSON from Solr to see if it is an error
|
||||
Long statusCode = asLong("/responseHeader/status", json);
|
||||
|
||||
if (statusCode == -1) {
|
||||
throw new SolrServerException("Unable to determine outcome of GET request to: "+
|
||||
getUrl+"! Response: "+json);
|
||||
} else if (statusCode != 0) {
|
||||
String errMsg = asString("/error/msg", json);
|
||||
|
||||
errMsg = errMsg == null ? String.valueOf(json) : errMsg;
|
||||
if (errMsg == null)
|
||||
errMsg = String.valueOf(json);
|
||||
throw new SolrServerException("Request to "+getUrl+" failed due to: "+errMsg);
|
||||
}
|
||||
|
||||
return json;
|
||||
}
|
||||
|
||||
|
@ -599,7 +616,7 @@ public class SolrCLI {
|
|||
return exitCode;
|
||||
}
|
||||
|
||||
protected Map<String,Object> reportStatus(String solrUrl, Map<String,Object> info, HttpClient httpClient)
|
||||
public Map<String,Object> reportStatus(String solrUrl, Map<String,Object> info, HttpClient httpClient)
|
||||
throws Exception
|
||||
{
|
||||
Map<String,Object> status = new LinkedHashMap<String,Object>();
|
||||
|
@ -614,26 +631,7 @@ public class SolrCLI {
|
|||
|
||||
// if this is a Solr in solrcloud mode, gather some basic cluster info
|
||||
if ("solrcloud".equals(info.get("mode"))) {
|
||||
|
||||
// TODO: Need a better way to get the zkHost from a running server
|
||||
// as it can be set from solr.xml vs. on the command-line
|
||||
String zkHost = null;
|
||||
List<String> args = asList("/jvm/jmx/commandLineArgs", info);
|
||||
if (args != null) {
|
||||
for (String arg : args) {
|
||||
if (arg.startsWith("-DzkHost=")) {
|
||||
zkHost = arg.substring("-DzkHost=".length());
|
||||
break;
|
||||
} else if (arg.startsWith("-DzkRun")) {
|
||||
URL serverUrl = new URL(solrUrl);
|
||||
String host = serverUrl.getHost();
|
||||
int port = serverUrl.getPort();
|
||||
zkHost = host+":"+(port+1000)+" (embedded)";
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
String zkHost = (String)info.get("zkHost");
|
||||
status.put("cloud", getCloudStatus(httpClient, solrUrl, zkHost));
|
||||
}
|
||||
|
||||
|
@ -703,6 +701,195 @@ public class SolrCLI {
|
|||
}
|
||||
} // end ApiTool class
|
||||
|
||||
/**
|
||||
* Supports create_collection command in the bin/solr script.
|
||||
*/
|
||||
public static class CreateCollectionTool implements Tool {
|
||||
|
||||
private static final String DEFAULT_CONFIG_SET = "data_driven_schema_configs";
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "create_collection";
|
||||
}
|
||||
|
||||
@SuppressWarnings("static-access")
|
||||
@Override
|
||||
public Option[] getOptions() {
|
||||
return new Option[] {
|
||||
OptionBuilder
|
||||
.withArgName("HOST")
|
||||
.hasArg()
|
||||
.isRequired(false)
|
||||
.withDescription("Address of the Zookeeper ensemble; defaults to: "+ZK_HOST)
|
||||
.create("zkHost"),
|
||||
OptionBuilder
|
||||
.withArgName("HOST")
|
||||
.hasArg()
|
||||
.isRequired(false)
|
||||
.withDescription("Base Solr URL, which can be used to determine the zkHost if that's not known")
|
||||
.create("solrUrl"),
|
||||
OptionBuilder
|
||||
.withArgName("NAME")
|
||||
.hasArg()
|
||||
.isRequired(true)
|
||||
.withDescription("Name of collection to create.")
|
||||
.create("name"),
|
||||
OptionBuilder
|
||||
.withArgName("#")
|
||||
.hasArg()
|
||||
.isRequired(false)
|
||||
.withDescription("Number of shards; default is 1")
|
||||
.create("shards"),
|
||||
OptionBuilder
|
||||
.withArgName("#")
|
||||
.hasArg()
|
||||
.isRequired(false)
|
||||
.withDescription("Number of copies of each document across the collection (replicas per shard); default is 1")
|
||||
.create("replicationFactor"),
|
||||
OptionBuilder
|
||||
.withArgName("#")
|
||||
.hasArg()
|
||||
.isRequired(false)
|
||||
.withDescription("Maximum number of shards per Solr node; default is determined based on the number of shards, replication factor, and live nodes.")
|
||||
.create("maxShardsPerNode"),
|
||||
OptionBuilder
|
||||
.withArgName("NAME")
|
||||
.hasArg()
|
||||
.isRequired(false)
|
||||
.withDescription("Name of the configuration for this collection; default is "+DEFAULT_CONFIG_SET)
|
||||
.create("config"),
|
||||
OptionBuilder
|
||||
.withArgName("DIR")
|
||||
.hasArg()
|
||||
.isRequired(true)
|
||||
.withDescription("Path to configsets directory on the local system.")
|
||||
.create("configsetsDir")
|
||||
};
|
||||
}
|
||||
|
||||
public int runTool(CommandLine cli) throws Exception {
|
||||
|
||||
// quiet down the ZK logging for cli tools
|
||||
LogManager.getLogger("org.apache.zookeeper").setLevel(Level.ERROR);
|
||||
LogManager.getLogger("org.apache.solr.common.cloud").setLevel(Level.WARN);
|
||||
|
||||
String zkHost = cli.getOptionValue("zkHost");
|
||||
if (zkHost == null) {
|
||||
// find it using the localPort
|
||||
String solrUrl = cli.getOptionValue("solrUrl");
|
||||
if (solrUrl == null)
|
||||
throw new IllegalStateException(
|
||||
"Must provide either the -zkHost or -solrUrl parameters to use the create_collection command!");
|
||||
|
||||
if (!solrUrl.endsWith("/"))
|
||||
solrUrl += "/";
|
||||
|
||||
String systemInfoUrl = solrUrl+"admin/info/system";
|
||||
HttpClient httpClient = getHttpClient();
|
||||
try {
|
||||
// hit Solr to get system info
|
||||
Map<String,Object> systemInfo = getJson(httpClient, systemInfoUrl, 2);
|
||||
|
||||
// convert raw JSON into user-friendly output
|
||||
StatusTool statusTool = new StatusTool();
|
||||
Map<String,Object> status = statusTool.reportStatus(solrUrl, systemInfo, httpClient);
|
||||
|
||||
Map<String,Object> cloud = (Map<String, Object>)status.get("cloud");
|
||||
if (cloud == null)
|
||||
throw new IllegalArgumentException("Solr server at "+solrUrl+" not running in SolrCloud mode!");
|
||||
|
||||
String zookeeper = (String) cloud.get("ZooKeeper");
|
||||
if (zookeeper.endsWith("(embedded)")) {
|
||||
zookeeper = zookeeper.substring(0,zookeeper.length()-"(embedded)".length());
|
||||
}
|
||||
zkHost = zookeeper;
|
||||
} finally {
|
||||
closeHttpClient(httpClient);
|
||||
}
|
||||
}
|
||||
|
||||
CloudSolrServer cloudSolrServer = null;
|
||||
try {
|
||||
cloudSolrServer = new CloudSolrServer(zkHost);
|
||||
System.out.println("Connecting to ZooKeeper at "+zkHost);
|
||||
cloudSolrServer.connect();
|
||||
runCloudTool(cloudSolrServer, cli);
|
||||
} finally {
|
||||
if (cloudSolrServer != null) {
|
||||
try {
|
||||
cloudSolrServer.shutdown();
|
||||
} catch (Exception ignore) {}
|
||||
}
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
protected void runCloudTool(CloudSolrServer cloudSolrServer, CommandLine cli) throws Exception {
|
||||
Set<String> liveNodes = cloudSolrServer.getZkStateReader().getClusterState().getLiveNodes();
|
||||
if (liveNodes.isEmpty())
|
||||
throw new IllegalStateException("No live nodes found! Cannot create a collection until " +
|
||||
"there is at least 1 live node in the cluster.");
|
||||
String firstLiveNode = liveNodes.iterator().next();
|
||||
|
||||
// build a URL to create the collection
|
||||
int numShards = optionAsInt(cli, "shards", 1);
|
||||
int replicationFactor = optionAsInt(cli, "replicationFactor", 1);
|
||||
int maxShardsPerNode = -1;
|
||||
|
||||
if (cli.hasOption("maxShardsPerNode")) {
|
||||
maxShardsPerNode = Integer.parseInt(cli.getOptionValue("maxShardsPerNode"));
|
||||
} else {
|
||||
// need number of live nodes to determine maxShardsPerNode if it is not set
|
||||
int numNodes = liveNodes.size();
|
||||
maxShardsPerNode = ((numShards*replicationFactor)+numNodes-1)/numNodes;
|
||||
}
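// Illustrative arithmetic (numbers are assumptions): with numShards=2,
// replicationFactor=2 and 3 live nodes, this computes ((2*2)+3-1)/3 = 2,
// i.e. the ceiling of total replicas divided by live nodes, so the
// collection can always be placed on the available nodes.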
|
||||
|
||||
String configSet = cli.getOptionValue("config", DEFAULT_CONFIG_SET);
|
||||
// first, test to see if that config exists in ZK
|
||||
if (!cloudSolrServer.getZkStateReader().getZkClient().exists("/configs/"+configSet, true)) {
|
||||
File configsetsDir = new File(cli.getOptionValue("configsetsDir"));
|
||||
if (!configsetsDir.isDirectory())
|
||||
throw new FileNotFoundException(configsetsDir.getAbsolutePath()+" not found!");
|
||||
|
||||
// upload the configset if it exists
|
||||
File configSetDir = new File(configsetsDir, configSet);
|
||||
if (!configSetDir.isDirectory())
|
||||
throw new FileNotFoundException("Specified config "+configSet+
|
||||
" not found in "+configsetsDir.getAbsolutePath());
|
||||
|
||||
File confDir = new File(configSetDir,"conf");
|
||||
System.out.println("Uploading "+confDir.getAbsolutePath()+
|
||||
" for config "+configSet+" to ZooKeeper at "+cloudSolrServer.getZkHost());
|
||||
ZkController.uploadConfigDir(cloudSolrServer.getZkStateReader().getZkClient(), confDir, configSet);
|
||||
}
|
||||
|
||||
String baseUrl = cloudSolrServer.getZkStateReader().getBaseUrlForNodeName(firstLiveNode);
|
||||
String collectionName = cli.getOptionValue("name");
|
||||
String createCollectionUrl =
|
||||
String.format(Locale.ROOT,
|
||||
"%s/admin/collections?action=CREATE&name=%s&numShards=%d&replicationFactor=%d&maxShardsPerNode=%d&configSet=%s",
|
||||
baseUrl,
|
||||
collectionName,
|
||||
numShards,
|
||||
replicationFactor,
|
||||
maxShardsPerNode,
|
||||
configSet);
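// For illustration only (values are assumptions): with -name films, -shards 2,
// -replicationFactor 1 and a single live node at http://localhost:8983/solr,
// the generated URL would look like:
//   http://localhost:8983/solr/admin/collections?action=CREATE&name=films&numShards=2&replicationFactor=1&maxShardsPerNode=2&configSet=data_driven_schema_configs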
|
||||
|
||||
System.out.println("Creating new collection '"+collectionName+"' using command:\n\n"+createCollectionUrl+"\n");
|
||||
|
||||
Map<String,Object> json = getJson(createCollectionUrl);
|
||||
CharArr arr = new CharArr();
|
||||
new JSONWriter(arr, 2).write(json);
|
||||
System.out.println(arr.toString());
|
||||
}
|
||||
|
||||
protected int optionAsInt(CommandLine cli, String option, int defaultVal) {
|
||||
return Integer.parseInt(cli.getOptionValue(option, String.valueOf(defaultVal)));
|
||||
}
|
||||
} // end CreateCollectionTool class
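// Illustrative invocation (an assumption, not shown in this patch): if the bin/solr
// script forwards its arguments to this tool, a collection could be created with:
//
//   bin/solr create_collection -name mycollection -shards 2 -replicationFactor 1 \
//       -config sample_techproducts_configs -configsetsDir server/solr/configsets \
//       -solrUrl http://localhost:8983/solr
//
// When only -solrUrl is given, the tool looks up zkHost from the running node,
// uploads the configset to ZooKeeper if it is not already there, and then issues
// the Collections API CREATE request built in runCloudTool above.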
|
||||
|
||||
private static final long MS_IN_MIN = 60 * 1000L;
|
||||
private static final long MS_IN_HOUR = MS_IN_MIN * 60L;
|
||||
private static final long MS_IN_DAY = MS_IN_HOUR * 24L;
|
||||
|
|
|
@ -19,6 +19,7 @@ package org.apache.solr.cloud;
|
|||
|
||||
import java.io.File;
|
||||
|
||||
import org.apache.solr.SolrJettyTestBase;
|
||||
import org.apache.solr.SolrTestCaseJ4;
|
||||
import org.apache.solr.common.cloud.SolrZkClient;
|
||||
import org.apache.solr.common.cloud.ZooKeeperException;
|
||||
|
@ -46,7 +47,7 @@ public class TestZkChroot extends SolrTestCaseJ4 {
|
|||
zkDir = createTempDir("zkData").toFile().getAbsolutePath();
|
||||
zkServer = new ZkTestServer(zkDir);
|
||||
zkServer.run();
|
||||
home = ExternalPaths.EXAMPLE_HOME;
|
||||
home = SolrJettyTestBase.legacyExampleCollection1SolrHome();
|
||||
|
||||
}
|
||||
|
||||
|
|
|
@ -29,6 +29,7 @@ import org.apache.commons.io.FileUtils;
|
|||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.io.filefilter.RegexFileFilter;
|
||||
import org.apache.commons.io.filefilter.TrueFileFilter;
|
||||
import org.apache.solr.SolrJettyTestBase;
|
||||
import org.apache.solr.SolrTestCaseJ4;
|
||||
import org.apache.solr.common.SolrException;
|
||||
import org.apache.solr.common.cloud.SolrZkClient;
|
||||
|
@ -76,13 +77,15 @@ public class ZkCLITest extends SolrTestCaseJ4 {
|
|||
super.setUp();
|
||||
log.info("####SETUP_START " + getTestName());
|
||||
|
||||
String exampleHome = SolrJettyTestBase.legacyExampleCollection1SolrHome();
|
||||
|
||||
boolean useNewSolrXml = random().nextBoolean();
|
||||
File tmpDir = createTempDir().toFile();
|
||||
if (useNewSolrXml) {
|
||||
solrHome = ExternalPaths.EXAMPLE_HOME;
|
||||
solrHome = exampleHome;
|
||||
} else {
|
||||
File tmpSolrHome = new File(tmpDir, "tmp-solr-home");
|
||||
FileUtils.copyDirectory(new File(ExternalPaths.EXAMPLE_HOME), tmpSolrHome);
|
||||
FileUtils.copyDirectory(new File(exampleHome), tmpSolrHome);
|
||||
FileUtils.copyFile(getFile("old-solr-example/solr.xml"), new File(tmpSolrHome, "solr.xml"));
|
||||
solrHome = tmpSolrHome.getAbsolutePath();
|
||||
}
|
||||
|
@ -214,8 +217,7 @@ public class ZkCLITest extends SolrTestCaseJ4 {
|
|||
"-cmd",
|
||||
"upconfig",
|
||||
"-confdir",
|
||||
ExternalPaths.EXAMPLE_HOME + File.separator + "collection1"
|
||||
+ File.separator + "conf", "-confname", confsetname};
|
||||
ExternalPaths.TECHPRODUCTS_CONFIGSET, "-confname", confsetname};
|
||||
ZkCLI.main(args);
|
||||
|
||||
assertTrue(zkClient.exists(ZkController.CONFIGS_ZKNODE + "/" + confsetname, true));
|
||||
|
@ -245,13 +247,12 @@ public class ZkCLITest extends SolrTestCaseJ4 {
|
|||
List<String> zkFiles = zkClient.getChildren(ZkController.CONFIGS_ZKNODE + "/" + confsetname, null, true);
|
||||
assertEquals(files.length, zkFiles.size());
|
||||
|
||||
File sourceConfDir = new File(ExternalPaths.EXAMPLE_HOME + File.separator + "collection1"
|
||||
+ File.separator + "conf");
|
||||
File sourceConfDir = new File(ExternalPaths.TECHPRODUCTS_CONFIGSET);
|
||||
// filter out all directories starting with . (e.g. .svn)
|
||||
Collection<File> sourceFiles = FileUtils.listFiles(sourceConfDir, TrueFileFilter.INSTANCE, new RegexFileFilter("[^\\.].*"));
|
||||
for (File sourceFile :sourceFiles){
|
||||
int indexOfRelativePath = sourceFile.getAbsolutePath().lastIndexOf("collection1" + File.separator + "conf");
|
||||
String relativePathofFile = sourceFile.getAbsolutePath().substring(indexOfRelativePath + 17, sourceFile.getAbsolutePath().length());
|
||||
int indexOfRelativePath = sourceFile.getAbsolutePath().lastIndexOf("sample_techproducts_configs" + File.separator + "conf");
|
||||
String relativePathofFile = sourceFile.getAbsolutePath().substring(indexOfRelativePath + 33, sourceFile.getAbsolutePath().length());
|
||||
File downloadedFile = new File(confDir,relativePathofFile);
|
||||
assertTrue(downloadedFile.getAbsolutePath() + " does not exist source:" + sourceFile.getAbsolutePath(), downloadedFile.exists());
|
||||
assertTrue("Content didn't change",FileUtils.contentEquals(sourceFile,downloadedFile));
|
||||
|
|
|
@ -24,6 +24,7 @@ import java.util.List;
|
|||
import java.util.Map;
|
||||
|
||||
import org.apache.lucene.util.LuceneTestCase.Slow;
|
||||
import org.apache.solr.SolrJettyTestBase;
|
||||
import org.apache.solr.SolrTestCaseJ4;
|
||||
import org.apache.solr.common.cloud.SolrZkClient;
|
||||
import org.apache.solr.common.cloud.ZkNodeProps;
|
||||
|
@ -232,11 +233,13 @@ public class ZkControllerTest extends SolrTestCaseJ4 {
|
|||
}
|
||||
});
|
||||
|
||||
zkController.uploadToZK(new File(ExternalPaths.EXAMPLE_HOME + "/collection1/conf"),
|
||||
String solrHome = SolrJettyTestBase.legacyExampleCollection1SolrHome();
|
||||
|
||||
zkController.uploadToZK(new File(solrHome + "/collection1/conf"),
|
||||
ZkController.CONFIGS_ZKNODE + "/config1");
|
||||
|
||||
// uploading again should overwrite, not error...
|
||||
zkController.uploadToZK(new File(ExternalPaths.EXAMPLE_HOME + "/collection1/conf"),
|
||||
zkController.uploadToZK(new File(solrHome + "/collection1/conf"),
|
||||
ZkController.CONFIGS_ZKNODE + "/config1");
|
||||
|
||||
if (DEBUG) {
|
||||
|
|
|
@ -43,7 +43,7 @@ public class ShowFileRequestHandlerTest extends SolrJettyTestBase {
|
|||
|
||||
@BeforeClass
|
||||
public static void beforeTest() throws Exception {
|
||||
createJetty(ExternalPaths.EXAMPLE_HOME, null, null);
|
||||
createJetty(legacyExampleCollection1SolrHome(), null, null);
|
||||
}
|
||||
|
||||
public void test404ViaHttp() throws SolrServerException {
|
||||
|
|
|
@ -16,14 +16,25 @@
|
|||
Solr example
|
||||
------------
|
||||
|
||||
This directory contains an instance of the Jetty Servlet container setup to
|
||||
run Solr using an example configuration.
|
||||
This directory contains Solr examples. Each example is contained in a
|
||||
separate directory. To run a specific example, do:
|
||||
|
||||
To run this example:
|
||||
bin/solr -e <EXAMPLE> where <EXAMPLE> is one of:
|
||||
|
||||
java -jar start.jar
|
||||
cloud : SolrCloud example
|
||||
dih : Data Import Handler (rdbms, mail, rss, tika)
|
||||
schemaless : Schema-less example (schema is inferred from data during indexing)
|
||||
techproducts : Kitchen sink example providing comprehensive examples of Solr features
|
||||
|
||||
in this example directory, and when Solr is started connect to
|
||||
For instance, if you want to run the Solr Data Import Handler example, do:
|
||||
|
||||
bin/solr -e dih
|
||||
|
||||
To see all the options available when starting Solr:
|
||||
|
||||
bin/solr start -help
|
||||
|
||||
After starting a Solr example, direct your Web browser to:
|
||||
|
||||
http://localhost:8983/solr/
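When you are done with an example, you can typically stop all running Solr
nodes with (see bin/solr -help for the options supported by your release):

  bin/solr stop -all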
|
||||
|
||||
|
@ -46,16 +57,6 @@ For more information about this example please read...
|
|||
Notes About These Examples
|
||||
--------------------------
|
||||
|
||||
* SolrHome *
|
||||
|
||||
By default, start.jar starts Solr in Jetty using the default Solr Home
|
||||
directory of "./solr/" (relative to the working directory of hte servlet
|
||||
container). To run other example configurations, you can specify the
|
||||
solr.solr.home system property when starting jetty...
|
||||
|
||||
java -Dsolr.solr.home=multicore -jar start.jar
|
||||
java -Dsolr.solr.home=example-DIH/solr -jar start.jar
|
||||
|
||||
* References to Jar Files Outside This Directory *
|
||||
|
||||
Various example SolrHome dirs contained in this directory may use "<lib>"
|
||||
|
|
|
@ -1 +0,0 @@
|
|||
name=collection1
|
|
@ -1,50 +0,0 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
Example SolrCore Instance Directory
|
||||
=============================
|
||||
|
||||
This directory is provided as an example of what an "Instance Directory"
|
||||
should look like for a SolrCore
|
||||
|
||||
It's not strictly necessary that you copy all of the files in this
|
||||
directory when setting up a new SolrCores, but it is recommended.
|
||||
|
||||
|
||||
Basic Directory Structure
|
||||
-------------------------
|
||||
|
||||
The Solr Home directory typically contains the following sub-directories...
|
||||
|
||||
conf/
|
||||
This directory is mandatory and must contain your solrconfig.xml
|
||||
and schema.xml. Any other optional configuration files would also
|
||||
be kept here.
|
||||
|
||||
data/
|
||||
This directory is the default location where Solr will keep your
|
||||
index, and is used by the replication scripts for dealing with
|
||||
snapshots. You can override this location in the
|
||||
conf/solrconfig.xml. Solr will create this directory if it does not
|
||||
already exist.
|
||||
|
||||
lib/
|
||||
This directory is optional. If it exists, Solr will load any Jars
|
||||
found in this directory and use them to resolve any "plugins"
|
||||
specified in your solrconfig.xml or schema.xml (ie: Analyzers,
|
||||
Request Handlers, etc...). Alternatively you can use the <lib>
|
||||
syntax in conf/solrconfig.xml to direct Solr to your plugins. See
|
||||
the example conf/solrconfig.xml file for details.
|
|
@ -1 +0,0 @@
|
|||
name=collection1
|
|
@ -1,50 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8" ?>
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
|
||||
<!--
|
||||
This is an example of a simple "solr.xml" file for configuring one or
|
||||
more Solr Cores, as well as allowing Cores to be added, removed, and
|
||||
reloaded via HTTP requests.
|
||||
|
||||
More information about options available in this configuration file,
|
||||
and Solr Core administration can be found online:
|
||||
http://wiki.apache.org/solr/CoreAdmin
|
||||
-->
|
||||
|
||||
<solr>
|
||||
|
||||
<solrcloud>
|
||||
<str name="host">${host:}</str>
|
||||
<int name="hostPort">${jetty.port:8983}</int>
|
||||
<str name="hostContext">${hostContext:solr}</str>
|
||||
<int name="zkClientTimeout">${zkClientTimeout:30000}</int>
|
||||
<bool name="genericCoreNodeNames">${genericCoreNodeNames:true}</bool>
|
||||
|
||||
<!-- ZooKeeper Security -->
|
||||
<str name="zkACLProvider">${zkACLProvider:}</str>
|
||||
<str name="zkCredentialProvider">${zkCredentialProvider:}</str>
|
||||
|
||||
</solrcloud>
|
||||
|
||||
<shardHandlerFactory name="shardHandlerFactory"
|
||||
class="HttpShardHandlerFactory">
|
||||
<int name="socketTimeout">${socketTimeout:0}</int>
|
||||
<int name="connTimeout">${connTimeout:0}</int>
|
||||
</shardHandlerFactory>
|
||||
|
||||
</solr>
|
|
@ -0,0 +1,113 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
Solr server
|
||||
------------
|
||||
|
||||
This directory contains an instance of the Jetty Servlet container set up to
|
||||
run Solr.
|
||||
|
||||
To run Solr:
|
||||
|
||||
cd $SOLR_INSTALL
|
||||
bin/solr start
|
||||
|
||||
where $SOLR_INSTALL is the location where you extracted the Solr installation bundle.
|
||||
|
||||
Server directory layout
|
||||
-----------------------
|
||||
|
||||
server/contexts
|
||||
|
||||
This directory contains the Jetty Web application deployment descriptor for the Solr Web app.
|
||||
|
||||
server/etc
|
||||
|
||||
Jetty configuration and example SSL keystore
|
||||
|
||||
server/lib
|
||||
|
||||
Jetty and other 3rd party libraries
|
||||
|
||||
server/logs
|
||||
|
||||
Solr log files
|
||||
|
||||
server/resources
|
||||
|
||||
Contains configuration files, such as the Log4j configuration (log4j.properties) for configuring Solr loggers.
|
||||
|
||||
server/scripts/cloud-scripts
|
||||
|
||||
Command-line utility for working with ZooKeeper when running in SolrCloud mode, see zkcli.sh / .cmd for
|
||||
usage information.
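  For illustration (host, port and paths here are assumptions), a configuration
  directory could be uploaded to ZooKeeper with something like:

    server/scripts/cloud-scripts/zkcli.sh -zkhost localhost:9983 \
      -cmd upconfig -confdir server/solr/configsets/basic_configs/conf -confname myconf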
|
||||
|
||||
server/solr
|
||||
|
||||
Default solr.solr.home directory where Solr will create core directories; must contain solr.xml
|
||||
|
||||
server/solr/configsets
|
||||
|
||||
Directories containing different configuration options for running Solr.
|
||||
|
||||
basic_configs : Bare minimum configuration settings needed to run Solr.
|
||||
|
||||
data_driven_schema_configs : Field-guessing and managed schema mode; use this configuration if you want
|
||||
to start indexing data in Solr without having to design a schema upfront.
|
||||
You can use the REST API to manage your schema as you refine your index
|
||||
requirements.
|
||||
|
||||
sample_techproducts_configs : Comprehensive example configuration that demonstrates many of the powerful
|
||||
features of Solr, based on the use case of building a search solution for
|
||||
tech products.
|
||||
|
||||
server/solr-webapp
|
||||
|
||||
Jetty will extract the solr.war into this directory at runtime.
|
||||
|
||||
server/webapps
|
||||
|
||||
Contains the solr.war file.
|
||||
|
||||
|
||||
Notes About Solr Examples
|
||||
--------------------------
|
||||
|
||||
* SolrHome *
|
||||
|
||||
By default, start.jar starts Solr in Jetty using the default Solr Home
|
||||
directory of "./solr/" (relative to the working directory of the servlet
|
||||
container).
|
||||
|
||||
* References to Jar Files Outside This Directory *
|
||||
|
||||
Various example SolrHome dirs contained in this directory may use "<lib>"
|
||||
statements in the solrconfig.xml file to reference plugin jars outside of
|
||||
this directory for loading "contrib" plugins via relative paths.
|
||||
|
||||
If you make a copy of this example server and wish to use the
|
||||
ExtractingRequestHandler (SolrCell), DataImportHandler (DIH), UIMA, the
|
||||
clustering component, or any other modules in "contrib", you will need to
|
||||
copy the required jars or update the paths to those jars in your
|
||||
solrconfig.xml.
|
||||
|
||||
* Logging *
|
||||
|
||||
By default, Jetty & Solr will log to the console and to logs/solr.log. This can be convenient when
|
||||
first getting started, but eventually you will want to log just to a file. To
|
||||
configure logging, edit the log4j.properties file in "resources".
|
||||
|
||||
It is also possible to set up log4j or other popular logging frameworks.
|
||||
|
|
@ -15,8 +15,8 @@
|
|||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
<project name="solr-example" default="resolve" xmlns:ivy="antlib:org.apache.ivy.ant">
|
||||
<description>Solr Example</description>
|
||||
<project name="solr-server" default="resolve" xmlns:ivy="antlib:org.apache.ivy.ant">
|
||||
<description>Solr Server</description>
|
||||
|
||||
<import file="../common-build.xml"/>
|
||||
|
|
@ -14,14 +14,11 @@
|
|||
# limitations under the License.
|
||||
|
||||
|
||||
Example Solr Home Directory
|
||||
Default Solr Home Directory
|
||||
=============================
|
||||
|
||||
This directory is provided as an example of what a "Solr Home" directory
|
||||
should look like.
|
||||
|
||||
It's not strictly necessary that you copy all of the files in this
|
||||
directory when setting up a new instance of Solr, but it is recommended.
|
||||
This directory is the default Solr home directory which holds
|
||||
configuration files and Solr indexes (called cores).
|
||||
|
||||
|
||||
Basic Directory Structure
|
||||
|
@ -31,22 +28,39 @@ The Solr Home directory typically contains the following...
|
|||
|
||||
* solr.xml *
|
||||
|
||||
This is the primary configuration file Solr looks for when starting.
|
||||
This file specifies the list of "SolrCores" it should load, and high
|
||||
level configuration options that should be used for all SolrCores.
|
||||
This is the primary configuration file Solr looks for when starting;
|
||||
it specifies high-level configuration options that apply to all
|
||||
of your Solr cores, such as cluster-wide SolrCloud settings like
|
||||
the ZooKeeper client timeout.
|
||||
|
||||
Please see the comments in ./solr.xml for more details.
|
||||
In addition, you can also declare Solr cores in this file, however
|
||||
it is recommended to just use automatic core discovery instead of
|
||||
listing cores in solr.xml.
|
||||
|
||||
If no solr.xml file is found, then Solr assumes that there should be
|
||||
a single SolrCore named "collection1" and that the "Instance Directory"
|
||||
for collection1 should be the same as the Solr Home Directory.
|
||||
|
||||
For more information about solr.xml, please see:
|
||||
https://cwiki.apache.org/confluence/display/solr/Solr+Cores+and+solr.xml
|
||||
|
||||
* Individual SolrCore Instance Directories *
|
||||
|
||||
Although solr.xml can be configured to look for SolrCore Instance Directories
|
||||
in any path, simple sub-directories of the Solr Home Dir using relative paths
|
||||
are common for many installations. In this directory you can see the
|
||||
"./collection1" Instance Directory.
|
||||
are common for many installations.
|
||||
|
||||
* Core Discovery *
|
||||
|
||||
During startup, Solr will scan sub-directories of Solr home looking for
|
||||
a specific file named core.properties. If core.properties is found in a
|
||||
sub-directory (at any depth), Solr will initialize a core using the properties
|
||||
defined in core.properties. For an example of core.properties, please see:
|
||||
|
||||
example/solr/collection1/core.properties
|
||||
|
||||
For more information about core discovery, please see:
|
||||
https://cwiki.apache.org/confluence/display/solr/Moving+to+the+New+solr.xml+Format
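  For example, a minimal core.properties may contain nothing more than the core
  name on a single line:

    name=collection1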
|
||||
|
||||
* A Shared 'lib' Directory *
|
||||
|
|
@ -0,0 +1,529 @@
|
|||
<?xml version="1.0" encoding="UTF-8" ?>
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
|
||||
<!--
|
||||
This is the Solr schema file. This file should be named "schema.xml" and
|
||||
should be in the conf directory under the solr home
|
||||
(i.e. ./solr/conf/schema.xml by default)
|
||||
or located where the classloader for the Solr webapp can find it.
|
||||
|
||||
This example schema is the recommended starting point for users.
|
||||
It should be kept correct and concise, usable out-of-the-box.
|
||||
|
||||
For more information on how to customize this file, please see
|
||||
http://wiki.apache.org/solr/SchemaXml
|
||||
-->
|
||||
|
||||
<schema name="example" version="1.5">
|
||||
<!-- attribute "name" is the name of this schema and is only used for display purposes.
|
||||
version="x.y" is Solr's version number for the schema syntax and
|
||||
semantics. It should not normally be changed by applications.
|
||||
|
||||
1.0: multiValued attribute did not exist, all fields are multiValued
|
||||
by nature
|
||||
1.1: multiValued attribute introduced, false by default
|
||||
1.2: omitTermFreqAndPositions attribute introduced, true by default
|
||||
except for text fields.
|
||||
1.3: removed optional field compress feature
|
||||
1.4: autoGeneratePhraseQueries attribute introduced to drive QueryParser
|
||||
behavior when a single string produces multiple tokens. Defaults
|
||||
to off for version >= 1.4
|
||||
1.5: omitNorms defaults to true for primitive field types
|
||||
(int, float, boolean, string...)
|
||||
-->
|
||||
|
||||
|
||||
<!-- Valid attributes for fields:
|
||||
name: mandatory - the name for the field
|
||||
type: mandatory - the name of a field type from the
|
||||
<types> fieldType section
|
||||
indexed: true if this field should be indexed (searchable or sortable)
|
||||
stored: true if this field should be retrievable
|
||||
docValues: true if this field should have doc values. Doc values are
|
||||
useful for faceting, grouping, sorting and function queries. Although not
|
||||
required, doc values will make the index faster to load, more
|
||||
NRT-friendly and more memory-efficient. They however come with some
|
||||
limitations: they are currently only supported by StrField, UUIDField
|
||||
and all Trie*Fields, and depending on the field type, they might
|
||||
require the field to be single-valued, be required or have a default
|
||||
value (check the documentation of the field type you're interested in
|
||||
for more information)
|
||||
multiValued: true if this field may contain multiple values per document
|
||||
omitNorms: (expert) set to true to omit the norms associated with
|
||||
this field (this disables length normalization and index-time
|
||||
boosting for the field, and saves some memory). Only full-text
|
||||
fields or fields that need an index-time boost need norms.
|
||||
Norms are omitted for primitive (non-analyzed) types by default.
|
||||
termVectors: [false] set to true to store the term vector for a
|
||||
given field.
|
||||
When using MoreLikeThis, fields used for similarity should be
|
||||
stored for best performance.
|
||||
termPositions: Store position information with the term vector.
|
||||
This will increase storage costs.
|
||||
termOffsets: Store offset information with the term vector. This
|
||||
will increase storage costs.
|
||||
required: The field is required. It will throw an error if the
|
||||
value does not exist
|
||||
default: a value that should be used if no value is specified
|
||||
when adding a document.
|
||||
-->
|
||||
|
||||
<!-- field names should consist of alphanumeric or underscore characters only and
|
||||
not start with a digit. This is not currently strictly enforced,
|
||||
but other field names will not have first class support from all components
|
||||
and back compatibility is not guaranteed. Names with both leading and
|
||||
trailing underscores (e.g. _version_) are reserved.
|
||||
-->
|
||||
|
||||
<!-- If you remove this field, you must _also_ disable the update log in solrconfig.xml
|
||||
or Solr won't start. _version_ and update log are required for SolrCloud
|
||||
-->
|
||||
<field name="_version_" type="long" indexed="true" stored="true"/>
|
||||
|
||||
<!-- points to the root document of a block of nested documents. Required for nested
|
||||
document support, may be removed otherwise
|
||||
-->
|
||||
<field name="_root_" type="string" indexed="true" stored="false"/>
|
||||
|
||||
<!-- Only remove the "id" field if you have a very good reason to. While not strictly
|
||||
required, it is highly recommended. A <uniqueKey> is present in almost all Solr
|
||||
installations. See the <uniqueKey> declaration below where <uniqueKey> is set to "id".
|
||||
-->
|
||||
<field name="id" type="string" indexed="true" stored="true" required="true" multiValued="false" />
|
||||
|
||||
<!-- Dynamic field definitions allow using convention over configuration
|
||||
for fields via the specification of patterns to match field names.
|
||||
EXAMPLE: name="*_i" will match any field ending in _i (like myid_i, z_i)
|
||||
RESTRICTION: the glob-like pattern in the name attribute must have
|
||||
a "*" only at the start or the end. -->
|
||||
|
||||
<dynamicField name="*_i" type="int" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_is" type="int" indexed="true" stored="true" multiValued="true"/>
|
||||
<dynamicField name="*_s" type="string" indexed="true" stored="true" />
|
||||
<dynamicField name="*_ss" type="string" indexed="true" stored="true" multiValued="true"/>
|
||||
<dynamicField name="*_l" type="long" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_ls" type="long" indexed="true" stored="true" multiValued="true"/>
|
||||
<dynamicField name="*_t" type="text_general" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_txt" type="text_general" indexed="true" stored="true" multiValued="true"/>
|
||||
<dynamicField name="*_en" type="text_en" indexed="true" stored="true" multiValued="true"/>
|
||||
<dynamicField name="*_b" type="boolean" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_bs" type="boolean" indexed="true" stored="true" multiValued="true"/>
|
||||
<dynamicField name="*_f" type="float" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_fs" type="float" indexed="true" stored="true" multiValued="true"/>
|
||||
<dynamicField name="*_d" type="double" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_ds" type="double" indexed="true" stored="true" multiValued="true"/>
|
||||
|
||||
<!-- Type used to index the lat and lon components for the "location" FieldType -->
|
||||
<dynamicField name="*_coordinate" type="tdouble" indexed="true" stored="false" />
|
||||
|
||||
<dynamicField name="*_dt" type="date" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_dts" type="date" indexed="true" stored="true" multiValued="true"/>
|
||||
<dynamicField name="*_p" type="location" indexed="true" stored="true"/>
|
||||
|
||||
<!-- some trie-coded dynamic fields for faster range queries -->
|
||||
<dynamicField name="*_ti" type="tint" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_tl" type="tlong" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_tf" type="tfloat" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_td" type="tdouble" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_tdt" type="tdate" indexed="true" stored="true"/>
|
||||
|
||||
<dynamicField name="*_c" type="currency" indexed="true" stored="true"/>
|
||||
|
||||
<dynamicField name="ignored_*" type="ignored" multiValued="true"/>
|
||||
<dynamicField name="attr_*" type="text_general" indexed="true" stored="true" multiValued="true"/>
|
||||
|
||||
<dynamicField name="random_*" type="random" />
|
||||
|
||||
<!-- uncomment the following to ignore any fields that don't already match an existing
|
||||
field name or dynamic field, rather than reporting them as an error.
|
||||
alternately, change the type="ignored" to some other type e.g. "text" if you want
|
||||
unknown fields indexed and/or stored by default -->
|
||||
<!--dynamicField name="*" type="ignored" multiValued="true" /-->
|
||||
|
||||
<!-- Field to use to determine and enforce document uniqueness.
|
||||
Unless this field is marked with required="false", it will be a required field
|
||||
-->
|
||||
<uniqueKey>id</uniqueKey>
|
||||
|
||||
<!-- copyField commands copy one field to another at the time a document
|
||||
is added to the index. It's used either to index the same field differently,
|
||||
or to add multiple fields to the same field for easier/faster searching. -->
|
||||
|
||||
<!--
|
||||
<copyField source="title" dest="text"/>
|
||||
<copyField source="body" dest="text"/>
|
||||
-->
|
||||
|
||||
<!-- field type definitions. The "name" attribute is
|
||||
just a label to be used by field definitions. The "class"
|
||||
attribute and any other attributes determine the real
|
||||
behavior of the fieldType.
|
||||
Class names starting with "solr" refer to java classes in a
|
||||
standard package such as org.apache.solr.analysis
|
||||
-->
|
||||
|
||||
<!-- The StrField type is not analyzed, but indexed/stored verbatim.
|
||||
It supports doc values but in that case the field needs to be
|
||||
single-valued and either required or have a default value.
|
||||
-->
|
||||
<fieldType name="string" class="solr.StrField" sortMissingLast="true" />
|
||||
|
||||
<!-- boolean type: "true" or "false" -->
|
||||
<fieldType name="boolean" class="solr.BoolField" sortMissingLast="true"/>
|
||||
|
||||
<!-- sortMissingLast and sortMissingFirst are optional attributes that are
|
||||
currently supported on types that are sorted internally as strings
|
||||
and on numeric types.
|
||||
This includes "string","boolean", and, as of 3.5 (and 4.x),
|
||||
int, float, long, date, double, including the "Trie" variants.
|
||||
- If sortMissingLast="true", then a sort on this field will cause documents
|
||||
without the field to come after documents with the field,
|
||||
regardless of the requested sort order (asc or desc).
|
||||
- If sortMissingFirst="true", then a sort on this field will cause documents
|
||||
without the field to come before documents with the field,
|
||||
regardless of the requested sort order.
|
||||
- If sortMissingLast="false" and sortMissingFirst="false" (the default),
|
||||
then default lucene sorting will be used which places docs without the
|
||||
field first in an ascending sort and last in a descending sort.
|
||||
-->
|
||||
|
||||
<!--
|
||||
Default numeric field types. For faster range queries, consider the tint/tfloat/tlong/tdouble types.
|
||||
|
||||
These fields support doc values, but they require the field to be
|
||||
single-valued and either be required or have a default value.
|
||||
-->
|
||||
<fieldType name="int" class="solr.TrieIntField" precisionStep="0" positionIncrementGap="0"/>
|
||||
<fieldType name="float" class="solr.TrieFloatField" precisionStep="0" positionIncrementGap="0"/>
|
||||
<fieldType name="long" class="solr.TrieLongField" precisionStep="0" positionIncrementGap="0"/>
|
||||
<fieldType name="double" class="solr.TrieDoubleField" precisionStep="0" positionIncrementGap="0"/>
|
||||
|
||||
<!--
|
||||
Numeric field types that index each value at various levels of precision
|
||||
to accelerate range queries when the number of values between the range
|
||||
endpoints is large. See the javadoc for NumericRangeQuery for internal
|
||||
implementation details.
|
||||
|
||||
Smaller precisionStep values (specified in bits) will lead to more tokens
|
||||
indexed per value, slightly larger index size, and faster range queries.
|
||||
A precisionStep of 0 disables indexing at different precision levels.
|
||||
-->
|
||||
<fieldType name="tint" class="solr.TrieIntField" precisionStep="8" positionIncrementGap="0"/>
|
||||
<fieldType name="tfloat" class="solr.TrieFloatField" precisionStep="8" positionIncrementGap="0"/>
|
||||
<fieldType name="tlong" class="solr.TrieLongField" precisionStep="8" positionIncrementGap="0"/>
|
||||
<fieldType name="tdouble" class="solr.TrieDoubleField" precisionStep="8" positionIncrementGap="0"/>
|
||||
|
||||
<!-- The format for this date field is of the form 1995-12-31T23:59:59Z, and
|
||||
is a more restricted form of the canonical representation of dateTime
|
||||
http://www.w3.org/TR/xmlschema-2/#dateTime
|
||||
The trailing "Z" designates UTC time and is mandatory.
|
||||
Optional fractional seconds are allowed: 1995-12-31T23:59:59.999Z
|
||||
All other components are mandatory.
|
||||
|
||||
Expressions can also be used to denote calculations that should be
|
||||
performed relative to "NOW" to determine the value, ie...
|
||||
|
||||
NOW/HOUR
|
||||
... Round to the start of the current hour
|
||||
NOW-1DAY
|
||||
... Exactly 1 day prior to now
|
||||
NOW/DAY+6MONTHS+3DAYS
|
||||
... 6 months and 3 days in the future from the start of
|
||||
the current day
|
||||
|
||||
Consult the TrieDateField javadocs for more information.
|
||||
|
||||
Note: For faster range queries, consider the tdate type
|
||||
-->
|
||||
<fieldType name="date" class="solr.TrieDateField" precisionStep="0" positionIncrementGap="0"/>
|
||||
|
||||
<!-- A Trie based date field for faster date range queries and date faceting. -->
|
||||
<fieldType name="tdate" class="solr.TrieDateField" precisionStep="6" positionIncrementGap="0"/>
|
||||
|
||||
|
||||
<!--Binary data type. The data should be sent/retrieved in as Base64 encoded Strings -->
|
||||
<fieldtype name="binary" class="solr.BinaryField"/>
|
||||
|
||||
<!-- The "RandomSortField" is not used to store or search any
|
||||
data. You can declare fields of this type it in your schema
|
||||
to generate pseudo-random orderings of your docs for sorting
|
||||
or function purposes. The ordering is generated based on the field
|
||||
name and the version of the index. As long as the index version
|
||||
remains unchanged, and the same field name is reused,
|
||||
the ordering of the docs will be consistent.
|
||||
If you want different pseudo-random orderings of documents,
|
||||
for the same version of the index, use a dynamicField and
|
||||
change the field name in the request.
|
||||
-->
|
||||
<fieldType name="random" class="solr.RandomSortField" indexed="true" />
|
||||
|
||||
<!-- solr.TextField allows the specification of custom text analyzers
|
||||
specified as a tokenizer and a list of token filters. Different
|
||||
analyzers may be specified for indexing and querying.
|
||||
|
||||
The optional positionIncrementGap puts space between multiple fields of
|
||||
this type on the same document, with the purpose of preventing false phrase
|
||||
matching across fields.
|
||||
|
||||
For more info on customizing your analyzer chain, please see
|
||||
http://wiki.apache.org/solr/AnalyzersTokenizersTokenFilters
|
||||
-->
|
||||
|
||||
<!-- One can also specify an existing Analyzer class that has a
|
||||
default constructor via the class attribute on the analyzer element.
|
||||
Example:
|
||||
<fieldType name="text_greek" class="solr.TextField">
|
||||
<analyzer class="org.apache.lucene.analysis.el.GreekAnalyzer"/>
|
||||
</fieldType>
|
||||
-->
|
||||
|
||||
<!-- A text field that only splits on whitespace for exact matching of words -->
|
||||
<fieldType name="text_ws" class="solr.TextField" positionIncrementGap="100">
|
||||
<analyzer>
|
||||
<tokenizer class="solr.WhitespaceTokenizerFactory"/>
|
||||
</analyzer>
|
||||
</fieldType>
|
||||
|
||||
<!-- A general text field that has reasonable, generic
|
||||
cross-language defaults: it tokenizes with StandardTokenizer,
|
||||
removes stop words from case-insensitive "stopwords.txt"
|
||||
(empty by default), and down cases. At query time only, it
|
||||
also applies synonyms. -->
|
||||
<fieldType name="text_general" class="solr.TextField" positionIncrementGap="100">
|
||||
<analyzer type="index">
|
||||
<tokenizer class="solr.StandardTokenizerFactory"/>
|
||||
<filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt" />
|
||||
<!-- in this example, we will only use synonyms at query time
|
||||
<filter class="solr.SynonymFilterFactory" synonyms="index_synonyms.txt" ignoreCase="true" expand="false"/>
|
||||
-->
|
||||
<filter class="solr.LowerCaseFilterFactory"/>
|
||||
</analyzer>
|
||||
<analyzer type="query">
|
||||
<tokenizer class="solr.StandardTokenizerFactory"/>
|
||||
<filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt" />
|
||||
<filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
|
||||
<filter class="solr.LowerCaseFilterFactory"/>
|
||||
</analyzer>
|
||||
</fieldType>
|
||||
|
||||
<!-- A text field with defaults appropriate for English: it
|
||||
tokenizes with StandardTokenizer, removes English stop words
|
||||
(lang/stopwords_en.txt), down cases, protects words from protwords.txt, and
|
||||
finally applies Porter's stemming. The query time analyzer
|
||||
also applies synonyms from synonyms.txt. -->
|
||||
<fieldType name="text_en" class="solr.TextField" positionIncrementGap="100">
|
||||
<analyzer type="index">
|
||||
<tokenizer class="solr.StandardTokenizerFactory"/>
|
||||
<!-- in this example, we will only use synonyms at query time
|
||||
<filter class="solr.SynonymFilterFactory" synonyms="index_synonyms.txt" ignoreCase="true" expand="false"/>
|
||||
-->
|
||||
<!-- Case insensitive stop word removal.
|
||||
-->
|
||||
<filter class="solr.StopFilterFactory"
|
||||
ignoreCase="true"
|
||||
words="lang/stopwords_en.txt"
|
||||
/>
|
||||
<filter class="solr.LowerCaseFilterFactory"/>
|
||||
<filter class="solr.EnglishPossessiveFilterFactory"/>
|
||||
<filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
|
||||
<!-- Optionally you may want to use this less aggressive stemmer instead of PorterStemFilterFactory:
|
||||
<filter class="solr.EnglishMinimalStemFilterFactory"/>
|
||||
-->
|
||||
<filter class="solr.PorterStemFilterFactory"/>
|
||||
</analyzer>
|
||||
<analyzer type="query">
|
||||
<tokenizer class="solr.StandardTokenizerFactory"/>
|
||||
<filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
|
||||
<filter class="solr.StopFilterFactory"
|
||||
ignoreCase="true"
|
||||
words="lang/stopwords_en.txt"
|
||||
/>
|
||||
<filter class="solr.LowerCaseFilterFactory"/>
|
||||
<filter class="solr.EnglishPossessiveFilterFactory"/>
|
||||
<filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
|
||||
<!-- Optionally you may want to use this less aggressive stemmer instead of PorterStemFilterFactory:
|
||||
<filter class="solr.EnglishMinimalStemFilterFactory"/>
|
||||
-->
|
||||
<filter class="solr.PorterStemFilterFactory"/>
|
||||
</analyzer>
|
||||
</fieldType>
|
||||
|
||||
<!-- A text field with defaults appropriate for English, plus
|
||||
aggressive word-splitting and autophrase features enabled.
|
||||
This field is just like text_en, except it adds
|
||||
WordDelimiterFilter to enable splitting and matching of
|
||||
words on case-change, alpha numeric boundaries, and
|
||||
non-alphanumeric chars. This means certain compound word
|
||||
cases will work, for example query "wi fi" will match
|
||||
document "WiFi" or "wi-fi".
|
||||
-->
|
||||
<fieldType name="text_en_splitting" class="solr.TextField" positionIncrementGap="100" autoGeneratePhraseQueries="true">
|
||||
<analyzer type="index">
|
||||
<tokenizer class="solr.WhitespaceTokenizerFactory"/>
|
||||
<!-- in this example, we will only use synonyms at query time
|
||||
<filter class="solr.SynonymFilterFactory" synonyms="index_synonyms.txt" ignoreCase="true" expand="false"/>
|
||||
-->
|
||||
<!-- Case insensitive stop word removal.
|
||||
-->
|
||||
<filter class="solr.StopFilterFactory"
|
||||
ignoreCase="true"
|
||||
words="lang/stopwords_en.txt"
|
||||
/>
|
||||
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="1" catenateNumbers="1" catenateAll="0" splitOnCaseChange="1"/>
|
||||
<filter class="solr.LowerCaseFilterFactory"/>
|
||||
<filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
|
||||
<filter class="solr.PorterStemFilterFactory"/>
|
||||
</analyzer>
|
||||
<analyzer type="query">
|
||||
<tokenizer class="solr.WhitespaceTokenizerFactory"/>
|
||||
<filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
|
||||
<filter class="solr.StopFilterFactory"
|
||||
ignoreCase="true"
|
||||
words="lang/stopwords_en.txt"
|
||||
/>
|
||||
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="0" catenateNumbers="0" catenateAll="0" splitOnCaseChange="1"/>
|
||||
<filter class="solr.LowerCaseFilterFactory"/>
|
||||
<filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
|
||||
<filter class="solr.PorterStemFilterFactory"/>
|
||||
</analyzer>
|
||||
</fieldType>
|
||||
|
||||
<!-- Less flexible matching, but less false matches. Probably not ideal for product names,
|
||||
but may be good for SKUs. Can insert dashes in the wrong place and still match. -->
|
||||
<fieldType name="text_en_splitting_tight" class="solr.TextField" positionIncrementGap="100" autoGeneratePhraseQueries="true">
|
||||
<analyzer>
|
||||
<tokenizer class="solr.WhitespaceTokenizerFactory"/>
|
||||
<filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="false"/>
|
||||
<filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_en.txt"/>
|
||||
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="0" generateNumberParts="0" catenateWords="1" catenateNumbers="1" catenateAll="0"/>
|
||||
<filter class="solr.LowerCaseFilterFactory"/>
|
||||
<filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
|
||||
<filter class="solr.EnglishMinimalStemFilterFactory"/>
|
||||
<!-- this filter can remove any duplicate tokens that appear at the same position - sometimes
|
||||
possible with WordDelimiterFilter in conjunction with stemming. -->
|
||||
<filter class="solr.RemoveDuplicatesTokenFilterFactory"/>
|
||||
</analyzer>
|
||||
</fieldType>
|
||||
|
||||
<!-- Just like text_general except it reverses the characters of
|
||||
each token, to enable more efficient leading wildcard queries. -->
|
||||
<fieldType name="text_general_rev" class="solr.TextField" positionIncrementGap="100">
|
||||
<analyzer type="index">
|
||||
<tokenizer class="solr.StandardTokenizerFactory"/>
|
||||
<filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt" />
|
||||
<filter class="solr.LowerCaseFilterFactory"/>
|
||||
<filter class="solr.ReversedWildcardFilterFactory" withOriginal="true"
|
||||
maxPosAsterisk="3" maxPosQuestion="2" maxFractionAsterisk="0.33"/>
|
||||
</analyzer>
|
||||
<analyzer type="query">
|
||||
<tokenizer class="solr.StandardTokenizerFactory"/>
|
||||
<filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
|
||||
<filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt" />
|
||||
<filter class="solr.LowerCaseFilterFactory"/>
|
||||
</analyzer>
|
||||
</fieldType>
|
||||
|
||||
<!-- This is an example of using the KeywordTokenizer along
|
||||
with various TokenFilterFactories to produce a sortable field
|
||||
that does not include some properties of the source text
|
||||
-->
|
||||
<fieldType name="alphaOnlySort" class="solr.TextField" sortMissingLast="true" omitNorms="true">
|
||||
<analyzer>
|
||||
<!-- KeywordTokenizer does no actual tokenizing, so the entire
|
||||
input string is preserved as a single token
|
||||
-->
|
||||
<tokenizer class="solr.KeywordTokenizerFactory"/>
|
||||
<!-- The LowerCase TokenFilter does what you expect, which can be useful
|
||||
when you want your sorting to be case insensitive
|
||||
-->
|
||||
<filter class="solr.LowerCaseFilterFactory" />
|
||||
<!-- The TrimFilter removes any leading or trailing whitespace -->
|
||||
<filter class="solr.TrimFilterFactory" />
|
||||
<!-- The PatternReplaceFilter gives you the flexibility to use
|
||||
Java Regular expression to replace any sequence of characters
|
||||
matching a pattern with an arbitrary replacement string,
|
||||
which may include back references to portions of the original
|
||||
string matched by the pattern.
|
||||
|
||||
See the Java Regular Expression documentation for more
|
||||
information on pattern and replacement string syntax.
|
||||
|
||||
http://docs.oracle.com/javase/7/docs/api/java/util/regex/package-summary.html
|
||||
-->
|
||||
<filter class="solr.PatternReplaceFilterFactory"
|
||||
pattern="([^a-z])" replacement="" replace="all"
|
||||
/>
|
||||
</analyzer>
|
||||
</fieldType>
|
||||
|
||||
<!-- lowercases the entire field value, keeping it as a single token. -->
|
||||
<fieldType name="lowercase" class="solr.TextField" positionIncrementGap="100">
|
||||
<analyzer>
|
||||
<tokenizer class="solr.KeywordTokenizerFactory"/>
|
||||
<filter class="solr.LowerCaseFilterFactory" />
|
||||
</analyzer>
|
||||
</fieldType>
|
||||
|
||||
<!-- since fields of this type are by default not stored or indexed,
|
||||
any data added to them will be ignored outright. -->
|
||||
<fieldtype name="ignored" stored="false" indexed="false" multiValued="true" class="solr.StrField" />
|
||||
|
||||
<!-- This point type indexes the coordinates as separate fields (subFields)
|
||||
If subFieldType is defined, it references a type, and a dynamic field
|
||||
definition is created matching *___<typename>. Alternately, if
|
||||
subFieldSuffix is defined, that is used to create the subFields.
|
||||
Example: if subFieldType="double", then the coordinates would be
|
||||
indexed in fields myloc_0___double,myloc_1___double.
|
||||
Example: if subFieldSuffix="_d" then the coordinates would be indexed
|
||||
in fields myloc_0_d,myloc_1_d
|
||||
The subFields are an implementation detail of the fieldType, and end
|
||||
users normally should not need to know about them.
|
||||
-->
|
||||
<fieldType name="point" class="solr.PointType" dimension="2" subFieldSuffix="_d"/>
|
||||
|
||||
<!-- A specialized field for geospatial search. If indexed, this fieldType must not be multivalued. -->
|
||||
<fieldType name="location" class="solr.LatLonType" subFieldSuffix="_coordinate"/>
|
||||
|
||||
<!-- An alternative geospatial field type new to Solr 4. It supports multiValued and polygon shapes.
|
||||
For more information about this and other Spatial fields new to Solr 4, see:
|
||||
http://wiki.apache.org/solr/SolrAdaptersForLuceneSpatial4
|
||||
-->
|
||||
<fieldType name="location_rpt" class="solr.SpatialRecursivePrefixTreeFieldType"
|
||||
geo="true" distErrPct="0.025" maxDistErr="0.000009" units="degrees" />
|
||||
|
||||
<!-- Spatial rectangle (bounding box) field. It supports most spatial predicates, and has
|
||||
special relevancy modes: score=overlapRatio|area|area2D (local-param to the query). DocValues is recommended for
|
||||
relevancy. -->
|
||||
<fieldType name="bbox" class="solr.BBoxField"
|
||||
geo="true" units="degrees" numberType="_bbox_coord" />
|
||||
<fieldType name="_bbox_coord" class="solr.TrieDoubleField" precisionStep="8" docValues="true" stored="false"/>
|
||||
|
||||
<!-- Money/currency field type. See http://wiki.apache.org/solr/MoneyFieldType
|
||||
Parameters:
|
||||
defaultCurrency: Specifies the default currency if none specified. Defaults to "USD"
|
||||
precisionStep: Specifies the precisionStep for the TrieLong field used for the amount
|
||||
providerClass: Lets you plug in other exchange provider backend:
|
||||
solr.FileExchangeRateProvider is the default and takes one parameter:
|
||||
currencyConfig: name of an xml file holding exchange rates
|
||||
solr.OpenExchangeRatesOrgProvider uses rates from openexchangerates.org:
|
||||
ratesFileLocation: URL or path to rates JSON file (default latest.json on the web)
|
||||
refreshInterval: Number of minutes between each rates fetch (default: 1440, min: 60)
|
||||
-->
|
||||
<fieldType name="currency" class="solr.CurrencyField" precisionStep="8" defaultCurrency="USD" currencyConfig="currency.xml" />
|
||||
|
||||
</schema>
|
|
@ -0,0 +1,654 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!--
 Licensed to the Apache Software Foundation (ASF) under one or more
 contributor license agreements.  See the NOTICE file distributed with
 this work for additional information regarding copyright ownership.
 The ASF licenses this file to You under the Apache License, Version 2.0
 (the "License"); you may not use this file except in compliance with
 the License.  You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
-->

<!--
     For more details about configuration options that may appear in
     this file, see http://wiki.apache.org/solr/SolrConfigXml.
-->
<config>
  <!-- In all configuration below, a prefix of "solr." for class names
       is an alias that causes solr to search appropriate packages,
       including org.apache.solr.(search|update|request|core|analysis)

       You may also specify a fully qualified Java classname if you
       have your own custom plugins.
    -->

  <!-- Controls what version of Lucene various components of Solr
       adhere to.  Generally, you want to use the latest version to
       get all bug fixes and improvements.  It is highly recommended
       that you fully re-index after changing this setting as it can
       affect both how text is indexed and queried.
    -->
  <luceneMatchVersion>6.0.0</luceneMatchVersion>

  <!-- Data Directory

       Used to specify an alternate directory to hold all index data
       other than the default ./data under the Solr home.  If
       replication is in use, this should match the replication
       configuration.
    -->
  <dataDir>${solr.data.dir:}</dataDir>

  <!-- The DirectoryFactory to use for indexes.

       solr.StandardDirectoryFactory is filesystem
       based and tries to pick the best implementation for the current
       JVM and platform.  solr.NRTCachingDirectoryFactory, the default,
       wraps solr.StandardDirectoryFactory and caches small files in memory
       for better NRT performance.

       One can force a particular implementation via solr.MMapDirectoryFactory,
       solr.NIOFSDirectoryFactory, or solr.SimpleFSDirectoryFactory.

       solr.RAMDirectoryFactory is memory based, not
       persistent, and doesn't work with replication.
    -->
  <directoryFactory name="DirectoryFactory"
                    class="${solr.directoryFactory:solr.NRTCachingDirectoryFactory}">
  </directoryFactory>
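
  <!-- Usage note (illustrative, not a setting shipped in this file): because the class
       name above uses property substitution, the factory can be swapped at startup by
       passing the system property to the JVM, for example

         -Dsolr.directoryFactory=solr.MMapDirectoryFactory

       without editing this file.
    -->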

  <!-- The CodecFactory for defining the format of the inverted index.
       The default implementation is SchemaCodecFactory, which is the official Lucene
       index format, but hooks into the schema to provide per-field customization of
       the postings lists and per-document values in the fieldType element
       (postingsFormat/docValuesFormat).  Note that most of the alternative implementations
       are experimental, so if you choose to customize the index format, it's a good
       idea to convert back to the official format e.g. via IndexWriter.addIndexes(IndexReader)
       before upgrading to a newer version to avoid unnecessary reindexing.
    -->
  <codecFactory class="solr.SchemaCodecFactory"/>

  <schemaFactory class="ClassicIndexSchemaFactory"/>
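
  <!-- Illustrative sketch only: with SchemaCodecFactory in place, a fieldType in
       schema.xml may opt into a non-default postings format, e.g.

         <fieldType name="string_memory" class="solr.StrField" postingsFormat="Memory"/>

       The type name shown is an assumption for the example, not something defined by
       this configuration.
    -->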

  <!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
       Index Config - These settings control low-level behavior of indexing
       Most example settings here show the default value, but are commented
       out, to more easily see where customizations have been made.

       Note: This replaces <indexDefaults> and <mainIndex> from older versions
       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
  <indexConfig>

    <!-- LockFactory

         This option specifies which Lucene LockFactory implementation
         to use.

         single = SingleInstanceLockFactory - suggested for a
                  read-only index or when there is no possibility of
                  another process trying to modify the index.
         native = NativeFSLockFactory - uses OS native file locking.
                  Do not use when multiple solr webapps in the same
                  JVM are attempting to share a single index.
         simple = SimpleFSLockFactory - uses a plain file for locking

         Defaults: 'native' is the default for Solr 3.6 and later;
         otherwise 'simple' is the default.

         More details on the nuances of each LockFactory...
         http://wiki.apache.org/lucene-java/AvailableLockFactories
      -->
    <lockType>${solr.lock.type:native}</lockType>

    <!-- Lucene Infostream

         To aid in advanced debugging, Lucene provides an "InfoStream"
         of detailed information when indexing.

         Setting the value to true will instruct the underlying Lucene
         IndexWriter to write its info stream to solr's log.  By default,
         this is enabled here, and controlled through log4j.properties.
      -->
    <infoStream>true</infoStream>

    <!--
        Use true to enable this safety check, which can help
        reduce the risk of propagating index corruption from older segments
        into new ones, at the expense of slower merging.
      -->
    <checkIntegrityAtMerge>false</checkIntegrityAtMerge>
  </indexConfig>
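
  <!-- Sketch of optional tuning (not enabled here, values illustrative): other knobs
       that may appear inside <indexConfig> include, for example,

         <ramBufferSizeMB>100</ramBufferSizeMB>
         <maxBufferedDocs>1000</maxBufferedDocs>

       which control when buffered documents are flushed to a new segment.
    -->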

  <!-- JMX

       This example enables JMX if and only if an existing MBeanServer
       is found; use this if you want to configure JMX through JVM
       parameters.  Remove this to disable exposing Solr configuration
       and statistics to JMX.

       For more details see http://wiki.apache.org/solr/SolrJmx
    -->
  <jmx />
  <!-- If you want to connect to a particular server, specify the
       agentId
    -->
  <!-- <jmx agentId="myAgent" /> -->
  <!-- If you want to start a new MBeanServer, specify the serviceUrl -->
  <!-- <jmx serviceUrl="service:jmx:rmi:///jndi/rmi://localhost:9999/solr"/>
    -->

  <!-- The default high-performance update handler -->
  <updateHandler class="solr.DirectUpdateHandler2">

    <!-- Enables a transaction log, used for real-time get, durability,
         and SolrCloud replica recovery.  The log can grow as big as
         uncommitted changes to the index, so use of a hard autoCommit
         is recommended (see below).
         "dir" - the target directory for transaction logs, defaults to the
                 solr data directory. -->
    <updateLog>
      <str name="dir">${solr.ulog.dir:}</str>
    </updateLog>

    <!-- AutoCommit

         Perform a hard commit automatically under certain conditions.
         Instead of enabling autoCommit, consider using "commitWithin"
         when adding documents.

         http://wiki.apache.org/solr/UpdateXmlMessages

         maxDocs - Maximum number of documents to add since the last
                   commit before automatically triggering a new commit.

         maxTime - Maximum amount of time in ms that is allowed to pass
                   since a document was added before automatically
                   triggering a new commit.
         openSearcher - if false, the commit causes recent index changes
                   to be flushed to stable storage, but does not cause a new
                   searcher to be opened to make those changes visible.

         If the updateLog is enabled, then it's highly recommended to
         have some sort of hard autoCommit to limit the log size.
      -->
    <autoCommit>
      <maxTime>${solr.autoCommit.maxTime:15000}</maxTime>
      <openSearcher>false</openSearcher>
    </autoCommit>

    <!-- softAutoCommit is like autoCommit except it causes a
         'soft' commit which only ensures that changes are visible
         but does not ensure that data is synced to disk.  This is
         faster and more near-realtime friendly than a hard commit.
      -->
    <autoSoftCommit>
      <maxTime>${solr.autoSoftCommit.maxTime:-1}</maxTime>
    </autoSoftCommit>

  </updateHandler>
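
  <!-- Usage note (example request, not configuration): instead of relying on autoCommit,
       a client can ask for a commit within a bounded time when it sends documents, e.g.

         <add commitWithin="10000">
           <doc>
             <field name="id">example-1</field>
           </doc>
         </add>

       posted to /update; the 10000 ms window and the document shown are illustrative.
    -->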

  <!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
       Query section - these settings control query time things like caches
       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
  <query>
    <!-- Max Boolean Clauses

         Maximum number of clauses in each BooleanQuery; an exception
         is thrown if this limit is exceeded.

         ** WARNING **

         This option actually modifies a global Lucene property that
         will affect all SolrCores.  If multiple solrconfig.xml files
         disagree on this property, the value at any given moment will
         be based on the last SolrCore to be initialized.

      -->
    <maxBooleanClauses>1024</maxBooleanClauses>


    <!-- Solr Internal Query Caches

         There are two implementations of cache available for Solr,
         LRUCache, based on a synchronized LinkedHashMap, and
         FastLRUCache, based on a ConcurrentHashMap.

         FastLRUCache has faster gets and slower puts in single
         threaded operation and thus is generally faster than LRUCache
         when the hit ratio of the cache is high (> 75%), and may be
         faster under other scenarios on multi-cpu systems.
      -->

    <!-- Filter Cache

         Cache used by SolrIndexSearcher for filters (DocSets),
         unordered sets of *all* documents that match a query.  When a
         new searcher is opened, its caches may be prepopulated or
         "autowarmed" using data from caches in the old searcher.
         autowarmCount is the number of items to prepopulate.  For
         LRUCache, the autowarmed items will be the most recently
         accessed items.

         Parameters:
           class - the SolrCache implementation to use
                   (LRUCache or FastLRUCache)
           size - the maximum number of entries in the cache
           initialSize - the initial capacity (number of entries) of
                   the cache.  (see java.util.HashMap)
           autowarmCount - the number of entries to prepopulate from
                   an old cache.
      -->
    <filterCache class="solr.FastLRUCache"
                 size="512"
                 initialSize="512"
                 autowarmCount="0"/>

    <!-- Query Result Cache

         Caches results of searches - ordered lists of document ids
         (DocList) based on a query, a sort, and the range of documents requested.
      -->
    <queryResultCache class="solr.LRUCache"
                      size="512"
                      initialSize="512"
                      autowarmCount="0"/>

    <!-- Document Cache

         Caches Lucene Document objects (the stored fields for each
         document).  Since Lucene internal document ids are transient,
         this cache will not be autowarmed.
      -->
    <documentCache class="solr.LRUCache"
                   size="512"
                   initialSize="512"
                   autowarmCount="0"/>

    <!-- custom cache currently used by block join -->
    <cache name="perSegFilter"
           class="solr.search.LRUCache"
           size="10"
           initialSize="0"
           autowarmCount="10"
           regenerator="solr.NoOpRegenerator" />

    <!-- Lazy Field Loading

         If true, stored fields that are not requested will be loaded
         lazily.  This can result in a significant speed improvement
         if the usual case is to not load all stored fields,
         especially if the skipped fields are large compressed text
         fields.
      -->
    <enableLazyFieldLoading>true</enableLazyFieldLoading>

    <!-- Result Window Size

         An optimization for use with the queryResultCache.  When a search
         is requested, a superset of the requested number of document ids
         is collected.  For example, if a search for a particular query
         requests matching documents 10 through 19, and queryResultWindowSize is 50,
         then documents 0 through 49 will be collected and cached.  Any further
         requests in that range can be satisfied via the cache.
      -->
    <queryResultWindowSize>20</queryResultWindowSize>

    <!-- Maximum number of documents to cache for any entry in the
         queryResultCache.
      -->
    <queryResultMaxDocsCached>200</queryResultMaxDocsCached>

    <!-- Use Cold Searcher

         If a search request comes in and there is no current
         registered searcher, then immediately register the still
         warming searcher and use it.  If "false" then all requests
         will block until the first searcher is done warming.
      -->
    <useColdSearcher>false</useColdSearcher>

    <!-- Max Warming Searchers

         Maximum number of searchers that may be warming in the
         background concurrently.  An error is returned if this limit
         is exceeded.

         Recommended values are 1-2 for read-only slaves, and higher for
         masters without cache warming.
      -->
    <maxWarmingSearchers>2</maxWarmingSearchers>

  </query>

  <!-- Request Dispatcher

       This section contains instructions for how the SolrDispatchFilter
       should behave when processing requests for this SolrCore.

       handleSelect is a legacy option that affects the behavior of requests
       such as /select?qt=XXX

       handleSelect="true" will cause the SolrDispatchFilter to process
       the request and dispatch the query to a handler specified by the
       "qt" param, assuming "/select" isn't already registered.

       handleSelect="false" will cause the SolrDispatchFilter to
       ignore "/select" requests, resulting in a 404 unless a handler
       is explicitly registered with the name "/select".

       handleSelect="true" is not recommended for new users, but is the default
       for backwards compatibility.
    -->
  <requestDispatcher handleSelect="false" >
    <!-- Request Parsing

         These settings indicate how Solr Requests may be parsed, and
         what restrictions may be placed on the ContentStreams from
         those requests.

         enableRemoteStreaming - enables use of the stream.file
         and stream.url parameters for specifying remote streams.

         multipartUploadLimitInKB - specifies the max size (in KiB) of
         Multipart File Uploads that Solr will allow in a Request.

         formdataUploadLimitInKB - specifies the max size (in KiB) of
         form data (application/x-www-form-urlencoded) sent via
         POST.  You can use POST to pass request parameters not
         fitting into the URL.

         addHttpRequestToContext - if set to true, it will instruct
         the requestParsers to include the original HttpServletRequest
         object in the context map of the SolrQueryRequest under the
         key "httpRequest".  It will not be used by any of the existing
         Solr components, but may be useful when developing custom
         plugins.

         *** WARNING ***
         The settings below authorize Solr to fetch remote files; you
         should make sure your system has some authentication in place
         before using enableRemoteStreaming="true".

      -->
    <requestParsers enableRemoteStreaming="true"
                    multipartUploadLimitInKB="2048000"
                    formdataUploadLimitInKB="2048"
                    addHttpRequestToContext="false"/>
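
    <!-- Usage sketch (illustrative; the core name and file path are assumptions):
         with enableRemoteStreaming="true", an update request can point Solr at a
         local or remote stream instead of posting the body directly, e.g.

           curl 'http://localhost:8983/solr/<core>/update?stream.file=/tmp/docs.xml&commit=true'

         Given the warning above, keep this disabled or protect it whenever the server
         is reachable by untrusted clients.
      -->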

    <!-- HTTP Caching

         Set HTTP caching related parameters (for proxy caches and clients).

         The options below instruct Solr not to output any HTTP Caching
         related headers
      -->
    <httpCaching never304="true" />

  </requestDispatcher>

  <!-- Request Handlers

       http://wiki.apache.org/solr/SolrRequestHandler

       Incoming queries will be dispatched to a specific handler by name
       based on the path specified in the request.

       Legacy behavior: If the request path uses "/select" but no Request
       Handler has that name, and if handleSelect="true" has been specified in
       the requestDispatcher, then the Request Handler is dispatched based on
       the qt parameter.  Handlers without a leading '/' are accessed like so:
       http://host/app/[core/]select?qt=name  If no qt is given, then the
       requestHandler that declares default="true" will be used, or the one
       named "standard".

       If a Request Handler is declared with startup="lazy", then it will
       not be initialized until the first request that uses it.

    -->
  <!-- SearchHandler

       http://wiki.apache.org/solr/SearchHandler

       For processing Search Queries, the primary Request Handler
       provided with Solr is "SearchHandler".  It delegates to a sequence
       of SearchComponents (see below) and supports distributed
       queries across multiple shards.
    -->
  <requestHandler name="/select" class="solr.SearchHandler">
    <!-- default values for query parameters can be specified; these
         will be overridden by parameters in the request
      -->
    <lst name="defaults">
      <str name="echoParams">explicit</str>
      <int name="rows">10</int>
    </lst>

  </requestHandler>
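
  <!-- Example request (illustrative; "techproducts" stands in for whatever core or
       collection is actually in use, and the field names are assumptions):

         http://localhost:8983/solr/techproducts/select?q=name:solr&rows=10&fl=id,name

       The defaults above (echoParams=explicit, rows=10) apply unless the request
       overrides them.
    -->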

  <!-- A request handler that returns indented JSON by default -->
  <requestHandler name="/query" class="solr.SearchHandler">
    <lst name="defaults">
      <str name="echoParams">explicit</str>
      <str name="wt">json</str>
      <str name="indent">true</str>
      <str name="df">text</str>
    </lst>
  </requestHandler>

  <!-- realtime get handler, guaranteed to return the latest stored fields of
       any document, without the need to commit or open a new searcher.  The
       current implementation relies on the updateLog feature being enabled.

       ** WARNING **
       Do NOT disable the realtime get handler at /get if you are using
       SolrCloud; otherwise any leader election will cause a full sync in ALL
       replicas for the shard in question.  Similarly, a replica recovery will
       also always fetch the complete index from the leader because a partial
       sync will not be possible in the absence of this handler.
    -->
  <requestHandler name="/get" class="solr.RealTimeGetHandler">
    <lst name="defaults">
      <str name="omitHeader">true</str>
      <str name="wt">json</str>
      <str name="indent">true</str>
    </lst>
  </requestHandler>
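
  <!-- Example requests (illustrative; these assume the schema's uniqueKey field is "id"):

         http://localhost:8983/solr/<core>/get?id=1
         http://localhost:8983/solr/<core>/get?ids=1,2,3

       Either form returns the latest stored version of the document(s), even if the
       changes have not yet been committed.
    -->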

  <!--
      The export request handler is used to export full sorted result sets.
      Do not change these defaults.
    -->
  <requestHandler name="/export" class="solr.SearchHandler">
    <lst name="invariants">
      <str name="rq">{!xport}</str>
      <str name="wt">xsort</str>
      <str name="distrib">false</str>
    </lst>

    <arr name="components">
      <str>query</str>
    </arr>
  </requestHandler>
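
  <!-- Example request (illustrative; assumes the requested fl fields and the sort
       field have docValues enabled in the schema):

         http://localhost:8983/solr/<core>/export?q=*:*&sort=id+asc&fl=id

       This streams the entire sorted result set rather than a single page of results.
    -->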

  <!-- Update Request Handler.

       http://wiki.apache.org/solr/UpdateXmlMessages

       The canonical Request Handler for Modifying the Index through
       commands specified using XML, JSON, CSV, or JAVABIN

       Note: Since Solr 1.1, request handlers require a valid content
       type header if data is posted in the body.  For example, curl now
       requires: -H 'Content-type:text/xml; charset=utf-8'

       To override the request content type and force a specific
       Content-type, use the request parameter:
         ?update.contentType=text/csv

       This handler will pick a response format to match the input
       if the 'wt' parameter is not explicit
    -->
  <initParams path="/update/**,/query,/select,/tvrh,/elevate,/spell,/browse">
    <lst name="defaults">
      <str name="df">text</str>
    </lst>
  </initParams>
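
  <!-- Example update request (illustrative; the core name and document are made up):

         curl 'http://localhost:8983/solr/<core>/update?commit=true' \
              -H 'Content-type:text/xml; charset=utf-8' \
              -d '<add><doc><field name="id">example-1</field></doc></add>'

       JSON and CSV bodies can be sent to the same /update path with a matching
       Content-type header.
    -->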

  <!-- Field Analysis Request Handler

       RequestHandler that provides much the same functionality as
       analysis.jsp.  Provides the ability to specify multiple field
       types and field names in the same request and outputs
       index-time and query-time analysis for each of them.

       Request parameters are:
         analysis.fieldname - field name whose analyzers are to be used

         analysis.fieldtype - field type whose analyzers are to be used
         analysis.fieldvalue - text for index-time analysis
         q (or analysis.q) - text for query time analysis
         analysis.showmatch (true|false) - When set to true and when
             query analysis is performed, the produced tokens of the
             field value analysis will be marked as "matched" for every
             token that is produced by the query analysis
    -->
  <requestHandler name="/analysis/field"
                  startup="lazy"
                  class="solr.FieldAnalysisRequestHandler" />
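
  <!-- Example request (illustrative; assumes a field type named "text_general"
       exists in the schema):

         http://localhost:8983/solr/<core>/analysis/field?analysis.fieldtype=text_general&analysis.fieldvalue=Running+Quickly&q=run&analysis.showmatch=true

       This shows how the field value and the query text are tokenized at each
       stage of analysis.
    -->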

  <!-- Document Analysis Handler

       http://wiki.apache.org/solr/AnalysisRequestHandler

       An analysis handler that provides a breakdown of the analysis
       process of provided documents.  This handler expects a (single)
       content stream with the following format:

       <docs>
         <doc>
           <field name="id">1</field>
           <field name="name">The Name</field>
           <field name="text">The Text Value</field>
         </doc>
         <doc>...</doc>
         <doc>...</doc>
         ...
       </docs>

       Note: Each document must contain a field which serves as the
       unique key.  This key is used in the returned response to associate
       an analysis breakdown to the analyzed document.

       Like the FieldAnalysisRequestHandler, this handler also supports
       query analysis by sending either an "analysis.query" or "q"
       request parameter that holds the query text to be analyzed.  It
       also supports the "analysis.showmatch" parameter which, when set to
       true, causes all field tokens that match the query tokens to be
       marked as a "match".
    -->
  <requestHandler name="/analysis/document"
                  class="solr.DocumentAnalysisRequestHandler"
                  startup="lazy" />

  <!-- Admin Handlers

       Admin Handlers - This will register all the standard admin
       RequestHandlers.
    -->
  <requestHandler name="/admin/"
                  class="solr.admin.AdminHandlers" />

  <!-- ping/healthcheck -->
  <requestHandler name="/admin/ping" class="solr.PingRequestHandler">
    <lst name="invariants">
      <str name="q">solrpingquery</str>
    </lst>
    <lst name="defaults">
      <str name="echoParams">all</str>
    </lst>
    <!-- An optional feature of the PingRequestHandler is to configure the
         handler with a "healthcheckFile" which can be used to enable/disable
         the PingRequestHandler.
         relative paths are resolved against the data dir
      -->
    <!-- <str name="healthcheckFile">server-enabled.txt</str> -->
  </requestHandler>

  <!-- Echo the request contents back to the client -->
  <requestHandler name="/debug/dump" class="solr.DumpRequestHandler" >
    <lst name="defaults">
      <str name="echoParams">explicit</str>
      <str name="echoHandler">true</str>
    </lst>
  </requestHandler>

  <!-- Solr Replication

       The SolrReplicationHandler supports replicating indexes from a
       "master" used for indexing and "slaves" used for queries.

       http://wiki.apache.org/solr/SolrReplication

       It is also necessary for SolrCloud to function (in Cloud mode, the
       replication handler is used to bulk transfer segments when nodes
       are added or need to recover).

       https://wiki.apache.org/solr/SolrCloud/
    -->
  <requestHandler name="/replication" class="solr.ReplicationHandler" >
  </requestHandler>

  <!-- Search Components

       Search components are registered to SolrCore and used by
       instances of SearchHandler (which can access them by name)

       By default, the following components are available:

       <searchComponent name="query"     class="solr.QueryComponent" />
       <searchComponent name="facet"     class="solr.FacetComponent" />
       <searchComponent name="mlt"       class="solr.MoreLikeThisComponent" />
       <searchComponent name="highlight" class="solr.HighlightComponent" />
       <searchComponent name="stats"     class="solr.StatsComponent" />
       <searchComponent name="debug"     class="solr.DebugComponent" />

    -->

  <!-- Terms Component

       http://wiki.apache.org/solr/TermsComponent

       A component to return terms and document frequency of those
       terms
    -->
  <searchComponent name="terms" class="solr.TermsComponent"/>

  <!-- A request handler for demonstrating the terms component -->
  <requestHandler name="/terms" class="solr.SearchHandler" startup="lazy">
    <lst name="defaults">
      <bool name="terms">true</bool>
      <bool name="distrib">false</bool>
    </lst>
    <arr name="components">
      <str>terms</str>
    </arr>
  </requestHandler>
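
  <!-- Example request (illustrative; assumes an indexed field named "name"):

         http://localhost:8983/solr/<core>/terms?terms.fl=name&terms.prefix=a&terms.limit=10

       This returns up to 10 indexed terms from the "name" field that start with "a",
       along with their document frequencies.
    -->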

  <!-- Legacy config for the admin interface -->
  <admin>
    <defaultQuery>*:*</defaultQuery>
  </admin>

</config>
@ -83,7 +83,6 @@

  <lib dir="${solr.install.dir:../../../..}/contrib/velocity/lib" regex=".*\.jar" />
  <lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-velocity-\d.*\.jar" />

  <!-- an exact 'path' can be used instead of a 'dir' to specify a
       specific jar file.  This will cause a serious error to be logged
       if it can't be loaded.
@ -0,0 +1 @@
{"initArgs":{},"managedList":[]}
@ -0,0 +1,67 @@
<?xml version="1.0" ?>
<!--
 Licensed to the Apache Software Foundation (ASF) under one or more
 contributor license agreements.  See the NOTICE file distributed with
 this work for additional information regarding copyright ownership.
 The ASF licenses this file to You under the Apache License, Version 2.0
 (the "License"); you may not use this file except in compliance with
 the License.  You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
-->

<!-- Example exchange rates file for CurrencyField type named "currency" in example schema -->

<currencyConfig version="1.0">
  <rates>
    <!-- Updated from http://www.exchangerate.com/ at 2011-09-27 -->
    <rate from="USD" to="ARS" rate="4.333871" comment="ARGENTINA Peso" />
    <rate from="USD" to="AUD" rate="1.025768" comment="AUSTRALIA Dollar" />
    <rate from="USD" to="EUR" rate="0.743676" comment="European Euro" />
    <rate from="USD" to="BRL" rate="1.881093" comment="BRAZIL Real" />
    <rate from="USD" to="CAD" rate="1.030815" comment="CANADA Dollar" />
    <rate from="USD" to="CLP" rate="519.0996" comment="CHILE Peso" />
    <rate from="USD" to="CNY" rate="6.387310" comment="CHINA Yuan" />
    <rate from="USD" to="CZK" rate="18.47134" comment="CZECH REP. Koruna" />
    <rate from="USD" to="DKK" rate="5.515436" comment="DENMARK Krone" />
    <rate from="USD" to="HKD" rate="7.801922" comment="HONG KONG Dollar" />
    <rate from="USD" to="HUF" rate="215.6169" comment="HUNGARY Forint" />
    <rate from="USD" to="ISK" rate="118.1280" comment="ICELAND Krona" />
    <rate from="USD" to="INR" rate="49.49088" comment="INDIA Rupee" />
    <rate from="USD" to="XDR" rate="0.641358" comment="INTNL MON. FUND SDR" />
    <rate from="USD" to="ILS" rate="3.709739" comment="ISRAEL Sheqel" />
    <rate from="USD" to="JPY" rate="76.32419" comment="JAPAN Yen" />
    <rate from="USD" to="KRW" rate="1169.173" comment="KOREA (SOUTH) Won" />
    <rate from="USD" to="KWD" rate="0.275142" comment="KUWAIT Dinar" />
    <rate from="USD" to="MXN" rate="13.85895" comment="MEXICO Peso" />
    <rate from="USD" to="NZD" rate="1.285159" comment="NEW ZEALAND Dollar" />
    <rate from="USD" to="NOK" rate="5.859035" comment="NORWAY Krone" />
    <rate from="USD" to="PKR" rate="87.57007" comment="PAKISTAN Rupee" />
    <rate from="USD" to="PEN" rate="2.730683" comment="PERU Sol" />
    <rate from="USD" to="PHP" rate="43.62039" comment="PHILIPPINES Peso" />
    <rate from="USD" to="PLN" rate="3.310139" comment="POLAND Zloty" />
    <rate from="USD" to="RON" rate="3.100932" comment="ROMANIA Leu" />
    <rate from="USD" to="RUB" rate="32.14663" comment="RUSSIA Ruble" />
    <rate from="USD" to="SAR" rate="3.750465" comment="SAUDI ARABIA Riyal" />
    <rate from="USD" to="SGD" rate="1.299352" comment="SINGAPORE Dollar" />
    <rate from="USD" to="ZAR" rate="8.329761" comment="SOUTH AFRICA Rand" />
    <rate from="USD" to="SEK" rate="6.883442" comment="SWEDEN Krona" />
    <rate from="USD" to="CHF" rate="0.906035" comment="SWITZERLAND Franc" />
    <rate from="USD" to="TWD" rate="30.40283" comment="TAIWAN Dollar" />
    <rate from="USD" to="THB" rate="30.89487" comment="THAILAND Baht" />
    <rate from="USD" to="AED" rate="3.672955" comment="U.A.E. Dirham" />
    <rate from="USD" to="UAH" rate="7.988582" comment="UKRAINE Hryvnia" />
    <rate from="USD" to="GBP" rate="0.647910" comment="UNITED KINGDOM Pound" />

    <!-- Cross-rates for some common currencies -->
    <rate from="EUR" to="GBP" rate="0.869914" />
    <rate from="EUR" to="NOK" rate="7.800095" />
    <rate from="GBP" to="NOK" rate="8.966508" />
  </rates>
</currencyConfig>