Bring compatibility with the current Solr trunk, using admin/luke instead of indexinfo. Also added HpricotMapper (SOLR-210, from Jamie Orchard-Hays) and correctly imported the ArrayMapper.

git-svn-id: https://svn.apache.org/repos/asf/lucene/solr/trunk@536728 13f79535-47bb-0310-9956-ffa450edef68
Erik Hatcher 2007-05-10 01:28:49 +00:00
parent b47fc2cccc
commit d7568ddbc6
17 changed files with 373 additions and 85 deletions
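For client code, the net effect is that index metadata now comes from Solr's admin/luke handler. A minimal sketch of exercising that path with the updated gem; the localhost URL is an assumption, while Connection#send and the num_docs/field_names readers are taken from the tests later in this commit.

require 'solr'

conn = Solr::Connection.new('http://localhost:8983/solr')   # assumed URL

# IndexInfo now resolves to the admin/luke handler instead of the removed
# indexinfo handler (see the Request::IndexInfo diff below).
info = conn.send(Solr::Request::IndexInfo.new)
puts info.num_docs
puts info.field_names.inspect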

View File

@@ -140,7 +140,7 @@ class Solr::Connection
def post(request)
response = @connection.post(@url.path + "/" + request.handler,
request.to_s,
{ "Content-Type" => "application/x-www-form-urlencoded; charset=utf-8" })
{ "Content-Type" => request.content_type })
case response
when Net::HTTPSuccess then response.body
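The Content-Type header is no longer hard-coded to form-urlencoded; each request object now supplies its own via content_type (see the Request::Base and Request::Select diffs below). A rough sketch of the pattern, not the library's exact code; the class names here are illustrative only.

# Rough sketch of the content_type dispatch; class names are made up.
class XmlishRequest
  def content_type
    'text/xml; charset=utf-8'                               # XML update-style requests
  end
end

class FormishRequest < XmlishRequest
  def content_type
    'application/x-www-form-urlencoded; charset=utf-8'      # form-encoded selects
  end
end

# Connection#post then forwards whatever the request declares:
#   { "Content-Type" => request.content_type }
puts FormishRequest.new.content_type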

View File

@@ -12,6 +12,8 @@
module Solr; module Importer; end; end
require 'solr/importer/mapper'
require 'solr/importer/array_mapper'
require 'solr/importer/delimited_file_source'
require 'solr/importer/hpricot_mapper'
require 'solr/importer/xpath_mapper'
require 'solr/importer/solr_source'

View File

@@ -0,0 +1,20 @@
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'hpricot'
# Maps data into Solr fields by running Hpricot search paths against a parsed
# document and collecting each match's inner_html.
class Solr::Importer::HpricotMapper < Solr::Importer::Mapper
def field_data(doc, path)
doc.search(path.to_s).collect { |e| e.inner_html }
end
end
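A hedged usage sketch; the XML snippet and field names below are invented for illustration, and HpricotMapperTest later in this commit exercises the real behaviour.

require 'solr'
require 'hpricot'

xml = Hpricot.XML('<book><title>Snow Crash</title><author>Neal Stephenson</author></book>')
mapping = { :title_text => :title, :author_text => :author }   # invented field names

mapper = Solr::Importer::HpricotMapper.new(mapping)
mapper.map(xml)   # expect something like {:title_text=>["Snow Crash"], :author_text=>["Neal Stephenson"]}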

View File

@@ -13,7 +13,7 @@
class Solr::Indexer
# deprecated, use Indexer.new(ds,mapping).index instead
def self.index(data_source, mapper_or_mapping, options={})
indexer = Solr::Indexer.new(data_source, mapper_or_mapping, options={})
indexer = Solr::Indexer.new(data_source, mapper_or_mapping, options)
indexer.index
end
@@ -33,7 +33,7 @@ class Solr::Indexer
@data_source.each do |record|
document = @mapper.map(record)
yield(record, document) if block_given?
yield(record, document) if block_given? # TODO check return of block, if not true then don't index
buffer << document
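The deprecation note points callers at the two-step form; a hedged sketch of that usage, assuming a comma-delimited source file and field names that match the schema (all invented here).

require 'solr'

# Invented file name and columns; DelimitedFileSource yields one hash per row.
source  = Solr::Importer::DelimitedFileSource.new('products.csv', /,/)
mapping = { :id => :id, :name => :name, :price => :price }

indexer = Solr::Indexer.new(source, mapping)
indexer.index do |record, solr_document|
  puts "indexing #{solr_document[:id]}"
end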

View File

@@ -21,6 +21,10 @@ class Solr::Request::Base
def response_format
raise "unknown request type: #{self.class}"
end
def content_type
'text/xml; charset=utf-8'
end
# returns the solr handler or url fragment that can
# respond to this type of request

View File

@@ -11,7 +11,9 @@
# limitations under the License.
class Solr::Request::IndexInfo < Solr::Request::Select
def initialize
super('indexinfo')
def handler
'admin/luke'
end
end

View File

@@ -31,6 +31,10 @@ class Solr::Request::Select < Solr::Request::Base
def handler
'select'
end
def content_type
'application/x-www-form-urlencoded; charset=utf-8'
end
def to_hash
return {:qt => query_type, :wt => 'ruby'}
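Select-style requests now declare the form-urlencoded content type and carry their parameters (including wt=ruby) in the POST body. A rough illustration of that kind of serialization, not the gem's exact to_s implementation; the :q entry is only there for show.

require 'cgi'

params = { :qt => 'standard', :wt => 'ruby', :q => 'ipod' }   # :q is illustrative
body = params.map { |k, v| "#{k}=#{CGI.escape(v.to_s)}" }.join('&')
# e.g. "qt=standard&wt=ruby&q=ipod"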

View File

@@ -13,20 +13,5 @@
require 'rexml/xpath'
class Solr::Response::Commit < Solr::Response::Xml
attr_reader :ok
def initialize(xml)
super(xml)
e = REXML::XPath.first(@doc, './result')
if e and e.attributes['status'] == '0'
@ok = true
else
@ok = false
end
end
def ok?
@ok
end
end

View File

@@ -20,11 +20,16 @@ class Solr::Response::Xml < Solr::Response::Base
super(xml)
# parse the xml
@doc = REXML::Document.new(xml)
# look for the result code and string
result = REXML::XPath.first(@doc, './result')
# <?xml version="1.0" encoding="UTF-8"?>
# <response>
# <lst name="responseHeader"><int name="status">0</int><int name="QTime">2</int></lst>
# </response>
result = REXML::XPath.first(@doc, './response/lst[@name="responseHeader"]/int[@name="status"]')
if result
@status_code = result.attributes['status']
@status_message = result.text
@status_code = result.text
@status_message = result.text # TODO: any need for a message?
end
rescue REXML::ParseException => e
raise Solr::Exception.new("invalid response xml: #{e}")
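With the trunk response format, success is read out of the responseHeader status element; a minimal standalone sketch of that parse, using the same XPath as the diff above.

require 'rexml/document'
require 'rexml/xpath'

raw = '<?xml version="1.0" encoding="UTF-8"?><response>' +
      '<lst name="responseHeader"><int name="status">0</int><int name="QTime">2</int></lst>' +
      '</response>'

doc    = REXML::Document.new(raw)
status = REXML::XPath.first(doc, './response/lst[@name="responseHeader"]/int[@name="status"]')
ok     = status && status.text == '0'   # roughly what ok? boils down to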

View File

@@ -17,11 +17,21 @@
-->
<config>
<!-- Set this to 'false' if you want solr to continue working after it has
encountered a severe configuration error. In a production environment,
you may want solr to keep working even if one handler is mis-configured.
You may also set this to false by setting the system property:
-Dsolr.abortOnConfigurationError=false
-->
<abortOnConfigurationError>${solr.abortOnConfigurationError:true}</abortOnConfigurationError>
<!-- Used to specify an alternate directory to hold all index data
other than the default ./data under the Solr home.
If replication is in use, this should match the replication configuration. -->
<dataDir>${solr.data.dir:./solr/data}</dataDir>
<!--
<dataDir>./solr/data</dataDir>
-->
<indexDefaults>
<!-- Values here affect all index writers and act as a default unless overridden. -->
@@ -60,6 +70,7 @@
<!-- autocommit pending docs if certain criteria are met
<autoCommit>
<maxDocs>10000</maxDocs>
<maxTime>1000</maxTime>
</autoCommit>
-->
@@ -114,9 +125,9 @@
-->
<filterCache
class="solr.LRUCache"
size="300000"
initialSize="100000"
autowarmCount="50000"/>
size="512"
initialSize="512"
autowarmCount="256"/>
<!-- queryResultCache caches results of searches - ordered lists of
document ids (DocList) based on a query, a sort, and the range
@@ -213,21 +224,30 @@
warming. -->
<useColdSearcher>false</useColdSearcher>
<!-- Maximum number of searchers that may be warming in the background
concurrently. An error is returned if this limit is exceeded. Recommend
1-2 for read-only slaves, higher for masters w/o cache warming. -->
<maxWarmingSearchers>4</maxWarmingSearchers>
</query>
<!--
Let the dispatch filter handle /select?qt=XXX
handleSelect=true will use consistent error handling for /select and /update
handleSelect=false will use solr1.1 style error formatting
-->
<requestDispatcher handleSelect="true" >
<!--Make sure your system has some authentication before enabling remote streaming! -->
<requestParsers enableRemoteStreaming="false" multipartUploadLimitInKB="2048" />
</requestDispatcher>
<!-- requestHandler plugins... incoming queries will be dispatched to the
correct handler based on the qt (query type) param matching the
name of registered handlers.
The "standard" request handler is the default and will be used if qt
is not specified in the request.
-->
<!--Make sure your system has some authentication before enabling remote streaming! -->
<requestParsers enableRemoteStreaming="true" multipartUploadLimitInKB="2048" />
<requestHandler name="indexinfo" class="solr.IndexInfoRequestHandler"/>
<requestHandler name="standard" class="solr.StandardRequestHandler">
<!-- default values for query parameters -->
<lst name="defaults">
@@ -239,7 +259,7 @@
-->
</lst>
</requestHandler>
<!-- DisMaxRequestHandler allows easy searching across multiple fields
for simple user-entered phrases.
see http://wiki.apache.org/solr/DisMaxRequestHandler
@@ -249,41 +269,150 @@
<str name="echoParams">explicit</str>
<float name="tie">0.01</float>
<str name="qf">
text^1.9
text^0.5 features^1.0 name^1.2 sku^1.5 id^10.0 manu^1.1 cat^1.4
</str>
<str name="pf">
text^1.9
text^0.2 features^1.1 name^1.5 manu^1.4 manu_exact^1.9
</str>
<str name="bf">
ord(popularity)^0.5 recip(rord(price),1,1000,1000)^0.3
</str>
<str name="fl">
id, text
id,name,price,score
</str>
<str name="mm">
2&lt;-1 5&lt;-2 6&lt;90%
</str>
<int name="ps">100</int>
<str name="q.alt">*:*</str>
</lst>
</requestHandler>
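Queries reach this handler by name through the qt parameter. A hedged sketch from the Ruby client using the generic Select request shown earlier in this commit; the URL is assumed, and with no q supplied the configured q.alt of *:* applies.

require 'solr'

conn = Solr::Connection.new('http://localhost:8983/solr')    # assumed URL
response = conn.send(Solr::Request::Select.new('dismax'))    # POSTs qt=dismax&wt=ruby to /select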
<!-- Standard update plugin. If we put this on /update, it will get all the new goodness -->
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler" >
<!--
<lst name="defaults">
<str name="name">value</str>
</lst>
<!-- Note how you can register the same handler multiple times with
different names (and different init parameters)
-->
<requestHandler name="partitioned" class="solr.DisMaxRequestHandler" >
<lst name="defaults">
<str name="echoParams">explicit</str>
<str name="qf">text^0.5 features^1.0 name^1.2 sku^1.5 id^10.0</str>
<str name="mm">2&lt;-1 5&lt;-2 6&lt;90%</str>
<!-- This is an example of using Date Math to specify a constantly
moving date range in a config...
-->
<str name="bq">incubationdate_dt:[* TO NOW/DAY-1MONTH]^2.2</str>
</lst>
<!-- In addition to defaults, "appends" params can be specified
to identify values which should be appended to the list of
multi-val params from the query (or the existing "defaults").
In this example, the param "fq=instock:true" will be appended to
any query time fq params the user may specify, as a mechanism for
partitioning the index, independent of any user selected filtering
that may also be desired (perhaps as a result of faceted searching).
NOTE: there is *absolutely* nothing a client can do to prevent these
"appends" values from being used, so don't use this mechanism
unless you are sure you always want it.
-->
<lst name="appends">
<str name="fq">inStock:true</str>
</lst>
<!-- "invariants" are a way of letting the Solr maintainer lock down
the options available to Solr clients. Any params values
specified here are used regardless of what values may be specified
in either the query, the "defaults", or the "appends" params.
In this example, the facet.field and facet.query params are fixed,
limiting the facets clients can use. Faceting is not turned on by
default - but if the client does specify facet=true in the request,
these are the only facets they will be able to see counts for;
regardless of what other facet.field or facet.query params they
may specify.
NOTE: there is *absolutely* nothing a client can do to prevent these
"invariants" values from being used, so don't use this mechanism
unless you are sure you always want it.
-->
<lst name="invariants">
<str name="facet.field">cat</str>
<str name="facet.field">manu_exact</str>
<str name="facet.query">price:[* TO 500]</str>
<str name="facet.query">price:[500 TO *]</str>
</lst>
</requestHandler>
<requestHandler name="instock" class="solr.DisMaxRequestHandler" >
<!-- for legacy reasons, DisMaxRequestHandler will assume all init
params are "defaults" if you don't explicitly specify any defaults.
-->
<str name="fq">
inStock:true
</str>
<str name="qf">
text^0.5 features^1.0 name^1.2 sku^1.5 id^10.0 manu^1.1 cat^1.4
</str>
<str name="mm">
2&lt;-1 5&lt;-2 6&lt;90%
</str>
</requestHandler>
<requestHandler name="/debug/dump" class="solr.DumpRequestHandler" />
<!-- NOTE, /update is mapped to a servlet, we can have the filter handle requests off that! -->
<requestHandler name="/update/commit" class="solr.CommitRequestHandler" />
<!-- SpellCheckerRequestHandler takes in a word (or several words) as the
value of the "q" parameter and returns a list of alternative spelling
suggestions. If invoked with a ...&cmd=rebuild, it will rebuild the
spellchecker index.
-->
<requestHandler name="spellchecker" class="solr.SpellCheckerRequestHandler">
<!-- default values for query parameters -->
<lst name="defaults">
<int name="suggestionCount">1</int>
<float name="accuracy">0.5</float>
</lst>
<!-- Main init params for handler -->
<!-- The directory where your SpellChecker Index should live. -->
<!-- May be absolute, or relative to the Solr "dataDir" directory. -->
<!-- If this option is not specified, a RAM directory will be used -->
<str name="spellcheckerIndexDir">spell</str>
<!-- the field in your schema that you want to be able to build -->
<!-- your spell index on. This should be a field that uses a very -->
<!-- simple FieldType without a lot of Analysis (ie: string) -->
<str name="termSourceField">word</str>
</requestHandler>
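The gem does not appear to ship a dedicated spellcheck request at this point, so a quick way to try the handler is plain HTTP through the dispatch filter; host, port, and the sample word are assumptions.

require 'net/http'
require 'uri'

# Ask for a suggestion for a (misspelled) word via qt=spellchecker.
res = Net::HTTP.get_response(URI.parse('http://localhost:8983/solr/select?qt=spellchecker&q=whateva'))
puts res.body

# Rebuilding the spelling index, per the comment above:
#   http://localhost:8983/solr/select?qt=spellchecker&cmd=rebuild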
<!-- Update request handler.
Note: Since solr1.1, request handlers require a valid content type header if posted in
the body. For example, curl now requires: -H 'Content-type:text/xml; charset=utf-8'
The response format differs from solr1.1 formatting and returns a standard error code.
To enable solr1.1 behavior, remove the /update handler or change its path
-->
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler" />
<!-- CSV update handler, loaded on demand -->
<requestHandler name="/update/csv" class="solr.CSVRequestHandler" startup="lazy">
<requestHandler name="/update/csv" class="solr.CSVRequestHandler" startup="lazy" />
<!-- Admin Handlers. TODO? There could be a single handler that loads them all... -->
<requestHandler name="/admin/luke" class="org.apache.solr.handler.admin.LukeRequestHandler" />
<requestHandler name="/admin/system" class="org.apache.solr.handler.admin.SystemInfoHandler" />
<requestHandler name="/admin/plugins" class="org.apache.solr.handler.admin.PluginInfoHandler" />
<requestHandler name="/admin/threads" class="org.apache.solr.handler.admin.ThreadDumpHandler" />
<requestHandler name="/admin/properties" class="org.apache.solr.handler.admin.PropertiesRequestHandler" />
<!-- Echo the request contents back to the client -->
<requestHandler name="/debug/dump" class="solr.DumpRequestHandler" >
<lst name="defaults">
<str name="echoParams">explicit</str> <!-- for all params (including the default etc) use: 'all' -->
<str name="echoHandler">true</str>
</lst>
</requestHandler>
<!-- queryResponseWriter plugins... query responses will be written using the
writer specified by the 'wt' request parameter matching the name of a registered
writer.
@@ -315,7 +444,7 @@
&amp; separated key=val pairs ... but there shouldn't be any
URL escaping of the values -->
<pingQuery>
q=solr
qt=dismax&amp;q=solr&amp;start=3&amp;fq=id:[* TO *]&amp;fq=cat:[* TO *]
</pingQuery>
<!-- configure a healthcheck file for servers behind a loadbalancer
<healthcheck type="file">server-enabled</healthcheck>

View File

@@ -17,6 +17,14 @@
-->
<config>
<!-- Set this to 'false' if you want solr to continue working after it has
encountered a severe configuration error. In a production environment,
you may want solr to keep working even if one handler is mis-configured.
You may also set this to false by setting the system property:
-Dsolr.abortOnConfigurationError=false
-->
<abortOnConfigurationError>${solr.abortOnConfigurationError:true}</abortOnConfigurationError>
<!-- Used to specify an alternate directory to hold all index data
other than the default ./data under the Solr home.
@@ -62,6 +70,7 @@
<!-- autocommit pending docs if certain criteria are met
<autoCommit>
<maxDocs>10000</maxDocs>
<maxTime>1000</maxTime>
</autoCommit>
-->
@@ -215,16 +224,30 @@
warming. -->
<useColdSearcher>false</useColdSearcher>
<!-- Maximum number of searchers that may be warming in the background
concurrently. An error is returned if this limit is exceeded. Recommend
1-2 for read-only slaves, higher for masters w/o cache warming. -->
<maxWarmingSearchers>4</maxWarmingSearchers>
</query>
<!--
Let the dispatch filter handle /select?qt=XXX
handleSelect=true will use consistent error handling for /select and /update
handleSelect=false will use solr1.1 style error formatting
-->
<requestDispatcher handleSelect="true">
<!--Make sure your system has some authentication before enabling remote streaming! -->
<requestParsers enableRemoteStreaming="false" multipartUploadLimitInKB="2048" />
</requestDispatcher>
<!-- requestHandler plugins... incoming queries will be dispatched to the
correct handler based on the qt (query type) param matching the
name of registered handlers.
The "standard" request handler is the default and will be used if qt
is not specified in the request.
-->
<requestHandler name="indexinfo" class="solr.IndexInfoRequestHandler"/>
<requestHandler name="standard" class="solr.StandardRequestHandler">
<!-- default values for query parameters -->
<lst name="defaults">
@@ -236,7 +259,7 @@
-->
</lst>
</requestHandler>
<!-- DisMaxRequestHandler allows easy searching across multiple fields
for simple user-entered phrases.
see http://wiki.apache.org/solr/DisMaxRequestHandler
@@ -246,21 +269,85 @@
<str name="echoParams">explicit</str>
<float name="tie">0.01</float>
<str name="qf">
text^1.9
</str>
<str name="pf">
text^1.9
text^0.5
</str>
<!-- <str name="pf"> -->
<!-- text^0.2 features^1.1 name^1.5 manu^1.4 manu_exact^1.9 -->
<!-- </str> -->
<!-- <str name="bf"> -->
<!-- ord(popularity)^0.5 recip(rord(price),1,1000,1000)^0.3 -->
<!-- </str> -->
<str name="fl">
id, text
id,test
</str>
<str name="mm">
2&lt;-1 5&lt;-2 6&lt;90%
</str>
<int name="ps">100</int> -->
<int name="ps">100</int>
<str name="q.alt">*:*</str>
</lst>
</requestHandler>
<!-- Note how you can register the same handler multiple times with
different names (and different init parameters)
-->
<!-- SpellCheckerRequestHandler takes in a word (or several words) as the
value of the "q" parameter and returns a list of alternative spelling
suggestions. If invoked with a ...&cmd=rebuild, it will rebuild the
spellchecker index.
-->
<requestHandler name="spellchecker" class="solr.SpellCheckerRequestHandler">
<!-- default values for query parameters -->
<lst name="defaults">
<int name="suggestionCount">1</int>
<float name="accuracy">0.5</float>
</lst>
<!-- Main init params for handler -->
<!-- The directory where your SpellChecker Index should live. -->
<!-- May be absolute, or relative to the Solr "dataDir" directory. -->
<!-- If this option is not specified, a RAM directory will be used -->
<str name="spellcheckerIndexDir">spell</str>
<!-- the field in your schema that you want to be able to build -->
<!-- your spell index on. This should be a field that uses a very -->
<!-- simple FieldType without a lot of Analysis (ie: string) -->
<str name="termSourceField">word</str>
</requestHandler>
<!-- Update request handler.
Note: Since solr1.1, request handlers require a valid content type header if posted in
the body. For example, curl now requires: -H 'Content-type:text/xml; charset=utf-8'
The response format differs from solr1.1 formatting and returns a standard error code.
To enable solr1.1 behavior, remove the /update handler or change its path
-->
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler" />
<!-- CSV update handler, loaded on demand -->
<requestHandler name="/update/csv" class="solr.CSVRequestHandler" startup="lazy" />
<!-- Admin Handlers. TODO? There could be a single handler that loads them all... -->
<requestHandler name="/admin/luke" class="org.apache.solr.handler.admin.LukeRequestHandler" />
<requestHandler name="/admin/system" class="org.apache.solr.handler.admin.SystemInfoHandler" />
<requestHandler name="/admin/plugins" class="org.apache.solr.handler.admin.PluginInfoHandler" />
<requestHandler name="/admin/threads" class="org.apache.solr.handler.admin.ThreadDumpHandler" />
<requestHandler name="/admin/properties" class="org.apache.solr.handler.admin.PropertiesRequestHandler" />
<!-- Echo the request contents back to the client -->
<requestHandler name="/debug/dump" class="solr.DumpRequestHandler" >
<lst name="defaults">
<str name="echoParams">explicit</str> <!-- for all params (including the default etc) use: 'all' -->
<str name="echoHandler">true</str>
</lst>
</requestHandler>
<!-- queryResponseWriter plugins... query responses will be written using the
writer specified by the 'wt' request parameter matching the name of a registered
writer.
@@ -292,7 +379,7 @@
&amp; separated key=val pairs ... but there shouldn't be any
URL escaping of the values -->
<pingQuery>
q=solr
qt=dismax&amp;q=solr&amp;start=3&amp;fq=id:[* TO *]&amp;fq=cat:[* TO *]
</pingQuery>
<!-- configure a healthcheck file for servers behind a loadbalancer
<healthcheck type="file">server-enabled</healthcheck>

View File

@@ -113,12 +113,12 @@ class ServerTest < Test::Unit::TestCase
def test_commit
response = @connection.send(Solr::Request::Commit.new)
assert_equal "<result status=\"0\"></result>", response.raw_response
assert response.ok?
end
def test_optimize
response = @connection.send(Solr::Request::Optimize.new)
assert_equal "<result status=\"0\"></result>", response.raw_response
assert response.ok?
end
# TODO: add test_ping back... something seems to have changed with the response, so adjustments are needed.
@@ -168,14 +168,17 @@ class ServerTest < Test::Unit::TestCase
def test_no_such_field
doc = {:id => 999, :bogus => 'foo'}
request = Solr::Request::AddDocument.new(doc)
response = @connection.send(request)
assert_equal false, response.ok?
assert_match "ERROR:unknown field 'bogus'", response.status_message
assert_raise(Net::HTTPServerException) do
response = @connection.send(request)
end
# assert_equal false, response.ok?
# assert_match "ERROR:unknown field 'bogus'", response.status_message
end
def test_index_info
doc = {:id => 999, :test_index_facet => 'value'}
@connection.add(doc)
ii = Solr::Request::IndexInfo.new
info = @connection.send(Solr::Request::IndexInfo.new)
assert info.field_names.include?("id") && info.field_names.include?("test_index_facet")
assert_equal 1, info.num_docs
@@ -199,7 +202,7 @@ class ServerTest < Test::Unit::TestCase
# wipe the index clean
def clean
@connection.delete_by_query('[* TO *]')
@connection.delete_by_query('*:*')
end
end
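The test_no_such_field change reflects the trunk server returning real HTTP error codes, which the client surfaces as Net::HTTPServerException. A hedged sketch of handling that in application code; the URL and document are invented.

require 'solr'

conn = Solr::Connection.new('http://localhost:8983/solr')   # assumed URL
begin
  conn.add(:id => 999, :bogus => 'foo')                     # field not in the schema
rescue Net::HTTPServerException => e
  STDERR.puts "Solr rejected the document: #{e.message}"
end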

View File

@@ -16,7 +16,7 @@ class AddDocumentTest < SolrMockBaseTestCase
def test_add_document_response
conn = Solr::Connection.new('http://localhost:9999/solr')
set_post_return('<result status="0"></result>')
set_post_return('<?xml version="1.0" encoding="UTF-8"?><response><lst name="responseHeader"><int name="status">0</int><int name="QTime">2</int></lst></response>')
doc = {:id => '123', :text => 'Tlon, Uqbar, Orbis Tertius'}
response = conn.send(Solr::Request::AddDocument.new(doc))
assert_equal true, response.ok?
@@ -24,7 +24,7 @@ class AddDocumentTest < SolrMockBaseTestCase
def test_bad_add_document_response
conn = Solr::Connection.new('http://localhost:9999/solr')
set_post_return('<result status="400"></result>')
set_post_return('<?xml version="1.0" encoding="UTF-8"?><response><lst name="responseHeader"><int name="status">1</int><int name="QTime">2</int></lst></response>')
doc = {:id => '123', :text => 'Tlon, Uqbar, Orbis Tertius'}
response = conn.send(Solr::Request::AddDocument.new(doc))
assert_equal false, response.ok?
@@ -32,7 +32,7 @@ class AddDocumentTest < SolrMockBaseTestCase
def test_shorthand
conn = Solr::Connection.new('http://localhost:9999/solr')
set_post_return('<result status="0"></result>')
set_post_return('<?xml version="1.0" encoding="UTF-8"?><response><lst name="responseHeader"><int name="status">0</int><int name="QTime">2</int></lst></response>')
doc = {:id => '123', :text => 'Tlon, Uqbar, Orbis Tertius'}
assert_equal true, conn.add(:id => '123', :text => 'Tlon, Uqbar, Orbis Tertius')
end

View File

@@ -15,27 +15,27 @@ require 'solr_mock_base'
class CommitTest < SolrMockBaseTestCase
def test_commit
xml = '<result status="0"></result>'
xml = '<?xml version="1.0" encoding="UTF-8"?><response><lst name="responseHeader"><int name="status">0</int><int name="QTime">2</int></lst></response>'
conn = Solr::Connection.new('http://localhost:9999/solr')
set_post_return(xml)
response = conn.send(Solr::Request::Commit.new)
assert_kind_of Solr::Response::Commit, response
assert true, response.ok?
assert_equal true, response.ok?
# test shorthand
assert_equal true, conn.commit
end
def test_invalid_commit
xml = '<foo>bar</foo>'
conn = Solr::Connection.new('http://localhost:9999/solr')
set_post_return(xml)
response = conn.send(Solr::Request::Commit.new)
assert_kind_of Solr::Response::Commit, response
assert_equal false, response.ok?
# test shorthand
assert_equal false, conn.commit
end
# def test_invalid_commit
# xml = '<?xml version="1.0" encoding="UTF-8"?><response><lst name="responseHeader"><int name="status">1</int><int name="QTime">2</int></lst></response>'
# conn = Solr::Connection.new('http://localhost:9999/solr')
# set_post_return(xml)
# response = conn.send(Solr::Request::Commit.new)
# assert_kind_of Solr::Response::Commit, response
# assert_equal false, response.ok?
#
# # test shorthand
# assert_equal false, conn.commit
# end
end

View File

@@ -26,7 +26,7 @@ class DeleteTest < SolrMockBaseTestCase
def test_delete_response
conn = Solr::Connection.new 'http://localhost:9999/solr'
set_post_return('<result status="0"></result>')
set_post_return('<?xml version="1.0" encoding="UTF-8"?><response><lst name="responseHeader"><int name="status">0</int><int name="QTime">2</int></lst></response>')
response = conn.send(Solr::Request::Delete.new(:id => 123))
assert_equal true, response.ok?
end

View File

@@ -0,0 +1,40 @@
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'solr'
require 'test/unit'
require 'hpricot'
class HpricotMapperTest < Test::Unit::TestCase
def setup
@doc = open(File.expand_path(File.dirname(__FILE__)) + "/hpricot_test_file.xml"){|f| Hpricot.XML(f)}
end
def test_simple_hpricot_path
mapping = {:field1 => :'child[@attribute="attribute1"]',
:field2 => :'child[@attribute="attribute2"]',
:field3 => :'child[@attribute="attribute3"]',
:field4 => :'child[@attribute="attribute3"] grandchild',
:field5 => :'child'}
mapper = Solr::Importer::HpricotMapper.new(mapping)
mapped_data = mapper.map(@doc)
assert_equal ['text1'], mapped_data[:field1]
assert_equal ['text2'], mapped_data[:field2]
assert_equal ['text3<grandchild>grandchild 3 text</grandchild>'], mapped_data[:field3]
assert_equal ['grandchild 3 text'], mapped_data[:field4]
assert_equal ['text1', 'text2', 'text3<grandchild>grandchild 3 text</grandchild>'], mapped_data[:field5]
end
end

View File

@@ -0,0 +1,7 @@
<root>
<parent>
<child attribute="attribute1">text1</child>
<child attribute="attribute2">text2</child>
<child attribute="attribute3">text3<grandchild>grandchild 3 text</grandchild></child>
</parent>
</root>