#!/usr/bin/env python

import sys
import json
import httplib
import urllib

import libsvm_formatter

from optparse import OptionParser

solrQueryUrl = ""

def setupSolr(collection, host, port, featuresFile, featureStoreName):
    '''Sets up Solr with the proper features for the test'''

    conn = httplib.HTTPConnection(host, port)

    baseUrl = "/solr/" + collection
    featureUrl = baseUrl + "/schema/feature-store"

    # Delete any pre-existing feature store of this name; NOT_FOUND simply means
    # there was nothing to delete.
    conn.request("DELETE", featureUrl + "/" + featureStoreName)
    r = conn.getresponse()
    msg = r.read()
    if (r.status != httplib.OK and
        r.status != httplib.CREATED and
        r.status != httplib.ACCEPTED and
        r.status != httplib.NOT_FOUND):
        raise Exception("Status: {0} {1}\nResponse: {2}".format(r.status, r.reason, msg))

    # Add features
    headers = {'Content-type': 'application/json'}
    featuresBody = open(featuresFile)

    conn.request("POST", featureUrl, featuresBody, headers)
    r = conn.getresponse()
    msg = r.read()
    if (r.status != httplib.OK and
        r.status != httplib.ACCEPTED):
        print r.status
        print r.reason
        raise Exception("Status: {0} {1}\nResponse: {2}".format(r.status, r.reason, msg))

    conn.close()

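# Note: generateQueries below splits each line of the user-queries file on "|"
# into (searchText, docId, score, source). The actual values come from whatever
# judgment/click data you provide; a purely hypothetical line could look like:
#
#   hard drive|doc42|1.0|clickLogs
#
# where "doc42" and "clickLogs" are placeholders, not names taken from any real
# dataset.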
def generateQueries(userQueriesFile, collection, requestHandler, solrFeatureStoreName, efiParams):
    with open(userQueriesFile) as input:
        solrQueryUrls = []  # a list of (solrQuery, searchText, docId, score, source) tuples

        for line in input:
            line = line.strip()
            searchText, docId, score, source = line.split("|")
            solrQuery = generateHttpRequest(collection, requestHandler, solrFeatureStoreName, efiParams, searchText, docId)
            solrQueryUrls.append((solrQuery, searchText, docId, score, source))

    return solrQueryUrls

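# Roughly, generateHttpRequest builds a request URL of this shape (before
# URL-encoding):
#
#   /solr/<collection>/<requestHandler>?fl=id,score,[features store=<solrFeatureStoreName> <efiParams>]&q=id:"<docId>"
#
# Spaces are turned into "+", the id/query parts are escaped with
# urllib.quote_plus, and any $USERQUERY placeholder carried in efiParams is
# replaced with the escaped user query text.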
def generateHttpRequest(collection, requestHandler, solrFeatureStoreName, efiParams, searchText, docId):
    # Build the constant URL prefix only once and cache it in the module-level
    # solrQueryUrl; only the docId and user query change between requests.
    global solrQueryUrl
    if len(solrQueryUrl) < 1:
        solrQueryUrl = "/".join([ "", "solr", collection, requestHandler ])
        solrQueryUrl += ("?fl=" + ",".join([ "id", "score", "[features store="+solrFeatureStoreName+" "+efiParams+"]" ]))
        solrQueryUrl += "&q="
        solrQueryUrl = solrQueryUrl.replace(" ", "+")
        solrQueryUrl += urllib.quote_plus("id:")

    # Escape quotes and slashes in the user query, then substitute it for any
    # $USERQUERY / %24USERQUERY placeholder coming from the efi parameters.
    userQuery = urllib.quote_plus(searchText.strip().replace("'", "\\'").replace("/", "\\\\/"))
    solrQuery = solrQueryUrl + '"' + urllib.quote_plus(docId) + '"'
    solrQuery = solrQuery.replace("%24USERQUERY", userQuery).replace('$USERQUERY', urllib.quote_plus("\\'" + userQuery + "\\'"))

    return solrQuery

def generateTrainingData(solrQueries, host, port):
    '''Given a list of Solr queries, yields a (query, docId, score, source, feature vector) tuple for each query.
    The feature vector is a list of strings of the form "key=value".'''

    conn = httplib.HTTPConnection(host, port)
    headers = {"Connection": "keep-alive"}
    msg = None  # so the except clause below can print it even if the first request fails

    try:
        for queryUrl, query, docId, score, source in solrQueries:
            conn.request("GET", queryUrl, headers=headers)
            r = conn.getresponse()
            msg = r.read()
            msgDict = json.loads(msg)
            fv = ""
            docs = msgDict['response']['docs']
            if len(docs) > 0 and "[features]" in docs[0]:
                if docs[0]["[features]"] is not None:
                    fv = docs[0]["[features]"]
                else:
                    print "ERROR NULL FV FOR: " + docId
                    print msg
                    continue
            else:
                print "ERROR FOR: " + docId
                print msg
                continue

            if r.status == httplib.OK:
                yield (query, docId, score, source, fv.split(","))
            else:
                raise Exception("Status: {0} {1}\nResponse: {2}".format(r.status, r.reason, msg))
    except Exception as e:
        print msg
        print e

    conn.close()

def uploadModel(collection, host, port, modelFile, modelName):
    modelUrl = "/solr/" + collection + "/schema/model-store"
    headers = {'Content-type': 'application/json'}
    with open(modelFile) as modelBody:
        conn = httplib.HTTPConnection(host, port)

        conn.request("DELETE", modelUrl + "/" + modelName)
        r = conn.getresponse()
        msg = r.read()
        if (r.status != httplib.OK and
            r.status != httplib.CREATED and
            r.status != httplib.ACCEPTED and
            r.status != httplib.NOT_FOUND):
            raise Exception("Status: {0} {1}\nResponse: {2}".format(r.status, r.reason, msg))

        conn.request("POST", modelUrl, modelBody, headers)
        r = conn.getresponse()
        msg = r.read()
        if (r.status != httplib.OK and
            r.status != httplib.CREATED and
            r.status != httplib.ACCEPTED):
            raise Exception("Status: {0} {1}\nResponse: {2}".format(r.status, r.reason, msg))

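# main() expects a JSON config file (passed with -c/--config) containing the
# keys read below. The values in this sketch are only illustrative placeholders,
# not defaults shipped with this script:
#
#   {
#     "host": "localhost",
#     "port": 8983,
#     "collection": "myCollection",
#     "requestHandler": "query",
#     "solrFeaturesFile": "my_features.json",
#     "solrFeatureStoreName": "myFeatureStore",
#     "efiParams": "efi.user_query=$USERQUERY",
#     "userQueriesFile": "my_user_queries.txt",
#     "trainingFile": "my_training_data.txt",
#     "trainingLibraryLocation": "path/to/train",
#     "trainingLibraryOptions": "",
#     "trainedModelFile": "my_trained_model.txt",
#     "solrModelFile": "my_solr_model.json",
#     "solrModelName": "myModelName"
#   }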
def main(argv=None):
    if argv is None:
        argv = sys.argv

    parser = OptionParser(usage="usage: %prog [options] ", version="%prog 1.0")
    parser.add_option('-c', '--config',
                      dest='configFile',
                      help='File of configuration for the test')
    (options, args) = parser.parse_args()

    if options.configFile is None:
        parser.print_help()
        return 1

    with open(options.configFile) as configFile:
        config = json.load(configFile)

        print "Uploading features ("+config["solrFeaturesFile"]+") to Solr"
        setupSolr(config["collection"], config["host"], config["port"], config["solrFeaturesFile"], config["solrFeatureStoreName"])

        print "Converting user queries ("+config["userQueriesFile"]+") into Solr queries for feature extraction"
        reRankQueries = generateQueries(config["userQueriesFile"], config["collection"], config["requestHandler"], config["solrFeatureStoreName"], config["efiParams"])

        print "Running Solr queries to extract features"
        fvGenerator = generateTrainingData(reRankQueries, config["host"], config["port"])
        formatter = libsvm_formatter.LibSvmFormatter()
        formatter.processQueryDocFeatureVector(fvGenerator, config["trainingFile"])

        print "Training model using '"+config["trainingLibraryLocation"]+" "+config["trainingLibraryOptions"]+"'"
        libsvm_formatter.trainLibSvm(config["trainingLibraryLocation"], config["trainingLibraryOptions"], config["trainingFile"], config["trainedModelFile"])

        print "Converting trained model ("+config["trainedModelFile"]+") to Solr model ("+config["solrModelFile"]+")"
        formatter.convertLibSvmModelToLtrModel(config["trainedModelFile"], config["solrModelFile"], config["solrModelName"], config["solrFeatureStoreName"])

        print "Uploading model ("+config["solrModelFile"]+") to Solr"
        uploadModel(config["collection"], config["host"], config["port"], config["solrModelFile"], config["solrModelName"])

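# Hypothetical invocation, assuming this file is saved as train_model.py (the
# script and config file names are placeholders) and the config follows the
# layout sketched above main():
#
#   python train_model.py -c my_config.json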
if __name__ == '__main__':
    sys.exit(main())