From 0d378ed742325e890aa34f265fb90b74e1b13e82 Mon Sep 17 00:00:00 2001 From: Mark Robert Miller Date: Sun, 14 Mar 2010 20:20:46 +0000 Subject: [PATCH] initial move of solr to lucene svn - first as a dev branch to get solr on lucene trunk git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@922942 13f79535-47bb-0310-9956-ffa450edef68 --- solr/CHANGES.txt | 2267 +++++++++++++++ solr/KEYS | 453 +++ solr/LICENSE.txt | 1086 +++++++ solr/NOTICE.txt | 254 ++ solr/README.txt | 118 + solr/build.xml | 923 ++++++ solr/client/javascript/README.txt | 5 + solr/client/python/README.txt | 9 + solr/client/ruby/flare/README | 29 + solr/client/ruby/flare/Rakefile | 24 + .../ruby/flare/app/controllers/application.rb | 19 + .../flare/app/controllers/i18n_controller.rb | 19 + .../flare/app/helpers/application_helper.rb | 15 + .../ruby/flare/app/helpers/browse_helper.rb | 14 + .../ruby/flare/app/helpers/simile_helper.rb | 14 + .../flare/app/views/browse/_suggest.rhtml | 24 + .../ruby/flare/app/views/browse/facet.rhtml | 55 + .../views/document/_document_delicious.rhtml | 30 + .../app/views/document/_document_tang.rhtml | 43 + .../app/views/document/_document_uva.rhtml | 36 + .../ruby/flare/app/views/i18n/index.rhtml | 16 + .../ruby/flare/app/views/layouts/browse.rhtml | 28 + solr/client/ruby/flare/config/boot.rb | 57 + solr/client/ruby/flare/config/database.yml | 51 + solr/client/ruby/flare/config/environment.rb | 105 + .../flare/config/environments/development.rb | 33 + .../flare/config/environments/production.rb | 30 + .../ruby/flare/config/environments/test.rb | 31 + solr/client/ruby/flare/config/routes.rb | 40 + solr/client/ruby/flare/db/schema.rb | 11 + .../tasks/clear_database_prerequisites.rake | 17 + solr/client/ruby/flare/lib/tasks/routes.rake | 22 + solr/client/ruby/flare/public/.htaccess | 52 + solr/client/ruby/flare/public/404.html | 43 + solr/client/ruby/flare/public/500.html | 43 + solr/client/ruby/flare/public/dispatch.cgi | 22 + 
solr/client/ruby/flare/public/dispatch.fcgi | 37 + solr/client/ruby/flare/public/dispatch.rb | 22 + solr/client/ruby/flare/public/favicon.ico | 0 .../client/ruby/flare/public/images/flare.jpg | Bin 0 -> 31282 bytes .../client/ruby/flare/public/images/pie_0.png | Bin 0 -> 655 bytes .../client/ruby/flare/public/images/pie_1.png | Bin 0 -> 761 bytes .../ruby/flare/public/images/pie_10.png | Bin 0 -> 795 bytes .../ruby/flare/public/images/pie_100.png | Bin 0 -> 736 bytes .../ruby/flare/public/images/pie_11.png | Bin 0 -> 796 bytes .../ruby/flare/public/images/pie_12.png | Bin 0 -> 806 bytes .../ruby/flare/public/images/pie_13.png | Bin 0 -> 779 bytes .../ruby/flare/public/images/pie_14.png | Bin 0 -> 793 bytes .../ruby/flare/public/images/pie_15.png | Bin 0 -> 792 bytes .../ruby/flare/public/images/pie_16.png | Bin 0 -> 781 bytes .../ruby/flare/public/images/pie_17.png | Bin 0 -> 795 bytes .../ruby/flare/public/images/pie_18.png | Bin 0 -> 785 bytes .../ruby/flare/public/images/pie_19.png | Bin 0 -> 783 bytes .../client/ruby/flare/public/images/pie_2.png | Bin 0 -> 769 bytes .../ruby/flare/public/images/pie_20.png | Bin 0 -> 782 bytes .../ruby/flare/public/images/pie_21.png | Bin 0 -> 772 bytes .../ruby/flare/public/images/pie_22.png | Bin 0 -> 793 bytes .../ruby/flare/public/images/pie_23.png | Bin 0 -> 774 bytes .../ruby/flare/public/images/pie_24.png | Bin 0 -> 775 bytes .../ruby/flare/public/images/pie_25.png | Bin 0 -> 716 bytes .../ruby/flare/public/images/pie_26.png | Bin 0 -> 768 bytes .../ruby/flare/public/images/pie_27.png | Bin 0 -> 780 bytes .../ruby/flare/public/images/pie_28.png | Bin 0 -> 789 bytes .../ruby/flare/public/images/pie_29.png | Bin 0 -> 785 bytes .../client/ruby/flare/public/images/pie_3.png | Bin 0 -> 778 bytes .../ruby/flare/public/images/pie_30.png | Bin 0 -> 799 bytes .../ruby/flare/public/images/pie_31.png | Bin 0 -> 811 bytes .../ruby/flare/public/images/pie_32.png | Bin 0 -> 807 bytes .../ruby/flare/public/images/pie_33.png | Bin 0 -> 
792 bytes .../ruby/flare/public/images/pie_34.png | Bin 0 -> 823 bytes .../ruby/flare/public/images/pie_35.png | Bin 0 -> 818 bytes .../ruby/flare/public/images/pie_36.png | Bin 0 -> 830 bytes .../ruby/flare/public/images/pie_37.png | Bin 0 -> 842 bytes .../ruby/flare/public/images/pie_38.png | Bin 0 -> 813 bytes .../ruby/flare/public/images/pie_39.png | Bin 0 -> 818 bytes .../client/ruby/flare/public/images/pie_4.png | Bin 0 -> 770 bytes .../ruby/flare/public/images/pie_40.png | Bin 0 -> 820 bytes .../ruby/flare/public/images/pie_41.png | Bin 0 -> 822 bytes .../ruby/flare/public/images/pie_42.png | Bin 0 -> 837 bytes .../ruby/flare/public/images/pie_43.png | Bin 0 -> 831 bytes .../ruby/flare/public/images/pie_44.png | Bin 0 -> 823 bytes .../ruby/flare/public/images/pie_45.png | Bin 0 -> 825 bytes .../ruby/flare/public/images/pie_46.png | Bin 0 -> 827 bytes .../ruby/flare/public/images/pie_47.png | Bin 0 -> 813 bytes .../ruby/flare/public/images/pie_48.png | Bin 0 -> 798 bytes .../ruby/flare/public/images/pie_49.png | Bin 0 -> 789 bytes .../client/ruby/flare/public/images/pie_5.png | Bin 0 -> 786 bytes .../ruby/flare/public/images/pie_50.png | Bin 0 -> 715 bytes .../ruby/flare/public/images/pie_51.png | Bin 0 -> 772 bytes .../ruby/flare/public/images/pie_52.png | Bin 0 -> 787 bytes .../ruby/flare/public/images/pie_53.png | Bin 0 -> 803 bytes .../ruby/flare/public/images/pie_54.png | Bin 0 -> 803 bytes .../ruby/flare/public/images/pie_55.png | Bin 0 -> 807 bytes .../ruby/flare/public/images/pie_56.png | Bin 0 -> 823 bytes .../ruby/flare/public/images/pie_57.png | Bin 0 -> 821 bytes .../ruby/flare/public/images/pie_58.png | Bin 0 -> 826 bytes .../ruby/flare/public/images/pie_59.png | Bin 0 -> 824 bytes .../client/ruby/flare/public/images/pie_6.png | Bin 0 -> 807 bytes .../ruby/flare/public/images/pie_60.png | Bin 0 -> 824 bytes .../ruby/flare/public/images/pie_61.png | Bin 0 -> 827 bytes .../ruby/flare/public/images/pie_62.png | Bin 0 -> 834 bytes 
.../ruby/flare/public/images/pie_63.png | Bin 0 -> 813 bytes .../ruby/flare/public/images/pie_64.png | Bin 0 -> 837 bytes .../ruby/flare/public/images/pie_65.png | Bin 0 -> 830 bytes .../ruby/flare/public/images/pie_66.png | Bin 0 -> 815 bytes .../ruby/flare/public/images/pie_67.png | Bin 0 -> 831 bytes .../ruby/flare/public/images/pie_68.png | Bin 0 -> 814 bytes .../ruby/flare/public/images/pie_69.png | Bin 0 -> 830 bytes .../client/ruby/flare/public/images/pie_7.png | Bin 0 -> 807 bytes .../ruby/flare/public/images/pie_70.png | Bin 0 -> 819 bytes .../ruby/flare/public/images/pie_71.png | Bin 0 -> 809 bytes .../ruby/flare/public/images/pie_72.png | Bin 0 -> 828 bytes .../ruby/flare/public/images/pie_73.png | Bin 0 -> 805 bytes .../ruby/flare/public/images/pie_74.png | Bin 0 -> 802 bytes .../ruby/flare/public/images/pie_75.png | Bin 0 -> 731 bytes .../ruby/flare/public/images/pie_76.png | Bin 0 -> 826 bytes .../ruby/flare/public/images/pie_77.png | Bin 0 -> 819 bytes .../ruby/flare/public/images/pie_78.png | Bin 0 -> 800 bytes .../ruby/flare/public/images/pie_79.png | Bin 0 -> 831 bytes .../client/ruby/flare/public/images/pie_8.png | Bin 0 -> 799 bytes .../ruby/flare/public/images/pie_80.png | Bin 0 -> 809 bytes .../ruby/flare/public/images/pie_81.png | Bin 0 -> 824 bytes .../ruby/flare/public/images/pie_82.png | Bin 0 -> 825 bytes .../ruby/flare/public/images/pie_83.png | Bin 0 -> 814 bytes .../ruby/flare/public/images/pie_84.png | Bin 0 -> 805 bytes .../ruby/flare/public/images/pie_85.png | Bin 0 -> 844 bytes .../ruby/flare/public/images/pie_86.png | Bin 0 -> 818 bytes .../ruby/flare/public/images/pie_87.png | Bin 0 -> 831 bytes .../ruby/flare/public/images/pie_88.png | Bin 0 -> 790 bytes .../ruby/flare/public/images/pie_89.png | Bin 0 -> 815 bytes .../client/ruby/flare/public/images/pie_9.png | Bin 0 -> 801 bytes .../ruby/flare/public/images/pie_90.png | Bin 0 -> 811 bytes .../ruby/flare/public/images/pie_91.png | Bin 0 -> 817 bytes 
.../ruby/flare/public/images/pie_92.png | Bin 0 -> 809 bytes .../ruby/flare/public/images/pie_93.png | Bin 0 -> 809 bytes .../ruby/flare/public/images/pie_94.png | Bin 0 -> 797 bytes .../ruby/flare/public/images/pie_95.png | Bin 0 -> 798 bytes .../ruby/flare/public/images/pie_96.png | Bin 0 -> 791 bytes .../ruby/flare/public/images/pie_97.png | Bin 0 -> 757 bytes .../ruby/flare/public/images/pie_98.png | Bin 0 -> 738 bytes .../ruby/flare/public/images/pie_99.png | Bin 0 -> 743 bytes .../client/ruby/flare/public/images/rails.png | Bin 0 -> 1787 bytes .../flare/public/images/simile-exhibit.png | Bin 0 -> 708 bytes .../ruby/flare/public/images/solr_small.png | Bin 0 -> 7926 bytes .../ruby/flare/public/images/x-close.gif | Bin 0 -> 88 bytes .../flare/public/javascripts/application.js | 16 + .../ruby/flare/public/javascripts/controls.js | 833 ++++++ .../ruby/flare/public/javascripts/dragdrop.js | 942 ++++++ .../ruby/flare/public/javascripts/effects.js | 1088 +++++++ .../flare/public/javascripts/prototype.js | 2515 +++++++++++++++++ .../ruby/flare/public/plugin_assets/README | 17 + .../ruby/flare/public/stylesheets/flare.css | 180 ++ solr/client/ruby/flare/script/about | 15 + solr/client/ruby/flare/script/breakpointer | 15 + solr/client/ruby/flare/script/console | 15 + solr/client/ruby/flare/script/destroy | 15 + solr/client/ruby/flare/script/generate | 15 + .../ruby/flare/script/performance/benchmarker | 15 + .../ruby/flare/script/performance/profiler | 15 + solr/client/ruby/flare/script/pie.rb | 26 + solr/client/ruby/flare/script/plugin | 15 + .../ruby/flare/script/process/inspector | 15 + solr/client/ruby/flare/script/process/reaper | 15 + solr/client/ruby/flare/script/process/spawner | 15 + solr/client/ruby/flare/script/runner | 15 + solr/client/ruby/flare/script/server | 15 + .../test/functional/browse_controller_test.rb | 30 + solr/client/ruby/flare/test/test_helper.rb | 47 + .../flare/vendor/plugins/engines/CHANGELOG | 241 ++ 
.../flare/vendor/plugins/engines/MIT-LICENSE | 21 + .../ruby/flare/vendor/plugins/engines/README | 64 + .../flare/vendor/plugins/engines/Rakefile | 32 + .../flare/vendor/plugins/engines/UPGRADING | 93 + .../flare/vendor/plugins/engines/about.yml | 7 + .../engines/generators/plugin_migration/USAGE | 45 + .../plugin_migration_generator.rb | 79 + .../templates/plugin_migration.erb | 13 + .../ruby/flare/vendor/plugins/engines/init.rb | 40 + .../flare/vendor/plugins/engines/install.rb | 32 + .../vendor/plugins/engines/lib/engines.rb | 323 +++ .../lib/engines/deprecated_config_support.rb | 135 + .../plugins/engines/lib/engines/plugin.rb | 214 ++ .../engines/lib/engines/plugin_list.rb | 31 + .../engines/lib/engines/plugin_migrator.rb | 60 + .../engines/lib/engines/rails_extensions.rb | 6 + .../engines/rails_extensions/active_record.rb | 19 + .../engines/rails_extensions/dependencies.rb | 143 + .../engines/rails_extensions/migrations.rb | 155 + .../rails_extensions/public_asset_helpers.rb | 116 + .../lib/engines/rails_extensions/rails.rb | 20 + .../rails_extensions/rails_initializer.rb | 86 + .../lib/engines/rails_extensions/routing.rb | 77 + .../lib/engines/rails_extensions/templates.rb | 140 + .../plugins/engines/lib/engines/testing.rb | 87 + .../vendor/plugins/engines/tasks/engines.rake | 149 + .../ruby/flare/vendor/plugins/flare/README | 11 + .../ruby/flare/vendor/plugins/flare/Rakefile | 34 + .../app/controllers/browse_controller.rb | 15 + .../app/controllers/document_controller.rb | 16 + .../app/controllers/simile_controller.rb | 46 + .../flare/app/helpers/application_helper.rb | 17 + .../flare/app/views/browse/_suggest.rhtml | 24 + .../flare/app/views/browse/facet.rhtml | 55 + .../flare/app/views/browse/index.rhtml | 121 + .../document/_document_development.rhtml | 25 + .../flare/app/views/layouts/browse.rhtml | 28 + .../flare/app/views/simile/exhibit.rhtml | 37 + .../flare/app/views/simile/timeline.rhtml | 58 + .../flare/app/views/simile/timeline.rxml | 50 + 
.../ruby/flare/vendor/plugins/flare/init.rb | 13 + .../flare/vendor/plugins/flare/lib/flare.rb | 15 + .../vendor/plugins/flare/lib/flare/context.rb | 151 + .../flare/lib/flare/controller_extensions.rb | 171 ++ .../ruby/flare/vendor/plugins/flare/routes.rb | 13 + .../plugins/flare/test/flare_context_test.rb | 40 + solr/client/ruby/solr-ruby/CHANGES.yml | 50 + solr/client/ruby/solr-ruby/LICENSE.txt | 201 ++ solr/client/ruby/solr-ruby/README | 56 + solr/client/ruby/solr-ruby/Rakefile | 190 ++ .../examples/delicious_library/dl_importer.rb | 60 + .../delicious_library/sample_export.txt | 164 ++ .../solr-ruby/examples/marc/marc_importer.rb | 106 + .../solr-ruby/examples/tang/tang_importer.rb | 58 + solr/client/ruby/solr-ruby/lib/solr.rb | 21 + .../ruby/solr-ruby/lib/solr/connection.rb | 179 ++ .../ruby/solr-ruby/lib/solr/document.rb | 73 + .../ruby/solr-ruby/lib/solr/exception.rb | 13 + solr/client/ruby/solr-ruby/lib/solr/field.rb | 39 + .../ruby/solr-ruby/lib/solr/importer.rb | 19 + .../lib/solr/importer/array_mapper.rb | 26 + .../solr/importer/delimited_file_source.rb | 38 + .../lib/solr/importer/hpricot_mapper.rb | 27 + .../solr-ruby/lib/solr/importer/mapper.rb | 51 + .../lib/solr/importer/solr_source.rb | 43 + .../lib/solr/importer/xpath_mapper.rb | 35 + .../client/ruby/solr-ruby/lib/solr/indexer.rb | 52 + .../client/ruby/solr-ruby/lib/solr/request.rb | 26 + .../lib/solr/request/add_document.rb | 63 + .../ruby/solr-ruby/lib/solr/request/base.rb | 36 + .../ruby/solr-ruby/lib/solr/request/commit.rb | 31 + .../ruby/solr-ruby/lib/solr/request/delete.rb | 50 + .../ruby/solr-ruby/lib/solr/request/dismax.rb | 46 + .../solr-ruby/lib/solr/request/index_info.rb | 22 + .../lib/solr/request/modify_document.rb | 51 + .../solr-ruby/lib/solr/request/optimize.rb | 21 + .../ruby/solr-ruby/lib/solr/request/ping.rb | 36 + .../ruby/solr-ruby/lib/solr/request/select.rb | 56 + .../solr-ruby/lib/solr/request/spellcheck.rb | 30 + .../solr-ruby/lib/solr/request/standard.rb | 374 +++ 
.../ruby/solr-ruby/lib/solr/request/update.rb | 23 + .../ruby/solr-ruby/lib/solr/response.rb | 27 + .../lib/solr/response/add_document.rb | 17 + .../ruby/solr-ruby/lib/solr/response/base.rb | 42 + .../solr-ruby/lib/solr/response/commit.rb | 17 + .../solr-ruby/lib/solr/response/delete.rb | 13 + .../solr-ruby/lib/solr/response/dismax.rb | 20 + .../solr-ruby/lib/solr/response/index_info.rb | 26 + .../lib/solr/response/modify_document.rb | 17 + .../solr-ruby/lib/solr/response/optimize.rb | 14 + .../ruby/solr-ruby/lib/solr/response/ping.rb | 28 + .../ruby/solr-ruby/lib/solr/response/ruby.rb | 42 + .../solr-ruby/lib/solr/response/select.rb | 17 + .../solr-ruby/lib/solr/response/spellcheck.rb | 20 + .../solr-ruby/lib/solr/response/standard.rb | 60 + .../ruby/solr-ruby/lib/solr/response/xml.rb | 42 + .../ruby/solr-ruby/lib/solr/solrtasks.rb | 27 + solr/client/ruby/solr-ruby/lib/solr/util.rb | 32 + solr/client/ruby/solr-ruby/lib/solr/xml.rb | 47 + solr/client/ruby/solr-ruby/script/setup.rb | 14 + solr/client/ruby/solr-ruby/script/solrshell | 18 + .../ruby/solr-ruby/solr/conf/admin-extra.html | 31 + .../ruby/solr-ruby/solr/conf/protwords.txt | 21 + .../ruby/solr-ruby/solr/conf/schema.xml | 221 ++ .../ruby/solr-ruby/solr/conf/scripts.conf | 24 + .../ruby/solr-ruby/solr/conf/solrconfig.xml | 394 +++ .../ruby/solr-ruby/solr/conf/stopwords.txt | 58 + .../ruby/solr-ruby/solr/conf/synonyms.txt | 31 + .../ruby/solr-ruby/solr/conf/xslt/example.xsl | 132 + .../ruby/solr-ruby/test/conf/admin-extra.html | 31 + .../ruby/solr-ruby/test/conf/protwords.txt | 21 + .../ruby/solr-ruby/test/conf/schema.xml | 237 ++ .../ruby/solr-ruby/test/conf/scripts.conf | 24 + .../ruby/solr-ruby/test/conf/solrconfig.xml | 376 +++ .../ruby/solr-ruby/test/conf/stopwords.txt | 58 + .../ruby/solr-ruby/test/conf/synonyms.txt | 31 + .../solr-ruby/test/functional/server_test.rb | 218 ++ .../test/functional/test_solr_server.rb | 104 + .../solr-ruby/test/unit/add_document_test.rb | 40 + 
.../solr-ruby/test/unit/array_mapper_test.rb | 37 + .../solr-ruby/test/unit/changes_yaml_test.rb | 21 + .../ruby/solr-ruby/test/unit/commit_test.rb | 41 + .../solr-ruby/test/unit/connection_test.rb | 55 + .../solr-ruby/test/unit/data_mapper_test.rb | 75 + .../ruby/solr-ruby/test/unit/delete_test.rb | 56 + .../test/unit/delimited_file_source_test.rb | 29 + .../test/unit/dismax_request_test.rb | 26 + .../ruby/solr-ruby/test/unit/document_test.rb | 69 + .../ruby/solr-ruby/test/unit/field_test.rb | 48 + .../test/unit/hpricot_mapper_test.rb | 44 + .../solr-ruby/test/unit/hpricot_test_file.xml | 26 + .../ruby/solr-ruby/test/unit/indexer_test.rb | 57 + .../test/unit/modify_document_test.rb | 24 + .../ruby/solr-ruby/test/unit/ping_test.rb | 51 + .../ruby/solr-ruby/test/unit/request_test.rb | 61 + .../ruby/solr-ruby/test/unit/response_test.rb | 43 + .../ruby/solr-ruby/test/unit/select_test.rb | 25 + .../solr-ruby/test/unit/solr_mock_base.rb | 40 + .../test/unit/spellcheck_response_test.rb | 26 + .../test/unit/spellchecker_request_test.rb | 27 + .../test/unit/standard_request_test.rb | 324 +++ .../test/unit/standard_response_test.rb | 174 ++ solr/client/ruby/solr-ruby/test/unit/suite.rb | 16 + .../solr-ruby/test/unit/tab_delimited.txt | 2 + .../ruby/solr-ruby/test/unit/util_test.rb | 24 + .../solr-ruby/test/unit/xpath_mapper_test.rb | 38 + .../solr-ruby/test/unit/xpath_test_file.xml | 25 + solr/common-build.xml | 431 +++ solr/contrib/clustering/CHANGES.txt | 20 + solr/contrib/clustering/README.txt | 8 + solr/contrib/clustering/build.xml | 182 ++ .../clustering/lib/carrot2-mini-3.1.0.jar | 2 + .../clustering/lib/commons-lang-2.4.jar | 2 + solr/contrib/clustering/lib/ehcache-1.6.2.jar | 2 + .../lib/jackson-core-asl-0.9.9-6.jar | 2 + .../lib/jackson-mapper-asl-0.9.9-6.jar | 2 + solr/contrib/clustering/lib/log4j-1.2.14.jar | 2 + .../solr-clustering-pom.xml.template | 63 + .../clustering/ClusteringComponent.java | 190 ++ .../handler/clustering/ClusteringEngine.java | 40 + 
.../handler/clustering/ClusteringParams.java | 37 + .../clustering/DocumentClusteringEngine.java | 54 + .../clustering/SearchClusteringEngine.java | 37 + .../carrot2/CarrotClusteringEngine.java | 259 ++ .../clustering/carrot2/CarrotParams.java | 42 + .../clustering/AbstractClusteringTest.java | 198 ++ .../clustering/ClusteringComponentTest.java | 80 + .../MockDocumentClusteringEngine.java | 37 + .../carrot2/CarrotClusteringEngineTest.java | 183 ++ .../carrot2/MockClusteringAlgorithm.java | 83 + .../solr/conf/mapping-ISOLatin1Accent.txt | 246 ++ .../test/resources/solr/conf/protwords.txt | 21 + .../src/test/resources/solr/conf/schema.xml | 347 +++ .../test/resources/solr/conf/solrconfig.xml | 561 ++++ .../test/resources/solr/conf/spellings.txt | 2 + .../test/resources/solr/conf/stopwords.txt | 58 + .../src/test/resources/solr/conf/synonyms.txt | 31 + solr/contrib/dataimporthandler/CHANGES.txt | 426 +++ solr/contrib/dataimporthandler/build.xml | 210 ++ .../dataimporthandler/lib/activation-1.1.jar | 2 + .../dataimporthandler/lib/mail-1.4.1.jar | 2 + ...-dataimporthandler-extras-pom.xml.template | 52 + .../solr-dataimporthandler-pom.xml.template | 39 + .../dataimport/MailEntityProcessor.java | 599 ++++ .../dataimport/TikaEntityProcessor.java | 193 ++ .../dataimport/TestMailEntityProcessor.java | 211 ++ .../dataimport/TestTikaEntityProcessor.java | 61 + .../conf/dataimport-schema-no-unique-key.xml | 203 ++ .../solr/conf/dataimport-solrconfig.xml | 404 +++ .../AbstractDataImportHandlerTest.java | 245 ++ .../BinContentStreamDataSource.java | 68 + .../handler/dataimport/BinFileDataSource.java | 63 + .../handler/dataimport/BinURLDataSource.java | 101 + .../dataimport/CachedSqlEntityProcessor.java | 79 + .../handler/dataimport/ClobTransformer.java | 84 + .../dataimport/ContentStreamDataSource.java | 67 + .../solr/handler/dataimport/Context.java | 226 ++ .../solr/handler/dataimport/ContextImpl.java | 237 ++ .../solr/handler/dataimport/DataConfig.java | 371 +++ 
.../handler/dataimport/DataImportHandler.java | 360 +++ .../DataImportHandlerException.java | 78 + .../solr/handler/dataimport/DataImporter.java | 583 ++++ .../solr/handler/dataimport/DataSource.java | 72 + .../dataimport/DateFormatTransformer.java | 100 + .../solr/handler/dataimport/DebugLogger.java | 281 ++ .../solr/handler/dataimport/DocBuilder.java | 1019 +++++++ .../handler/dataimport/EntityProcessor.java | 117 + .../dataimport/EntityProcessorBase.java | 288 ++ .../dataimport/EntityProcessorWrapper.java | 289 ++ .../solr/handler/dataimport/Evaluator.java | 47 + .../solr/handler/dataimport/EvaluatorBag.java | 303 ++ .../handler/dataimport/EventListener.java | 36 + .../dataimport/FieldReaderDataSource.java | 136 + .../dataimport/FieldStreamDataSource.java | 95 + .../handler/dataimport/FileDataSource.java | 136 + .../dataimport/FileListEntityProcessor.java | 292 ++ .../dataimport/HTMLStripTransformer.java | 97 + .../handler/dataimport/HttpDataSource.java | 51 + .../handler/dataimport/JdbcDataSource.java | 415 +++ .../dataimport/LineEntityProcessor.java | 156 + .../handler/dataimport/LogTransformer.java | 66 + .../handler/dataimport/MockDataSource.java | 59 + .../dataimport/NumberFormatTransformer.java | 140 + .../dataimport/PlainTextEntityProcessor.java | 79 + .../handler/dataimport/RegexTransformer.java | 200 ++ .../handler/dataimport/ScriptTransformer.java | 105 + .../solr/handler/dataimport/SolrWriter.java | 245 ++ .../dataimport/SqlEntityProcessor.java | 166 ++ .../handler/dataimport/TemplateString.java | 115 + .../dataimport/TemplateTransformer.java | 104 + .../handler/dataimport/ThreadedContext.java | 96 + .../ThreadedEntityProcessorWrapper.java | 115 + .../solr/handler/dataimport/Transformer.java | 52 + .../handler/dataimport/URLDataSource.java | 149 + .../handler/dataimport/VariableResolver.java | 53 + .../dataimport/VariableResolverImpl.java | 150 + .../dataimport/XPathEntityProcessor.java | 521 ++++ .../handler/dataimport/XPathRecordReader.java | 644 
+++++ .../src/main/webapp/admin/dataimport.jsp | 54 + .../src/main/webapp/admin/debug.jsp | 106 + .../dataimport/MockInitialContextFactory.java | 62 + .../TestCachedSqlEntityProcessor.java | 263 ++ .../dataimport/TestClobTransformer.java | 62 + .../TestContentStreamDataSource.java | 155 + .../handler/dataimport/TestDataConfig.java | 91 + .../dataimport/TestDateFormatTransformer.java | 92 + .../handler/dataimport/TestDocBuilder.java | 259 ++ .../handler/dataimport/TestDocBuilder2.java | 361 +++ .../dataimport/TestEntityProcessorBase.java | 83 + .../handler/dataimport/TestErrorHandling.java | 176 ++ .../handler/dataimport/TestEvaluatorBag.java | 158 ++ .../handler/dataimport/TestFieldReader.java | 66 + .../TestFileListEntityProcessor.java | 201 ++ .../dataimport/TestJdbcDataSource.java | 182 ++ .../dataimport/TestLineEntityProcessor.java | 250 ++ .../TestNumberFormatTransformer.java | 166 ++ .../TestPlainTextEntityProcessor.java | 70 + .../dataimport/TestRegexTransformer.java | 209 ++ .../dataimport/TestScriptTransformer.java | 143 + .../dataimport/TestSqlEntityProcessor.java | 179 ++ .../dataimport/TestSqlEntityProcessor2.java | 274 ++ .../TestSqlEntityProcessorDelta.java | 295 ++ .../TestSqlEntityProcessorDelta2.java | 291 ++ .../dataimport/TestTemplateString.java | 55 + .../dataimport/TestTemplateTransformer.java | 75 + .../solr/handler/dataimport/TestThreaded.java | 72 + .../handler/dataimport/TestURLDataSource.java | 47 + .../dataimport/TestVariableResolver.java | 175 ++ .../dataimport/TestXPathEntityProcessor.java | 304 ++ .../dataimport/TestXPathRecordReader.java | 569 ++++ .../solr/conf/contentstream-solrconfig.xml | 408 +++ .../solr/conf/data-config-with-datasource.xml | 9 + .../conf/data-config-with-transformer.xml | 10 + .../solr/conf/dataconfig-contentstream.xml | 10 + .../dataimport-nodatasource-solrconfig.xml | 404 +++ .../resources/solr/conf/dataimport-schema.xml | 304 ++ .../solr/conf/dataimport-solr_id-schema.xml | 304 ++ 
.../solr/conf/dataimport-solrconfig.xml | 404 +++ .../test/resources/solr/conf/protwords.txt | 20 + .../solr/conf/single-entity-data-config.xml | 9 + .../test/resources/solr/conf/stopwords.txt | 16 + .../src/test/resources/solr/conf/synonyms.txt | 22 + solr/contrib/extraction/CHANGES.txt | 49 + solr/contrib/extraction/build.xml | 139 + solr/contrib/extraction/lib/asm-3.1.jar | 2 + .../extraction/lib/commons-compress-1.0.jar | 2 + .../extraction/lib/commons-logging-1.1.1.jar | 2 + solr/contrib/extraction/lib/dom4j-1.6.1.jar | 2 + .../lib/fontbox-0.8.0-incubator.jar | 2 + .../lib/geronimo-stax-api_1.0_spec-1.0.1.jar | 2 + solr/contrib/extraction/lib/icu4j-4_2_1.jar | 2 + .../lib/jempbox-0.8.0-incubator.jar | 2 + solr/contrib/extraction/lib/log4j-1.2.14.jar | 2 + .../lib/metadata-extractor-2.4.0-beta-1.jar | 2 + .../lib/pdfbox-0.8.0-incubating.jar | 2 + solr/contrib/extraction/lib/poi-3.6.jar | 2 + solr/contrib/extraction/lib/poi-ooxml-3.6.jar | 2 + .../extraction/lib/poi-ooxml-schemas-3.6.jar | 2 + .../extraction/lib/poi-scratchpad-3.6.jar | 2 + solr/contrib/extraction/lib/tagsoup-1.2.jar | 2 + solr/contrib/extraction/lib/tika-core-0.6.jar | 2 + .../extraction/lib/tika-parsers-0.6.jar | 2 + .../extraction/lib/xercesImpl-2.8.1.jar | 2 + .../extraction/lib/xml-apis-1.0.b2.jar | 2 + .../contrib/extraction/lib/xmlbeans-2.3.0.jar | 2 + .../extraction/solr-cell-pom.xml.template | 51 + .../extraction/ExtractingDocumentLoader.java | 221 ++ .../ExtractingMetadataConstants.java | 29 + .../handler/extraction/ExtractingParams.java | 142 + .../extraction/ExtractingRequestHandler.java | 130 + .../extraction/SolrContentHandler.java | 308 ++ .../extraction/SolrContentHandlerFactory.java | 41 + .../handler/ExtractingRequestHandlerTest.java | 354 +++ .../extraction/src/test/resources/arabic.pdf | Bin 0 -> 11935 bytes .../src/test/resources/example.html | 49 + .../extraction/src/test/resources/simple.html | 12 + .../src/test/resources/solr-word.pdf | Bin 0 -> 21052 bytes 
.../test/resources/solr/conf/protwords.txt | 20 + .../src/test/resources/solr/conf/schema.xml | 471 +++ .../test/resources/solr/conf/solrconfig.xml | 359 +++ .../test/resources/solr/conf/stopwords.txt | 16 + .../src/test/resources/solr/conf/synonyms.txt | 22 + .../src/test/resources/version_control.txt | 18 + .../src/test/resources/version_control.xml | 42 + solr/contrib/velocity/build.xml | 125 + .../velocity/solr-velocity-pom.xml.template | 61 + solr/contrib/velocity/src/main/java/footer.vm | 8 + .../org/apache/solr/request/PageTool.java | 80 + .../solr/request/SolrParamResourceLoader.java | 66 + .../request/SolrVelocityResourceLoader.java | 50 + .../solr/request/VelocityResponseWriter.java | 194 ++ .../src/main/solr/conf/admin-extra.html | 31 + .../velocity/src/main/solr/conf/elevate.xml | 36 + .../velocity/src/main/solr/conf/protwords.txt | 21 + .../velocity/src/main/solr/conf/schema.xml | 413 +++ .../velocity/src/main/solr/conf/scripts.conf | 24 + .../src/main/solr/conf/solrconfig.xml | 784 +++++ .../velocity/src/main/solr/conf/spellings.txt | 2 + .../velocity/src/main/solr/conf/stopwords.txt | 58 + .../velocity/src/main/solr/conf/synonyms.txt | 31 + .../src/main/solr/conf/velocity.properties | 3 + .../solr/conf/velocity/VM_global_library.vm | 50 + .../src/main/solr/conf/velocity/browse.vm | 67 + .../src/main/solr/conf/velocity/debug.vm | 9 + .../src/main/solr/conf/velocity/header.vm | 0 .../src/main/solr/conf/velocity/hit.vm | 16 + .../src/main/solr/conf/velocity/main.css | 96 + .../src/main/solr/conf/xslt/example.xsl | 132 + .../src/main/solr/conf/xslt/example_atom.xsl | 67 + .../src/main/solr/conf/xslt/example_rss.xsl | 66 + .../velocity/src/main/solr/conf/xslt/luke.xsl | 337 +++ .../main/solr/lib/commons-beanutils-1.7.0.jar | 2 + .../solr/lib/commons-collections-3.2.1.jar | 2 + .../src/main/solr/lib/commons-lang-2.4.jar | 2 + .../src/main/solr/lib/velocity-1.6.1.jar | 2 + .../solr/lib/velocity-tools-2.0-beta3.jar | 2 + 
solr/contrib/velocity/src/test/java/TODO.txt | 1 + solr/example/README.txt | 42 + solr/example/etc/jetty.xml | 212 ++ solr/example/etc/webdefault.xml | 379 +++ solr/example/example-DIH/README.txt | 43 + solr/example/example-DIH/hsqldb/ex.backup | Bin 0 -> 11354 bytes solr/example/example-DIH/hsqldb/ex.data | Bin 0 -> 1048576 bytes solr/example/example-DIH/hsqldb/ex.log | 2 + solr/example/example-DIH/hsqldb/ex.properties | 17 + solr/example/example-DIH/hsqldb/ex.script | 12 + .../example-DIH/solr/db/conf/admin-extra.html | 31 + .../solr/db/conf/db-data-config.xml | 31 + .../example-DIH/solr/db/conf/elevate.xml | 36 + .../example-DIH/solr/db/conf/protwords.txt | 21 + .../example-DIH/solr/db/conf/schema.xml | 356 +++ .../example-DIH/solr/db/conf/scripts.conf | 24 + .../example-DIH/solr/db/conf/solrconfig.xml | 705 +++++ .../example-DIH/solr/db/conf/stopwords.txt | 58 + .../example-DIH/solr/db/conf/synonyms.txt | 31 + .../example-DIH/solr/db/conf/xslt/example.xsl | 132 + .../solr/db/conf/xslt/example_atom.xsl | 63 + .../solr/db/conf/xslt/example_rss.xsl | 62 + .../example-DIH/solr/db/conf/xslt/luke.xsl | 345 +++ .../solr/db/lib/hsqldb-1.8.0.10.jar | 2 + .../solr/mail/conf/data-config.xml | 11 + .../example-DIH/solr/mail/conf/protwords.txt | 21 + .../example-DIH/solr/mail/conf/schema.xml | 370 +++ .../example-DIH/solr/mail/conf/solrconfig.xml | 807 ++++++ .../example-DIH/solr/mail/conf/stopwords.txt | 58 + .../example-DIH/solr/mail/conf/synonyms.txt | 31 + .../solr/rss/conf/admin-extra.html | 31 + .../example-DIH/solr/rss/conf/elevate.xml | 36 + .../example-DIH/solr/rss/conf/protwords.txt | 21 + .../solr/rss/conf/rss-data-config.xml | 26 + .../example-DIH/solr/rss/conf/schema.xml | 316 +++ .../example-DIH/solr/rss/conf/scripts.conf | 24 + .../example-DIH/solr/rss/conf/solrconfig.xml | 704 +++++ .../example-DIH/solr/rss/conf/stopwords.txt | 58 + .../example-DIH/solr/rss/conf/synonyms.txt | 31 + solr/example/example-DIH/solr/solr.xml | 9 + 
.../example-DIH/solr/tika/conf/schema.xml | 203 ++ .../example-DIH/solr/tika/conf/solrconfig.xml | 410 +++ .../solr/tika/conf/tika-data-config.xml | 10 + solr/example/exampledocs/books.csv | 11 + solr/example/exampledocs/hd.xml | 51 + solr/example/exampledocs/ipod_other.xml | 56 + solr/example/exampledocs/ipod_video.xml | 38 + solr/example/exampledocs/mem.xml | 66 + solr/example/exampledocs/monitor.xml | 33 + solr/example/exampledocs/monitor2.xml | 32 + solr/example/exampledocs/mp500.xml | 41 + solr/example/exampledocs/payload.xml | 57 + solr/example/exampledocs/post.sh | 28 + solr/example/exampledocs/sd500.xml | 36 + solr/example/exampledocs/solr.xml | 38 + solr/example/exampledocs/test_utf8.sh | 83 + solr/example/exampledocs/utf8-example.xml | 42 + solr/example/exampledocs/vidcard.xml | 57 + solr/example/lib/jetty-6.1.3.jar | 2 + solr/example/lib/jetty-util-6.1.3.jar | 2 + solr/example/lib/jsp-2.1/ant-1.6.5.jar | 2 + solr/example/lib/jsp-2.1/core-3.1.1.jar | 2 + solr/example/lib/jsp-2.1/jsp-2.1.jar | 2 + solr/example/lib/jsp-2.1/jsp-api-2.1.jar | 2 + solr/example/lib/servlet-api-2.5-6.1.3.jar | 2 + solr/example/multicore/README.txt | 3 + solr/example/multicore/core0/conf/schema.xml | 41 + .../multicore/core0/conf/solrconfig.xml | 40 + solr/example/multicore/core1/conf/schema.xml | 41 + .../multicore/core1/conf/solrconfig.xml | 40 + .../multicore/exampledocs/ipod_other.xml | 34 + .../multicore/exampledocs/ipod_video.xml | 22 + solr/example/multicore/solr.xml | 35 + solr/example/solr/README.txt | 54 + solr/example/solr/conf/admin-extra.html | 31 + solr/example/solr/conf/elevate.xml | 36 + .../solr/conf/mapping-ISOLatin1Accent.txt | 246 ++ solr/example/solr/conf/protwords.txt | 21 + solr/example/solr/conf/schema.xml | 608 ++++ solr/example/solr/conf/scripts.conf | 24 + solr/example/solr/conf/solrconfig.xml | 1048 +++++++ solr/example/solr/conf/spellings.txt | 2 + solr/example/solr/conf/stopwords.txt | 58 + solr/example/solr/conf/synonyms.txt | 31 + 
solr/example/solr/conf/xslt/example.xsl | 132 + solr/example/solr/conf/xslt/example_atom.xsl | 67 + solr/example/solr/conf/xslt/example_rss.xsl | 66 + solr/example/solr/conf/xslt/luke.xsl | 337 +++ solr/example/solr/solr.xml | 34 + solr/example/start.jar | 2 + solr/lib/README.committers.txt | 34 + solr/lib/commons-codec-1.3.jar | 2 + solr/lib/commons-csv-1.0-SNAPSHOT-r609327.jar | 2 + solr/lib/commons-fileupload-1.2.1.jar | 2 + solr/lib/commons-httpclient-3.1.jar | 2 + solr/lib/commons-io-1.4.jar | 2 + solr/lib/easymock.jar | 2 + solr/lib/geronimo-stax-api_1.0_spec-1.0.1.jar | 2 + solr/lib/google-collect-1.0.jar | 2 + solr/lib/jcl-over-slf4j-1.5.5.jar | 2 + solr/lib/junit-4.3.jar | 2 + solr/lib/lucene-analyzers-2.9.2.jar | 2 + solr/lib/lucene-collation-2.9.2.jar | 2 + solr/lib/lucene-core-2.9.2.jar | 2 + .../lucene-fast-vector-highlighter-2.9.2.jar | 2 + solr/lib/lucene-highlighter-2.9.2.jar | 2 + solr/lib/lucene-memory-2.9.2.jar | 2 + solr/lib/lucene-misc-2.9.2.jar | 2 + solr/lib/lucene-queries-2.9.2.jar | 2 + solr/lib/lucene-snowball-2.9.2.jar | 2 + solr/lib/lucene-spatial-2.9.2.jar | 2 + solr/lib/lucene-spellchecker-2.9.2.jar | 2 + solr/lib/servlet-api-2.4.jar | 2 + solr/lib/slf4j-api-1.5.5.jar | 2 + solr/lib/slf4j-jdk14-1.5.5.jar | 2 + solr/lib/solr-commons-csv-pom.xml.template | 36 + solr/lib/wstx-asl-3.2.7.jar | 2 + solr/site/.htaccess | 4 + solr/site/broken-links.xml | 2 + solr/site/doap.rdf | 87 + solr/site/features.html | 473 ++++ solr/site/features.pdf | 427 +++ .../site/images/built-with-forrest-button.png | Bin 0 -> 1936 bytes solr/site/images/favicon.ico | Bin 0 -> 1146 bytes solr/site/images/instruction_arrow.png | Bin 0 -> 285 bytes solr/site/images/lucene_green_150.gif | Bin 0 -> 1113 bytes .../images/lucidworks_reference_guide.png | Bin 0 -> 13987 bytes solr/site/images/powered_by_solr.ai | 1405 +++++++++ solr/site/images/powered_by_solr.eps | Bin 0 -> 564570 bytes solr/site/images/powered_by_solr.png | Bin 0 -> 16131 bytes 
solr/site/images/powered_by_solr.svg | 1202 ++++++++ solr/site/images/solr-book-image.jpg | Bin 0 -> 14993 bytes solr/site/images/solr.jpg | Bin 0 -> 15574 bytes solr/site/images/solr.png | Bin 0 -> 9970 bytes solr/site/index.html | 599 ++++ solr/site/index.pdf | 1595 +++++++++++ solr/site/issue_tracking.html | 212 ++ solr/site/issue_tracking.pdf | 118 + solr/site/linkmap.html | 342 +++ solr/site/linkmap.pdf | 94 + solr/site/mailing_lists.html | 299 ++ solr/site/mailing_lists.pdf | 382 +++ solr/site/skin/CommonMessages_de.xml | 23 + solr/site/skin/CommonMessages_en_US.xml | 23 + solr/site/skin/CommonMessages_es.xml | 23 + solr/site/skin/CommonMessages_fr.xml | 23 + solr/site/skin/basic.css | 166 ++ solr/site/skin/breadcrumbs-optimized.js | 90 + solr/site/skin/breadcrumbs.js | 237 ++ solr/site/skin/fontsize.js | 166 ++ solr/site/skin/forrest.css.xslt | 86 + solr/site/skin/getBlank.js | 40 + solr/site/skin/getMenu.js | 45 + solr/site/skin/images/README.txt | 1 + solr/site/skin/images/add.jpg | Bin 0 -> 1142 bytes .../skin/images/built-with-forrest-button.png | Bin 0 -> 1936 bytes solr/site/skin/images/chapter.gif | Bin 0 -> 49 bytes solr/site/skin/images/chapter_open.gif | Bin 0 -> 49 bytes solr/site/skin/images/corner-imports.svg.xslt | 92 + solr/site/skin/images/current.gif | Bin 0 -> 54 bytes solr/site/skin/images/dc.svg.xslt | 28 + solr/site/skin/images/error.png | Bin 0 -> 1709 bytes solr/site/skin/images/external-link.gif | Bin 0 -> 71 bytes solr/site/skin/images/fix.jpg | Bin 0 -> 932 bytes solr/site/skin/images/forrest-credit-logo.png | Bin 0 -> 4633 bytes solr/site/skin/images/hack.jpg | Bin 0 -> 743 bytes solr/site/skin/images/header_white_line.gif | Bin 0 -> 37 bytes solr/site/skin/images/info.png | Bin 0 -> 1320 bytes solr/site/skin/images/instruction_arrow.png | Bin 0 -> 285 bytes solr/site/skin/images/label.gif | Bin 0 -> 54 bytes solr/site/skin/images/page.gif | Bin 0 -> 79 bytes solr/site/skin/images/pdfdoc.gif | Bin 0 -> 457 bytes 
solr/site/skin/images/poddoc.png | Bin 0 -> 856 bytes solr/site/skin/images/poddoc.svg.xslt | 55 + solr/site/skin/images/printer.gif | Bin 0 -> 438 bytes .../images/rc-b-l-15-1body-2menu-3menu.png | Bin 0 -> 350 bytes .../images/rc-b-r-15-1body-2menu-3menu.png | Bin 0 -> 308 bytes ...-5-1header-2tab-selected-3tab-selected.png | Bin 0 -> 191 bytes ...rc-t-l-5-1header-2searchbox-3searchbox.png | Bin 0 -> 197 bytes ...-5-1header-2tab-selected-3tab-selected.png | Bin 0 -> 222 bytes ...header-2tab-unselected-3tab-unselected.png | Bin 0 -> 197 bytes .../images/rc-t-r-15-1body-2menu-3menu.png | Bin 0 -> 390 bytes ...rc-t-r-5-1header-2searchbox-3searchbox.png | Bin 0 -> 207 bytes ...-5-1header-2tab-selected-3tab-selected.png | Bin 0 -> 219 bytes ...header-2tab-unselected-3tab-unselected.png | Bin 0 -> 207 bytes solr/site/skin/images/rc.svg.xslt | 27 + solr/site/skin/images/remove.jpg | Bin 0 -> 1251 bytes solr/site/skin/images/rss.png | Bin 0 -> 360 bytes solr/site/skin/images/spacer.gif | Bin 0 -> 43 bytes solr/site/skin/images/success.png | Bin 0 -> 1291 bytes solr/site/skin/images/txtdoc.png | Bin 0 -> 784 bytes solr/site/skin/images/txtdoc.svg.xslt | 55 + solr/site/skin/images/update.jpg | Bin 0 -> 990 bytes solr/site/skin/images/valid-html401.png | Bin 0 -> 2948 bytes solr/site/skin/images/vcss.png | Bin 0 -> 1134 bytes solr/site/skin/images/warning.png | Bin 0 -> 1215 bytes solr/site/skin/images/xmldoc.gif | Bin 0 -> 647 bytes solr/site/skin/menu.js | 48 + solr/site/skin/note.txt | 50 + solr/site/skin/print.css | 54 + solr/site/skin/profile.css | 175 ++ solr/site/skin/profile.css.xslt | 208 ++ solr/site/skin/prototype.js | 1257 ++++++++ solr/site/skin/screen.css | 587 ++++ solr/site/skin/skinconf.xsl | 137 + solr/site/tutorial.html | 781 +++++ solr/site/tutorial.pdf | 1456 ++++++++++ solr/site/version_control.html | 270 ++ solr/site/version_control.pdf | 384 +++ solr/site/who.html | 261 ++ solr/site/who.pdf | 179 ++ .../apache/solr/common/ResourceLoader.java | 47 + 
.../org/apache/solr/common/SolrDocument.java | 302 ++ .../apache/solr/common/SolrDocumentList.java | 67 + .../org/apache/solr/common/SolrException.java | 182 ++ .../apache/solr/common/SolrInputDocument.java | 229 ++ .../apache/solr/common/SolrInputField.java | 202 ++ .../apache/solr/common/luke/FieldFlag.java | 68 + .../solr/common/params/AnalysisParams.java | 60 + .../common/params/AppendedSolrParams.java | 50 + .../solr/common/params/CommonParams.java | 128 + .../solr/common/params/CoreAdminParams.java | 84 + .../solr/common/params/DefaultSolrParams.java | 60 + .../solr/common/params/DisMaxParams.java | 66 + .../solr/common/params/EventParams.java | 29 + .../solr/common/params/FacetParams.java | 181 ++ .../solr/common/params/HighlightParams.java | 56 + .../solr/common/params/MapSolrParams.java | 75 + .../common/params/ModifiableSolrParams.java | 210 ++ .../common/params/MoreLikeThisParams.java | 71 + .../common/params/MultiMapSolrParams.java | 92 + .../common/params/RequiredSolrParams.java | 151 + .../solr/common/params/ShardParams.java | 39 + .../apache/solr/common/params/SolrParams.java | 317 +++ .../solr/common/params/SpellingParams.java | 84 + .../solr/common/params/StatsParams.java | 27 + .../solr/common/params/TermVectorParams.java | 66 + .../solr/common/params/TermsParams.java | 120 + .../solr/common/params/UpdateParams.java | 54 + .../org/apache/solr/common/util/Base64.java | 153 + .../solr/common/util/ConcurrentLRUCache.java | 613 ++++ .../solr/common/util/ContentStream.java | 73 + .../solr/common/util/ContentStreamBase.java | 204 ++ .../org/apache/solr/common/util/DOMUtil.java | 397 +++ .../org/apache/solr/common/util/DateUtil.java | 200 ++ .../solr/common/util/FastInputStream.java | 211 ++ .../solr/common/util/FastOutputStream.java | 191 ++ .../apache/solr/common/util/FastWriter.java | 128 + .../apache/solr/common/util/FileUtils.java | 99 + .../org/apache/solr/common/util/Hash.java | 242 ++ .../solr/common/util/IteratorChain.java | 82 + 
.../apache/solr/common/util/JavaBinCodec.java | 708 +++++ .../apache/solr/common/util/NamedList.java | 391 +++ .../solr/common/util/NamedListCodec.java | 31 + .../org/apache/solr/common/util/RTimer.java | 154 + .../solr/common/util/RegexFileFilter.java | 43 + .../solr/common/util/SimpleOrderedMap.java | 67 + .../org/apache/solr/common/util/StrUtils.java | 268 ++ .../org/apache/solr/common/util/XML.java | 209 ++ .../dev-tools/stub-analysis-factory-maker.pl | 166 ++ .../analysis/ASCIIFoldingFilterFactory.java | 29 + .../ArabicLetterTokenizerFactory.java | 34 + .../ArabicNormalizationFilterFactory.java | 32 + .../analysis/ArabicStemFilterFactory.java | 33 + .../solr/analysis/BaseCharFilterFactory.java | 62 + .../solr/analysis/BaseTokenFilterFactory.java | 77 + .../solr/analysis/BaseTokenizerFactory.java | 43 + .../analysis/BrazilianStemFilterFactory.java | 35 + .../solr/analysis/BufferedTokenStream.java | 152 + .../solr/analysis/CJKTokenizerFactory.java | 31 + .../analysis/CapitalizationFilterFactory.java | 242 ++ .../solr/analysis/CharFilterFactory.java | 34 + .../solr/analysis/ChineseFilterFactory.java | 30 + .../analysis/ChineseTokenizerFactory.java | 30 + .../analysis/CollationKeyFilterFactory.java | 164 ++ .../solr/analysis/CommonGramsFilter.java | 223 ++ .../analysis/CommonGramsFilterFactory.java | 83 + .../solr/analysis/CommonGramsQueryFilter.java | 138 + .../CommonGramsQueryFilterFactory.java | 91 + .../DelimitedPayloadTokenFilterFactory.java | 73 + ...tionaryCompoundWordTokenFilterFactory.java | 65 + .../solr/analysis/DoubleMetaphoneFilter.java | 112 + .../DoubleMetaphoneFilterFactory.java | 47 + .../solr/analysis/DutchStemFilterFactory.java | 36 + .../solr/analysis/EdgeNGramFilterFactory.java | 54 + .../analysis/EdgeNGramTokenizerFactory.java | 53 + .../solr/analysis/ElisionFilterFactory.java | 62 + .../analysis/EnglishPorterFilterFactory.java | 84 + .../analysis/FrenchStemFilterFactory.java | 35 + .../analysis/GermanStemFilterFactory.java | 33 + 
.../analysis/GreekLowerCaseFilterFactory.java | 71 + .../solr/analysis/HTMLStripCharFilter.java | 1360 +++++++++ .../analysis/HTMLStripCharFilterFactory.java | 29 + .../apache/solr/analysis/HTMLStripReader.java | 58 + .../HTMLStripStandardTokenizerFactory.java | 41 + .../HTMLStripWhitespaceTokenizerFactory.java | 41 + .../solr/analysis/HyphenatedWordsFilter.java | 97 + .../HyphenatedWordsFilterFactory.java | 30 + .../ISOLatin1AccentFilterFactory.java | 30 + .../apache/solr/analysis/KeepWordFilter.java | 54 + .../solr/analysis/KeepWordFilterFactory.java | 88 + .../analysis/KeywordTokenizerFactory.java | 32 + .../apache/solr/analysis/LengthFilter.java | 49 + .../solr/analysis/LengthFilterFactory.java | 42 + .../solr/analysis/LetterTokenizerFactory.java | 32 + .../solr/analysis/LowerCaseFilterFactory.java | 30 + .../analysis/LowerCaseTokenizerFactory.java | 32 + .../analysis/MappingCharFilterFactory.java | 121 + .../solr/analysis/NGramFilterFactory.java | 48 + .../solr/analysis/NGramTokenizerFactory.java | 48 + .../NumericPayloadTokenFilterFactory.java | 40 + .../analysis/PatternReplaceCharFilter.java | 193 ++ .../PatternReplaceCharFilterFactory.java | 57 + .../solr/analysis/PatternReplaceFilter.java | 88 + .../analysis/PatternReplaceFilterFactory.java | 66 + .../solr/analysis/PatternTokenizer.java | 139 + .../analysis/PatternTokenizerFactory.java | 168 ++ .../PersianNormalizationFilterFactory.java | 32 + .../apache/solr/analysis/PhoneticFilter.java | 101 + .../solr/analysis/PhoneticFilterFactory.java | 97 + .../analysis/PorterStemFilterFactory.java | 30 + .../solr/analysis/PositionFilterFactory.java | 45 + .../analysis/RemoveDuplicatesTokenFilter.java | 57 + .../RemoveDuplicatesTokenFilterFactory.java | 29 + .../analysis/ReverseStringFilterFactory.java | 34 + .../solr/analysis/ReversedWildcardFilter.java | 150 + .../ReversedWildcardFilterFactory.java | 131 + .../apache/solr/analysis/RussianCommon.java | 61 + .../RussianLetterTokenizerFactory.java | 39 + 
.../RussianLowerCaseFilterFactory.java | 39 + .../analysis/RussianStemFilterFactory.java | 40 + .../solr/analysis/ShingleFilterFactory.java | 44 + .../analysis/SnowballPorterFilterFactory.java | 132 + .../apache/solr/analysis/SolrAnalyzer.java | 79 + .../solr/analysis/StandardFilterFactory.java | 30 + .../analysis/StandardTokenizerFactory.java | 33 + .../solr/analysis/StopFilterFactory.java | 86 + .../apache/solr/analysis/SynonymFilter.java | 213 ++ .../solr/analysis/SynonymFilterFactory.java | 169 ++ .../org/apache/solr/analysis/SynonymMap.java | 157 + .../solr/analysis/ThaiWordFilterFactory.java | 35 + .../solr/analysis/TokenFilterFactory.java | 69 + .../TokenOffsetPayloadTokenFilterFactory.java | 33 + .../apache/solr/analysis/TokenizerChain.java | 92 + .../solr/analysis/TokenizerFactory.java | 70 + .../solr/analysis/TrieTokenizerFactory.java | 118 + .../org/apache/solr/analysis/TrimFilter.java | 86 + .../solr/analysis/TrimFilterFactory.java | 51 + .../TypeAsPayloadTokenFilterFactory.java | 33 + .../analysis/WhitespaceTokenizerFactory.java | 32 + .../solr/analysis/WordDelimiterFilter.java | 679 +++++ .../analysis/WordDelimiterFilterFactory.java | 97 + .../solr/core/AbstractSolrEventListener.java | 69 + .../java/org/apache/solr/core/CloseHook.java | 44 + .../src/java/org/apache/solr/core/Config.java | 320 +++ .../org/apache/solr/core/CoreContainer.java | 784 +++++ .../org/apache/solr/core/CoreDescriptor.java | 174 ++ .../apache/solr/core/DirectoryFactory.java | 41 + .../solr/core/IndexDeletionPolicyWrapper.java | 244 ++ .../apache/solr/core/IndexReaderFactory.java | 66 + .../org/apache/solr/core/JmxMonitoredMap.java | 297 ++ .../java/org/apache/solr/core/PluginInfo.java | 108 + .../apache/solr/core/QuerySenderListener.java | 79 + .../org/apache/solr/core/RequestHandlers.java | 326 +++ .../solr/core/RunExecutableListener.java | 119 + .../java/org/apache/solr/core/SolrConfig.java | 465 +++ .../java/org/apache/solr/core/SolrCore.java | 1636 +++++++++++ 
.../apache/solr/core/SolrDeletionPolicy.java | 223 ++ .../apache/solr/core/SolrEventListener.java | 64 + .../org/apache/solr/core/SolrException.java | 47 + .../org/apache/solr/core/SolrInfoMBean.java | 70 + .../apache/solr/core/SolrInfoRegistry.java | 39 + .../apache/solr/core/SolrResourceLoader.java | 618 ++++ .../solr/core/StandardDirectoryFactory.java | 34 + .../solr/core/StandardIndexReaderFactory.java | 40 + .../solr/handler/AnalysisRequestHandler.java | 222 ++ .../handler/AnalysisRequestHandlerBase.java | 330 +++ .../handler/BinaryUpdateRequestHandler.java | 147 + .../solr/handler/CSVRequestHandler.java | 404 +++ .../handler/ContentStreamHandlerBase.java | 68 + .../solr/handler/ContentStreamLoader.java | 51 + .../solr/handler/DisMaxRequestHandler.java | 175 ++ .../DocumentAnalysisRequestHandler.java | 356 +++ .../solr/handler/DumpRequestHandler.java | 84 + .../handler/FieldAnalysisRequestHandler.java | 248 ++ .../solr/handler/MoreLikeThisHandler.java | 395 +++ .../solr/handler/PingRequestHandler.java | 100 + .../solr/handler/ReplicationHandler.java | 1122 ++++++++ .../solr/handler/RequestHandlerBase.java | 187 ++ .../solr/handler/RequestHandlerUtils.java | 131 + .../org/apache/solr/handler/SnapPuller.java | 1198 ++++++++ .../org/apache/solr/handler/SnapShooter.java | 197 ++ .../handler/SpellCheckerRequestHandler.java | 437 +++ .../solr/handler/StandardRequestHandler.java | 83 + .../handler/SystemInfoRequestHandler.java | 126 + .../org/apache/solr/handler/XMLLoader.java | 321 +++ .../solr/handler/XmlUpdateRequestHandler.java | 167 ++ .../solr/handler/admin/AdminHandlers.java | 141 + .../solr/handler/admin/CoreAdminHandler.java | 493 ++++ .../handler/admin/LukeRequestHandler.java | 675 +++++ .../solr/handler/admin/PluginInfoHandler.java | 117 + .../admin/PropertiesRequestHandler.java | 71 + .../handler/admin/ShowFileRequestHandler.java | 236 ++ .../solr/handler/admin/SystemInfoHandler.java | 324 +++ .../solr/handler/admin/ThreadDumpHandler.java | 150 + 
.../handler/component/DebugComponent.java | 252 ++ .../handler/component/FacetComponent.java | 682 +++++ .../handler/component/FieldFacetStats.java | 148 + .../handler/component/HighlightComponent.java | 220 ++ .../component/MoreLikeThisComponent.java | 93 + .../handler/component/QueryComponent.java | 605 ++++ .../component/QueryElevationComponent.java | 493 ++++ .../handler/component/ResponseBuilder.java | 294 ++ .../handler/component/SearchComponent.java | 108 + .../solr/handler/component/SearchHandler.java | 502 ++++ .../solr/handler/component/ShardDoc.java | 285 ++ .../solr/handler/component/ShardRequest.java | 67 + .../solr/handler/component/ShardResponse.java | 82 + .../component/SpellCheckComponent.java | 622 ++++ .../handler/component/StatsComponent.java | 307 ++ .../solr/handler/component/StatsValues.java | 159 ++ .../component/TermVectorComponent.java | 323 +++ .../handler/component/TermsComponent.java | 440 +++ .../highlight/DefaultSolrHighlighter.java | 590 ++++ .../apache/solr/highlight/GapFragmenter.java | 114 + .../highlight/HighlightingPluginBase.java | 72 + .../apache/solr/highlight/HtmlFormatter.java | 65 + ...ultiColoredScoreOrderFragmentsBuilder.java | 62 + .../MultiColoredSimpleFragmentsBuilder.java | 62 + .../solr/highlight/RegexFragmenter.java | 299 ++ .../highlight/ScoreOrderFragmentsBuilder.java | 59 + .../solr/highlight/SimpleFragListBuilder.java | 59 + .../highlight/SimpleFragmentsBuilder.java | 55 + .../apache/solr/highlight/SolrFormatter.java | 44 + .../solr/highlight/SolrFragListBuilder.java | 42 + .../apache/solr/highlight/SolrFragmenter.java | 44 + .../solr/highlight/SolrFragmentsBuilder.java | 43 + .../solr/highlight/SolrHighlighter.java | 132 + .../solr/request/AppendedSolrParams.java | 33 + .../request/BinaryQueryResponseWriter.java | 26 + .../solr/request/BinaryResponseWriter.java | 26 + .../solr/request/DefaultSolrParams.java | 33 + .../solr/request/DisMaxRequestHandler.java | 27 + .../solr/request/JSONResponseWriter.java | 26 + 
.../solr/request/LocalSolrQueryRequest.java | 71 + .../apache/solr/request/MapSolrParams.java | 33 + .../solr/request/MultiMapSolrParams.java | 32 + .../solr/request/PHPResponseWriter.java | 26 + .../request/PHPSerializedResponseWriter.java | 26 + .../solr/request/PythonResponseWriter.java | 26 + .../solr/request/QueryResponseWriter.java | 26 + .../solr/request/RawResponseWriter.java | 26 + .../solr/request/RequiredSolrParams.java | 33 + .../solr/request/RubyResponseWriter.java | 26 + .../solr/request/ServletSolrParams.java | 39 + .../org/apache/solr/request/SimpleFacets.java | 714 +++++ .../org/apache/solr/request/SolrParams.java | 42 + .../apache/solr/request/SolrQueryRequest.java | 136 + .../solr/request/SolrQueryRequestBase.java | 255 ++ .../solr/request/SolrQueryResponse.java | 26 + .../solr/request/SolrRequestHandler.java | 64 + .../solr/request/StandardRequestHandler.java | 27 + .../solr/request/TextResponseWriter.java | 32 + .../apache/solr/request/UnInvertedField.java | 1097 +++++++ .../solr/request/XMLResponseWriter.java | 26 + .../solr/request/XSLTResponseWriter.java | 26 + .../solr/response/BaseResponseWriter.java | 330 +++ .../response/BinaryQueryResponseWriter.java | 39 + .../solr/response/BinaryResponseWriter.java | 229 ++ .../response/GenericBinaryResponseWriter.java | 86 + .../response/GenericTextResponseWriter.java | 78 + .../solr/response/JSONResponseWriter.java | 846 ++++++ .../solr/response/PHPResponseWriter.java | 109 + .../response/PHPSerializedResponseWriter.java | 330 +++ .../solr/response/PythonResponseWriter.java | 147 + .../solr/response/QueryResponseWriter.java | 87 + .../solr/response/RawResponseWriter.java | 96 + .../solr/response/RubyResponseWriter.java | 89 + .../solr/response/SolrQueryResponse.java | 229 ++ .../solr/response/TextResponseWriter.java | 245 ++ .../solr/response/XMLResponseWriter.java | 41 + .../org/apache/solr/response/XMLWriter.java | 844 ++++++ .../solr/response/XSLTResponseWriter.java | 124 + 
.../solr/schema/AbstractSubTypeFieldType.java | 120 + .../org/apache/solr/schema/BCDIntField.java | 75 + .../org/apache/solr/schema/BCDLongField.java | 36 + .../org/apache/solr/schema/BCDStrField.java | 40 + .../org/apache/solr/schema/BinaryField.java | 87 + .../org/apache/solr/schema/BoolField.java | 124 + .../org/apache/solr/schema/ByteField.java | 80 + .../apache/solr/schema/CompressableField.java | 66 + .../solr/schema/CoordinateFieldType.java | 65 + .../org/apache/solr/schema/CopyField.java | 82 + .../org/apache/solr/schema/DateField.java | 469 +++ .../org/apache/solr/schema/DoubleField.java | 81 + .../apache/solr/schema/ExternalFileField.java | 98 + .../apache/solr/schema/FieldProperties.java | 114 + .../org/apache/solr/schema/FieldType.java | 531 ++++ .../org/apache/solr/schema/FloatField.java | 77 + .../org/apache/solr/schema/GeoHashField.java | 81 + .../org/apache/solr/schema/IndexSchema.java | 1316 +++++++++ .../java/org/apache/solr/schema/IntField.java | 77 + .../apache/solr/schema/LegacyDateField.java | 117 + .../org/apache/solr/schema/LongField.java | 84 + .../org/apache/solr/schema/PointType.java | 177 ++ .../apache/solr/schema/RandomSortField.java | 211 ++ .../org/apache/solr/schema/SchemaAware.java | 39 + .../org/apache/solr/schema/SchemaField.java | 228 ++ .../org/apache/solr/schema/ShortField.java | 87 + .../apache/solr/schema/SimilarityFactory.java | 29 + .../solr/schema/SortableDoubleField.java | 147 + .../solr/schema/SortableFloatField.java | 144 + .../apache/solr/schema/SortableIntField.java | 144 + .../apache/solr/schema/SortableLongField.java | 141 + .../apache/solr/schema/SpatialTileField.java | 190 ++ .../java/org/apache/solr/schema/StrField.java | 112 + .../org/apache/solr/schema/TextField.java | 253 ++ .../org/apache/solr/schema/TrieDateField.java | 209 ++ .../apache/solr/schema/TrieDoubleField.java | 24 + .../org/apache/solr/schema/TrieField.java | 521 ++++ .../apache/solr/schema/TrieFloatField.java | 24 + 
.../org/apache/solr/schema/TrieIntField.java | 24 + .../org/apache/solr/schema/TrieLongField.java | 24 + .../org/apache/solr/schema/UUIDField.java | 100 + .../org/apache/solr/search/BitDocSet.java | 208 ++ .../solr/search/BoostQParserPlugin.java | 85 + .../org/apache/solr/search/CacheConfig.java | 119 + .../apache/solr/search/CacheRegenerator.java | 43 + .../solr/search/ConstantScorePrefixQuery.java | 81 + .../org/apache/solr/search/DisMaxQParser.java | 240 ++ .../solr/search/DisMaxQParserPlugin.java | 39 + .../org/apache/solr/search/DocIterator.java | 54 + .../java/org/apache/solr/search/DocList.java | 140 + .../org/apache/solr/search/DocListAndSet.java | 37 + .../java/org/apache/solr/search/DocSet.java | 306 ++ .../solr/search/DocSetHitCollector.java | 143 + .../java/org/apache/solr/search/DocSlice.java | 144 + .../search/ExtendedDismaxQParserPlugin.java | 1151 ++++++++ .../org/apache/solr/search/FastLRUCache.java | 285 ++ .../solr/search/FieldQParserPlugin.java | 62 + .../apache/solr/search/FunctionQParser.java | 281 ++ .../solr/search/FunctionQParserPlugin.java | 37 + .../search/FunctionRangeQParserPlugin.java | 71 + .../org/apache/solr/search/HashDocSet.java | 255 ++ .../java/org/apache/solr/search/LRUCache.java | 285 ++ .../solr/search/LuceneQParserPlugin.java | 134 + .../solr/search/LuceneQueryOptimizer.java | 117 + .../MissingStringLastComparatorSource.java | 199 ++ .../solr/search/NestedQParserPlugin.java | 70 + .../solr/search/OldLuceneQParserPlugin.java | 37 + .../org/apache/solr/search/PrefixFilter.java | 128 + .../solr/search/PrefixQParserPlugin.java | 47 + .../java/org/apache/solr/search/QParser.java | 261 ++ .../org/apache/solr/search/QParserPlugin.java | 46 + .../org/apache/solr/search/QueryParsing.java | 833 ++++++ .../apache/solr/search/QueryResultKey.java | 94 + .../org/apache/solr/search/QueryUtils.java | 102 + .../apache/solr/search/RawQParserPlugin.java | 45 + .../org/apache/solr/search/SolrCache.java | 136 + 
.../solr/search/SolrConstantScoreQuery.java | 201 ++ .../solr/search/SolrFieldCacheMBean.java | 83 + .../org/apache/solr/search/SolrFilter.java | 46 + .../apache/solr/search/SolrIndexReader.java | 527 ++++ .../apache/solr/search/SolrIndexSearcher.java | 1710 +++++++++++ .../apache/solr/search/SolrQueryParser.java | 215 ++ .../apache/solr/search/SolrSimilarity.java | 37 + .../java/org/apache/solr/search/SortSpec.java | 82 + .../apache/solr/search/SortedIntDocSet.java | 662 +++++ .../java/org/apache/solr/search/Sorting.java | 57 + .../apache/solr/search/ValueSourceParser.java | 853 ++++++ .../apache/solr/search/WildcardFilter.java | 116 + .../solr/search/function/BoostedQuery.java | 209 ++ .../solr/search/function/ByteFieldSource.java | 105 + .../search/function/ConstValueSource.java | 73 + .../search/function/DivFloatFunction.java | 38 + .../solr/search/function/DocValues.java | 131 + .../search/function/DoubleFieldSource.java | 156 + .../search/function/DualFloatFunction.java | 92 + .../search/function/FieldCacheSource.java | 55 + .../solr/search/function/FileFloatSource.java | 358 +++ .../search/function/FloatFieldSource.java | 96 + .../solr/search/function/FunctionQuery.java | 232 ++ .../solr/search/function/IntFieldSource.java | 130 + .../search/function/LinearFloatFunction.java | 93 + .../search/function/LiteralValueSource.java | 78 + .../solr/search/function/LongFieldSource.java | 137 + .../search/function/MaxFloatFunction.java | 90 + .../search/function/MultiFloatFunction.java | 113 + .../search/function/MultiValueSource.java | 29 + .../solr/search/function/OrdFieldSource.java | 99 + .../search/function/PowFloatFunction.java | 44 + .../search/function/ProductFloatFunction.java | 39 + .../search/function/QueryValueSource.java | 131 + .../function/RangeMapFloatFunction.java | 105 + .../function/ReciprocalFloatFunction.java | 111 + .../function/ReverseOrdFieldSource.java | 105 + .../search/function/ScaleFloatFunction.java | 133 + 
.../search/function/ShortFieldSource.java | 102 + .../search/function/SimpleFloatFunction.java | 58 + .../solr/search/function/SingleFunction.java | 55 + .../search/function/StringIndexDocValues.java | 97 + .../search/function/SumFloatFunction.java | 47 + .../solr/search/function/TopValueSource.java | 101 + .../solr/search/function/ValueSource.java | 253 ++ .../function/ValueSourceRangeFilter.java | 102 + .../search/function/VectorValueSource.java | 219 ++ .../search/function/distance/Constants.java | 27 + .../function/distance/DistanceUtils.java | 239 ++ .../function/distance/GeohashFunction.java | 100 + .../distance/GeohashHaversineFunction.java | 142 + .../function/distance/HaversineFunction.java | 168 ++ .../distance/SquaredEuclideanFunction.java | 72 + .../distance/StringDistanceFunction.java | 96 + .../distance/VectorDistanceFunction.java | 169 ++ .../spelling/AbstractLuceneSpellChecker.java | 234 ++ .../solr/spelling/FileBasedSpellChecker.java | 127 + .../solr/spelling/IndexBasedSpellChecker.java | 117 + .../apache/solr/spelling/QueryConverter.java | 78 + .../solr/spelling/SolrSpellChecker.java | 115 + .../solr/spelling/SpellingQueryConverter.java | 116 + .../apache/solr/spelling/SpellingResult.java | 141 + .../apache/solr/tst/OldRequestHandler.java | 139 + .../apache/solr/tst/TestRequestHandler.java | 298 ++ .../apache/solr/update/AddUpdateCommand.java | 122 + .../solr/update/CommitUpdateCommand.java | 46 + .../solr/update/DeleteUpdateCommand.java | 41 + .../solr/update/DirectUpdateHandler.java | 406 +++ .../solr/update/DirectUpdateHandler2.java | 702 +++++ .../apache/solr/update/DocumentBuilder.java | 368 +++ .../solr/update/MergeIndexesCommand.java | 52 + .../solr/update/RollbackUpdateCommand.java | 30 + .../apache/solr/update/SolrIndexConfig.java | 137 + .../apache/solr/update/SolrIndexWriter.java | 279 ++ .../org/apache/solr/update/UpdateCommand.java | 35 + .../org/apache/solr/update/UpdateHandler.java | 185 ++ 
.../processor/LogUpdateProcessorFactory.java | 176 ++ .../update/processor/Lookup3Signature.java | 41 + .../solr/update/processor/MD5Signature.java | 57 + .../processor/RunUpdateProcessorFactory.java | 100 + .../solr/update/processor/Signature.java | 28 + .../SignatureUpdateProcessorFactory.java | 173 ++ .../processor/TextProfileSignature.java | 158 ++ .../processor/UpdateRequestProcessor.java | 81 + .../UpdateRequestProcessorChain.java | 83 + .../UpdateRequestProcessorFactory.java | 44 + .../solr/util/AbstractSolrTestCase.java | 384 +++ .../org/apache/solr/util/ArraysUtils.java | 51 + .../java/org/apache/solr/util/BCDUtils.java | 532 ++++ .../org/apache/solr/util/BitSetIterator.java | 158 ++ .../java/org/apache/solr/util/BitUtil.java | 26 + .../org/apache/solr/util/BoundedTreeSet.java | 68 + .../org/apache/solr/util/CharArrayMap.java | 411 +++ .../org/apache/solr/util/CommonParams.java | 122 + .../org/apache/solr/util/ContentStream.java | 28 + .../apache/solr/util/ContentStreamBase.java | 29 + .../java/org/apache/solr/util/DOMUtil.java | 28 + .../org/apache/solr/util/DateMathParser.java | 288 ++ .../org/apache/solr/util/DisMaxParams.java | 178 ++ .../solr/util/HighFrequencyDictionary.java | 136 + .../apache/solr/util/HighlightingUtils.java | 150 + .../org/apache/solr/util/IteratorChain.java | 28 + .../java/org/apache/solr/util/NamedList.java | 42 + .../org/apache/solr/util/NumberUtils.java | 159 ++ .../java/org/apache/solr/util/OpenBitSet.java | 108 + .../java/org/apache/solr/util/RefCounted.java | 62 + .../apache/solr/util/SimpleOrderedMap.java | 42 + .../org/apache/solr/util/SimplePostTool.java | 287 ++ .../org/apache/solr/util/SolrPluginUtils.java | 1004 +++++++ .../java/org/apache/solr/util/StrUtils.java | 29 + .../solr/util/SuggestMissingFactories.java | 213 ++ .../org/apache/solr/util/TestHarness.java | 588 ++++ .../org/apache/solr/util/UpdateParams.java | 29 + .../org/apache/solr/util/VersionedFile.java | 109 + solr/src/java/org/apache/solr/util/XML.java | 
28 + .../solr/util/doc-files/min-should-match.html | 126 + .../util/plugin/AbstractPluginLoader.java | 258 ++ .../util/plugin/MapInitializedPlugin.java | 30 + .../solr/util/plugin/MapPluginLoader.java | 53 + .../plugin/NamedListInitializedPlugin.java | 30 + .../util/plugin/NamedListPluginLoader.java | 51 + .../util/plugin/PluginInfoInitialized.java | 31 + .../solr/util/plugin/ResourceLoaderAware.java | 28 + .../solr/util/plugin/SolrCoreAware.java | 28 + .../solr/util/xslt/TransformerProvider.java | 117 + solr/src/maven/solr-core-pom.xml.template | 133 + solr/src/maven/solr-parent-pom.xml.template | 99 + solr/src/maven/solr-solrj-pom.xml.template | 84 + solr/src/scripts/abc | 190 ++ solr/src/scripts/abo | 190 ++ solr/src/scripts/backup | 117 + solr/src/scripts/backupcleaner | 142 + solr/src/scripts/commit | 133 + solr/src/scripts/optimize | 134 + solr/src/scripts/readercycle | 129 + solr/src/scripts/rsyncd-disable | 77 + solr/src/scripts/rsyncd-enable | 76 + solr/src/scripts/rsyncd-start | 145 + solr/src/scripts/rsyncd-stop | 105 + solr/src/scripts/scripts-util | 99 + solr/src/scripts/snapcleaner | 154 + solr/src/scripts/snapinstaller | 198 ++ solr/src/scripts/snappuller | 269 ++ solr/src/scripts/snappuller-disable | 77 + solr/src/scripts/snappuller-enable | 77 + solr/src/scripts/snapshooter | 136 + solr/src/site/README.txt | 5 + solr/src/site/forrest.properties | 129 + .../classes/CatalogManager.properties | 57 + .../site/src/documentation/content/.htaccess | 4 + .../documentation/content/xdocs/features.xml | 173 ++ .../content/xdocs/images/favicon.ico | Bin 0 -> 1146 bytes .../content/xdocs/images/lucene_green_150.gif | Bin 0 -> 1113 bytes .../images/lucidworks_reference_guide.png | Bin 0 -> 13987 bytes .../content/xdocs/images/powered_by_solr.ai | 1405 +++++++++ .../content/xdocs/images/powered_by_solr.eps | Bin 0 -> 564570 bytes .../content/xdocs/images/powered_by_solr.png | Bin 0 -> 16131 bytes .../content/xdocs/images/powered_by_solr.svg | 1202 ++++++++ 
.../content/xdocs/images/solr-book-image.jpg | Bin 0 -> 14993 bytes .../content/xdocs/images/solr.jpg | Bin 0 -> 15574 bytes .../content/xdocs/images/solr_FC.eps | Bin 0 -> 532874 bytes .../content/xdocs/images/solr_FC.svg | 1201 ++++++++ .../src/documentation/content/xdocs/index.xml | 303 ++ .../content/xdocs/issue_tracking.xml | 34 + .../content/xdocs/mailing_lists.xml | 80 + .../src/documentation/content/xdocs/site.xml | 92 + .../src/documentation/content/xdocs/tabs.xml | 59 + .../documentation/content/xdocs/tutorial.xml | 510 ++++ .../content/xdocs/version_control.xml | 70 + .../src/documentation/content/xdocs/who.xml | 54 + .../resources/schema/catalog.xcat | 26 + solr/src/site/src/documentation/skinconf.xml | 469 +++ .../skins/common/css/forrest.css.xslt | 78 + .../skins/common/images/README.txt | 1 + .../common/images/corner-imports.svg.xslt | 92 + .../skins/common/images/dc.svg.xslt | 28 + .../skins/common/images/poddoc.svg.xslt | 55 + .../skins/common/images/rc.svg.xslt | 27 + .../skins/common/images/txtdoc.svg.xslt | 55 + .../common/scripts/breadcrumbs-optimized.js | 90 + .../skins/common/scripts/breadcrumbs.js | 237 ++ .../skins/common/scripts/fontsize.js | 166 ++ .../skins/common/scripts/getBlank.js | 40 + .../skins/common/scripts/getMenu.js | 45 + .../skins/common/scripts/menu.js | 48 + .../skins/common/scripts/prototype.js | 1257 ++++++++ .../documentation/skins/common/skinconf.xsl | 238 ++ .../common/translations/CommonMessages_de.xml | 23 + .../translations/CommonMessages_en_US.xml | 23 + .../common/translations/CommonMessages_es.xml | 23 + .../common/translations/CommonMessages_fr.xml | 23 + .../skins/common/xslt/fo/document-to-fo.xsl | 1014 +++++++ .../skins/common/xslt/fo/footerinfo.xsl | 70 + .../skins/common/xslt/fo/pdfoutline.xsl | 45 + .../skins/common/xslt/html/book-to-menu.xsl | 139 + .../common/xslt/html/document-to-html.xsl | 374 +++ .../skins/common/xslt/html/dotdots.xsl | 73 + .../skins/common/xslt/html/pathutils.xsl | 231 ++ 
.../skins/common/xslt/html/renderlogo.xsl | 67 + .../skins/common/xslt/html/site-to-xhtml.xsl | 388 +++ .../skins/common/xslt/html/split.xsl | 124 + .../common/xslt/html/strip_namespaces.xsl | 39 + .../skins/common/xslt/html/tab-to-menu.xsl | 195 ++ .../skins/common/xslt/html/tabutils.xsl | 98 + .../skins/common/xslt/svg/document-to-svg.xsl | 45 + .../documentation/skins/lucene/css/basic.css | 166 ++ .../documentation/skins/lucene/css/print.css | 54 + .../skins/lucene/css/profile.css.xslt | 182 ++ .../documentation/skins/lucene/css/screen.css | 587 ++++ .../skins/lucene/images/chapter.gif | Bin 0 -> 49 bytes .../skins/lucene/images/chapter_open.gif | Bin 0 -> 49 bytes .../skins/lucene/images/current.gif | Bin 0 -> 54 bytes .../skins/lucene/images/error.png | Bin 0 -> 1709 bytes .../skins/lucene/images/header_white_line.gif | Bin 0 -> 37 bytes .../skins/lucene/images/info.png | Bin 0 -> 1320 bytes .../skins/lucene/images/instruction_arrow.png | Bin 0 -> 285 bytes .../skins/lucene/images/label.gif | Bin 0 -> 54 bytes .../skins/lucene/images/page.gif | Bin 0 -> 79 bytes .../skins/lucene/images/pdfdoc.gif | Bin 0 -> 457 bytes .../skins/lucene/images/printer.gif | Bin 0 -> 438 bytes .../skins/lucene/images/success.png | Bin 0 -> 1291 bytes .../skins/lucene/images/warning.png | Bin 0 -> 1215 bytes .../skins/lucene/images/xmldoc.gif | Bin 0 -> 647 bytes .../src/documentation/skins/lucene/note.txt | 50 + .../documentation/skins/lucene/skinconf.xsl | 137 + .../skins/lucene/xslt/fo/document-to-fo.xsl | 22 + .../skins/lucene/xslt/html/book-to-menu.xsl | 53 + .../lucene/xslt/html/document-to-html.xsl | 154 + .../skins/lucene/xslt/html/site-to-xhtml.xsl | 808 ++++++ .../skins/lucene/xslt/html/tab-to-menu.xsl | 66 + .../solr/client/solrj/ResponseParser.java | 44 + .../apache/solr/client/solrj/SolrQuery.java | 768 +++++ .../apache/solr/client/solrj/SolrRequest.java | 89 + .../solr/client/solrj/SolrResponse.java | 35 + .../apache/solr/client/solrj/SolrServer.java | 136 + 
.../client/solrj/SolrServerException.java | 54 + .../solrj/beans/DocumentObjectBinder.java | 405 +++ .../apache/solr/client/solrj/beans/Field.java | 35 + .../solrj/impl/BinaryRequestWriter.java | 116 + .../solrj/impl/BinaryResponseParser.java | 54 + .../solrj/impl/CommonsHttpSolrServer.java | 657 +++++ .../client/solrj/impl/LBHttpSolrServer.java | 321 +++ .../solrj/impl/StreamingUpdateSolrServer.java | 261 ++ .../client/solrj/impl/XMLResponseParser.java | 442 +++ .../solrj/request/AbstractUpdateRequest.java | 142 + .../request/ContentStreamUpdateRequest.java | 82 + .../solrj/request/CoreAdminRequest.java | 299 ++ .../solrj/request/DirectXmlRequest.java | 72 + .../request/DocumentAnalysisRequest.java | 207 ++ .../solrj/request/FieldAnalysisRequest.java | 279 ++ .../request/JavaBinUpdateRequestCodec.java | 209 ++ .../client/solrj/request/LukeRequest.java | 126 + .../client/solrj/request/QueryRequest.java | 99 + .../client/solrj/request/RequestWriter.java | 134 + .../solr/client/solrj/request/SolrPing.java | 64 + .../client/solrj/request/UpdateRequest.java | 275 ++ .../solrj/response/AnalysisResponseBase.java | 252 ++ .../solrj/response/CoreAdminResponse.java | 58 + .../response/DocumentAnalysisResponse.java | 250 ++ .../client/solrj/response/FacetField.java | 176 ++ .../solrj/response/FieldAnalysisResponse.java | 204 ++ .../client/solrj/response/FieldStatsInfo.java | 161 ++ .../client/solrj/response/LukeResponse.java | 270 ++ .../client/solrj/response/QueryResponse.java | 351 +++ .../solrj/response/SolrPingResponse.java | 30 + .../solrj/response/SolrResponseBase.java | 91 + .../solrj/response/SpellCheckResponse.java | 165 ++ .../client/solrj/response/TermsResponse.java | 89 + .../client/solrj/response/UpdateResponse.java | 32 + .../solr/client/solrj/util/ClientUtils.java | 228 ++ .../solr/BaseDistributedSearchTestCase.java | 552 ++++ .../apache/solr/BasicFunctionalityTest.java | 689 +++++ .../org/apache/solr/ConvertedLegacyTest.java | 1345 +++++++++ 
.../apache/solr/DisMaxRequestHandlerTest.java | 220 ++ .../test/org/apache/solr/EchoParamsTest.java | 67 + .../org/apache/solr/MinimalSchemaTest.java | 129 + .../org/apache/solr/OutputWriterTest.java | 102 + solr/src/test/org/apache/solr/SampleTest.java | 123 + .../org/apache/solr/SolrInfoMBeanTest.java | 108 + .../apache/solr/TestDistributedSearch.java | 207 ++ .../org/apache/solr/TestPluginEnable.java | 32 + .../apache/solr/TestSolrCoreProperties.java | 144 + solr/src/test/org/apache/solr/TestTrie.java | 273 ++ .../solr/analysis/BaseTokenTestCase.java | 153 + .../CommonGramsFilterFactoryTest.java | 85 + .../solr/analysis/CommonGramsFilterTest.java | 308 ++ .../CommonGramsQueryFilterFactoryTest.java | 84 + .../DoubleMetaphoneFilterFactoryTest.java | 73 + .../analysis/DoubleMetaphoneFilterTest.java | 67 + .../EnglishPorterFilterFactoryTest.java | 106 + .../analysis/HTMLStripCharFilterTest.java | 266 ++ .../solr/analysis/LengthFilterTest.java | 39 + .../SnowballPorterFilterFactoryTest.java | 130 + .../solr/analysis/TestArabicFilters.java | 65 + .../TestBrazilianStemFilterFactory.java | 41 + .../analysis/TestBufferedTokenStream.java | 92 + .../analysis/TestCJKTokenizerFactory.java | 38 + .../analysis/TestCapitalizationFilter.java | 213 ++ .../analysis/TestChineseFilterFactory.java | 41 + .../analysis/TestChineseTokenizerFactory.java | 38 + .../TestCollationKeyFilterFactory.java | 190 ++ ...estDelimitedPayloadTokenFilterFactory.java | 79 + ...tionaryCompoundWordTokenFilterFactory.java | 52 + .../analysis/TestDutchStemFilterFactory.java | 41 + .../analysis/TestElisionFilterFactory.java | 51 + .../analysis/TestFrenchStemFilterFactory.java | 41 + .../analysis/TestGermanStemFilterFactory.java | 41 + .../TestGreekLowerCaseFilterFactory.java | 41 + .../analysis/TestHyphenatedWordsFilter.java | 51 + .../solr/analysis/TestKeepFilterFactory.java | 60 + .../solr/analysis/TestKeepWordFilter.java | 77 + .../TestMappingCharFilterFactory.java | 52 + 
.../solr/analysis/TestMultiWordSynonyms.java | 27 + .../solr/analysis/TestNGramFilters.java | 163 ++ .../TestPatternReplaceCharFilter.java | 192 ++ .../analysis/TestPatternReplaceFilter.java | 81 + .../analysis/TestPatternTokenizerFactory.java | 138 + ...TestPersianNormalizationFilterFactory.java | 41 + .../solr/analysis/TestPhoneticFilter.java | 96 + .../analysis/TestPorterStemFilterFactory.java | 41 + .../TestRemoveDuplicatesTokenFilter.java | 121 + .../TestReverseStringFilterFactory.java | 41 + .../TestReversedWildcardFilterFactory.java | 139 + .../solr/analysis/TestRussianFilters.java | 79 + .../analysis/TestShingleFilterFactory.java | 73 + .../solr/analysis/TestStandardFactories.java | 121 + .../solr/analysis/TestStopFilterFactory.java | 61 + .../solr/analysis/TestSynonymFilter.java | 416 +++ .../apache/solr/analysis/TestSynonymMap.java | 273 ++ .../analysis/TestThaiWordFilterFactory.java | 42 + .../apache/solr/analysis/TestTrimFilter.java | 116 + .../analysis/TestWordDelimiterFilter.java | 413 +++ .../client/solrj/LargeVolumeTestBase.java | 115 + .../solrj/MergeIndexesExampleTestBase.java | 136 + .../solrj/MultiCoreExampleTestBase.java | 146 + .../client/solrj/SolrExampleTestBase.java | 57 + .../solr/client/solrj/SolrExampleTests.java | 569 ++++ .../solr/client/solrj/SolrExceptionTest.java | 56 + .../solr/client/solrj/SolrQueryTest.java | 198 ++ .../solr/client/solrj/StartSolrJetty.java | 71 + .../solr/client/solrj/TestBatchUpdate.java | 163 ++ .../client/solrj/TestLBHttpSolrServer.java | 233 ++ .../solrj/beans/TestDocumentObjectBinder.java | 255 ++ .../solrj/embedded/JettyWebappTest.java | 95 + .../embedded/LargeVolumeBinaryJettyTest.java | 77 + .../embedded/LargeVolumeEmbeddedTest.java | 50 + .../solrj/embedded/LargeVolumeJettyTest.java | 77 + .../embedded/MergeIndexesEmbeddedTest.java | 70 + .../solrj/embedded/MultiCoreEmbeddedTest.java | 65 + .../embedded/MultiCoreExampleJettyTest.java | 96 + .../embedded/SolrExampleEmbeddedTest.java | 52 + 
.../solrj/embedded/SolrExampleJettyTest.java | 96 + .../embedded/SolrExampleStreamingTest.java | 84 + .../solrj/embedded/TestSolrProperties.java | 169 ++ .../solrj/request/TestUpdateRequestCodec.java | 102 + .../response/AnlysisResponseBaseTest.java | 121 + .../DocumentAnalysisResponseTest.java | 153 + .../response/FieldAnalysisResponseTest.java | 123 + .../solrj/response/QueryResponseTest.java | 57 + .../solrj/response/TermsResponseTest.java | 79 + .../response/TestSpellCheckResponse.java | 129 + .../client/solrj/util/ClientUtilsTest.java | 37 + .../apache/solr/common/SolrDocumentTest.java | 194 ++ .../params/ModifiableSolrParamsTest.java | 126 + .../solr/common/params/SolrParamTest.java | 189 ++ .../solr/common/util/ContentStreamTest.java | 90 + .../apache/solr/common/util/DOMUtilTest.java | 88 + .../solr/common/util/FileUtilsTest.java | 33 + .../solr/common/util/IteratorChainTest.java | 106 + .../solr/common/util/NamedListTest.java | 32 + .../solr/common/util/TestFastInputStream.java | 94 + .../org/apache/solr/common/util/TestHash.java | 102 + .../solr/common/util/TestNamedListCodec.java | 260 ++ .../solr/common/util/TestXMLEscaping.java | 71 + .../solr/core/AlternateDirectoryTest.java | 52 + .../solr/core/AlternateIndexReaderTest.java | 61 + .../solr/core/DummyValueSourceParser.java | 55 + .../apache/solr/core/FakeDeletionPolicy.java | 57 + .../solr/core/IndexReaderFactoryTest.java | 48 + .../MockQuerySenderListenerReqHandler.java | 56 + .../apache/solr/core/RequestHandlersTest.java | 79 + .../apache/solr/core/ResourceLoaderTest.java | 91 + .../org/apache/solr/core/SOLR749Test.java | 49 + .../org/apache/solr/core/SolrCoreTest.java | 217 ++ .../solr/core/TestArbitraryIndexDir.java | 118 + .../org/apache/solr/core/TestBadConfig.java | 51 + .../test/org/apache/solr/core/TestConfig.java | 146 + .../apache/solr/core/TestJmxIntegration.java | 120 + .../apache/solr/core/TestJmxMonitoredMap.java | 155 + .../TestLegacyMergeSchedulerPolicyConfig.java | 36 + 
.../org/apache/solr/core/TestPropInject.java | 57 + .../solr/core/TestQuerySenderListener.java | 61 + .../solr/core/TestSolrDeletionPolicy1.java | 128 + .../solr/core/TestSolrDeletionPolicy2.java | 66 + .../apache/solr/core/TestXIncludeConfig.java | 52 + .../handler/AnalysisRequestHandlerTest.java | 102 + .../AnalysisRequestHandlerTestBase.java | 115 + .../DocumentAnalysisRequestHandlerTest.java | 242 ++ .../FieldAnalysisRequestHandlerTest.java | 322 +++ .../solr/handler/MoreLikeThisHandlerTest.java | 111 + .../SpellCheckerRequestHandlerTest.java | 479 ++++ .../handler/StandardRequestHandlerTest.java | 103 + .../apache/solr/handler/TestCSVLoader.java | 286 ++ .../solr/handler/TestReplicationHandler.java | 657 +++++ .../handler/XmlUpdateRequestHandlerTest.java | 82 + .../handler/admin/LukeRequestHandlerTest.java | 99 + .../handler/admin/SystemInfoHandlerTest.java | 50 + .../DistributedSpellCheckComponentTest.java | 41 + .../DistributedTermsComponentTest.java | 36 + .../QueryElevationComponentTest.java | 253 ++ .../handler/component/SearchHandlerTest.java | 97 + .../component/SpellCheckComponentTest.java | 363 +++ .../handler/component/StatsComponentTest.java | 226 ++ .../component/TermVectorComponentTest.java | 233 ++ .../handler/component/TermsComponentTest.java | 561 ++++ .../solr/highlight/DummyHighlighter.java | 43 + .../highlight/FastVectorHighlighterTest.java | 71 + .../solr/highlight/HighlighterConfigTest.java | 68 + .../solr/highlight/HighlighterTest.java | 749 +++++ .../apache/solr/request/JSONWriterTest.java | 89 + .../request/SimpleFacetsLegacySortTest.java | 98 + .../apache/solr/request/SimpleFacetsTest.java | 825 ++++++ .../request/TestBinaryResponseWriter.java | 68 + .../org/apache/solr/request/TestFaceting.java | 246 ++ .../apache/solr/request/TestWriterPerf.java | 181 ++ .../solr/schema/BadIndexSchemaTest.java | 85 + .../org/apache/solr/schema/CopyFieldTest.java | 180 ++ .../solr/schema/CustomSimilarityFactory.java | 25 + 
.../org/apache/solr/schema/DateFieldTest.java | 109 + .../apache/solr/schema/IndexSchemaTest.java | 147 + .../solr/schema/LegacyDateFieldTest.java | 104 + .../schema/MockConfigurableSimilarity.java | 31 + .../solr/schema/NotRequiredUniqueKeyTest.java | 56 + .../org/apache/solr/schema/PolyFieldTest.java | 225 ++ .../solr/schema/RequiredFieldsTest.java | 139 + .../apache/solr/schema/TestBinaryField.java | 185 ++ .../org/apache/solr/schema/UUIDFieldTest.java | 68 + .../org/apache/solr/search/DocSetPerf.java | 181 ++ .../apache/solr/search/FooQParserPlugin.java | 46 + .../solr/search/FunctionQParserTest.java | 55 + .../apache/solr/search/QueryParsingTest.java | 201 ++ .../org/apache/solr/search/TestDocSet.java | 448 +++ .../solr/search/TestExtendedDismaxParser.java | 176 ++ .../apache/solr/search/TestFastLRUCache.java | 243 ++ .../apache/solr/search/TestIndexSearcher.java | 138 + .../apache/solr/search/TestQueryTypes.java | 289 ++ .../apache/solr/search/TestQueryUtils.java | 283 ++ .../apache/solr/search/TestRangeQuery.java | 283 ++ .../apache/solr/search/TestSearchPerf.java | 248 ++ .../test/org/apache/solr/search/TestSort.java | 198 ++ .../search/function/NvlValueSourceParser.java | 72 + .../search/function/SortByFunctionTest.java | 96 + .../search/function/TestFunctionQuery.java | 400 +++ .../distance/DistanceFunctionTest.java | 138 + .../apache/solr/servlet/CacheHeaderTest.java | 246 ++ .../solr/servlet/CacheHeaderTestBase.java | 163 ++ .../servlet/DirectSolrConnectionTest.java | 83 + .../solr/servlet/NoCacheHeaderTest.java | 158 ++ .../solr/servlet/SolrRequestParserTest.java | 171 ++ .../spelling/FileBasedSpellCheckerTest.java | 174 ++ .../spelling/IndexBasedSpellCheckerTest.java | 308 ++ .../solr/spelling/SimpleQueryConverter.java | 49 + .../spelling/SpellingQueryConverterTest.java | 132 + .../apache/solr/update/AutoCommitTest.java | 222 ++ .../DirectUpdateHandlerOptimizeTest.java | 101 + .../solr/update/DirectUpdateHandlerTest.java | 345 +++ 
.../solr/update/DocumentBuilderTest.java | 76 + .../solr/update/TestIndexingPerformance.java | 116 + .../CustomUpdateRequestProcessorFactory.java | 46 + .../SignatureUpdateProcessorFactoryTest.java | 187 ++ .../UpdateRequestProcessorFactoryTest.java | 55 + .../org/apache/solr/util/ArraysUtilsTest.java | 48 + .../test/org/apache/solr/util/BitSetPerf.java | 196 ++ .../apache/solr/util/DateMathParserTest.java | 289 ++ .../apache/solr/util/SolrPluginUtilsTest.java | 382 +++ .../apache/solr/util/TestCharArrayMap.java | 208 ++ .../org/apache/solr/util/TestNumberUtils.java | 256 ++ .../org/apache/solr/util/TestOpenBitSet.java | 209 ++ .../test/org/apache/solr/util/TestUtils.java | 134 + solr/src/test/test-files/README | 21 + solr/src/test/test-files/books.csv | 11 + .../test/test-files/htmlStripReaderTest.html | 350 +++ solr/src/test/test-files/lib-dirs/README | 18 + .../lib-dirs/a/a1/empty-file-a1.txt | 1 + .../lib-dirs/a/a2/empty-file-a2.txt | 1 + .../lib-dirs/b/b1/empty-file-b1.txt | 1 + .../lib-dirs/b/b2/empty-file-b2.txt | 1 + .../lib-dirs/c/c1/empty-file-c1.txt | 1 + .../lib-dirs/c/c2/empty-file-c2.txt | 1 + .../lib-dirs/d/d1/empty-file-d1.txt | 1 + .../lib-dirs/d/d2/empty-file-d2.txt | 1 + solr/src/test/test-files/mailing_lists.pdf | 382 +++ .../test-files/sampleDateFacetResponse.xml | 4 + .../test/test-files/solr/conf/bad-schema.xml | 49 + .../test-files/solr/conf/bad_solrconfig.xml | 29 + .../solr/conf/compoundDictionary.txt | 19 + .../src/test/test-files/solr/conf/elevate.xml | 36 + .../test-files/solr/conf/frenchArticles.txt | 24 + solr/src/test/test-files/solr/conf/keep-1.txt | 17 + solr/src/test/test-files/solr/conf/keep-2.txt | 17 + .../solr/conf/mapping-ISOLatin1Accent.txt | 246 ++ .../test-files/solr/conf/old_synonyms.txt | 22 + .../test/test-files/solr/conf/protwords.txt | 23 + .../solr/conf/schema-binaryfield.xml | 100 + .../solr/conf/schema-copyfield-test.xml | 468 +++ .../test-files/solr/conf/schema-minimal.xml | 25 + 
.../conf/schema-not-required-unique-key.xml | 46 + .../solr/conf/schema-replication1.xml | 49 + .../solr/conf/schema-replication2.xml | 52 + .../solr/conf/schema-required-fields.xml | 434 +++ .../test-files/solr/conf/schema-reversed.xml | 81 + .../solr/conf/schema-spellchecker.xml | 83 + .../test-files/solr/conf/schema-stop-keep.xml | 67 + .../test/test-files/solr/conf/schema-trie.xml | 328 +++ solr/src/test/test-files/solr/conf/schema.xml | 570 ++++ .../test/test-files/solr/conf/schema11.xml | 350 +++ .../test/test-files/solr/conf/schema12.xml | 533 ++++ .../solr/conf/solrconfig-SOLR-749.xml | 405 +++ .../solr/conf/solrconfig-altdirectory.xml | 412 +++ .../solr/conf/solrconfig-delpolicy1.xml | 427 +++ .../solr/conf/solrconfig-delpolicy2.xml | 425 +++ .../solr/conf/solrconfig-duh-optimize.xml | 415 +++ .../solr/conf/solrconfig-elevate.xml | 223 ++ .../solr/conf/solrconfig-enableplugin.xml | 84 + .../solr/conf/solrconfig-facet-sort.xml | 406 +++ .../solr/conf/solrconfig-functionquery.xml | 313 ++ .../solr/conf/solrconfig-highlight.xml | 351 +++ .../solr/conf/solrconfig-legacy.xml | 455 +++ .../solr/conf/solrconfig-master.xml | 98 + .../solr/conf/solrconfig-master1.xml | 98 + .../solr/conf/solrconfig-master2.xml | 98 + .../solr/conf/solrconfig-nocache.xml | 309 ++ .../solrconfig-propinject-indexdefault.xml | 464 +++ .../solr/conf/solrconfig-propinject.xml | 465 +++ .../solr/conf/solrconfig-querysender.xml | 58 + .../solr/conf/solrconfig-reqHandler.incl | 22 + .../test-files/solr/conf/solrconfig-slave.xml | 88 + .../solr/conf/solrconfig-slave1.xml | 88 + .../conf/solrconfig-solcoreproperties.xml | 82 + .../solr/conf/solrconfig-spellchecker.xml | 103 + .../solr/conf/solrconfig-termindex.xml | 452 +++ .../solr/conf/solrconfig-transformers.xml | 49 + .../solr/conf/solrconfig-xinclude.xml | 429 +++ .../test/test-files/solr/conf/solrconfig.xml | 464 +++ .../test-files/solr/conf/solrconfig_perf.xml | 859 ++++++ solr/src/test/test-files/solr/conf/stop-1.txt | 17 + 
solr/src/test/test-files/solr/conf/stop-2.txt | 17 + .../test/test-files/solr/conf/stopwords.txt | 58 + .../test/test-files/solr/conf/synonyms.txt | 31 + .../test/test-files/solr/conf/xslt/dummy.xsl | 39 + .../test-files/solr/crazy-path-to-config.xml | 80 + .../test-files/solr/crazy-path-to-schema.xml | 48 + solr/src/test/test-files/solr/lib/README | 18 + .../solr/lib/classes/empty-file-main-lib.txt | 1 + .../test-files/solr/shared/conf/schema.xml | 69 + .../solr/shared/conf/solrconfig.xml | 43 + .../solr/shared/conf/stopwords-en.txt | 16 + .../solr/shared/conf/stopwords-fr.txt | 16 + solr/src/test/test-files/solr/shared/solr.xml | 47 + solr/src/test/test-files/spellings.txt | 16 + .../solrj/embedded/EmbeddedSolrServer.java | 172 ++ .../solrj/embedded/JettySolrRunner.java | 230 ++ .../solr/servlet/DirectSolrConnection.java | 197 ++ .../solr/servlet/LogLevelSelection.java | 307 ++ .../solr/servlet/SolrDispatchFilter.java | 399 +++ .../solr/servlet/SolrRequestParsers.java | 412 +++ .../org/apache/solr/servlet/SolrServlet.java | 125 + .../solr/servlet/SolrServletRequest.java | 33 + .../solr/servlet/SolrUpdateServlet.java | 81 + .../servlet/cache/HttpCacheHeaderUtil.java | 346 +++ .../org/apache/solr/servlet/cache/Method.java | 30 + solr/src/webapp/web/WEB-INF/web.xml | 148 + solr/src/webapp/web/WEB-INF/weblogic.xml | 12 + solr/src/webapp/web/admin/_info.jsp | 118 + solr/src/webapp/web/admin/action.jsp | 94 + solr/src/webapp/web/admin/analysis.jsp | 498 ++++ solr/src/webapp/web/admin/analysis.xsl | 179 ++ .../src/webapp/web/admin/distributiondump.jsp | 160 ++ solr/src/webapp/web/admin/favicon.ico | Bin 0 -> 1146 bytes solr/src/webapp/web/admin/form.jsp | 137 + solr/src/webapp/web/admin/get-file.jsp | 72 + solr/src/webapp/web/admin/get-properties.jsp | 24 + solr/src/webapp/web/admin/header.jsp | 41 + solr/src/webapp/web/admin/index.jsp | 157 + solr/src/webapp/web/admin/jquery-1.2.3.min.js | 32 + solr/src/webapp/web/admin/meta.xsl | 34 + 
solr/src/webapp/web/admin/ping.jsp | 52 + solr/src/webapp/web/admin/ping.xsl | 71 + solr/src/webapp/web/admin/raw-schema.jsp | 38 + solr/src/webapp/web/admin/registry.jsp | 107 + solr/src/webapp/web/admin/registry.xsl | 321 +++ .../webapp/web/admin/replication/header.jsp | 89 + .../webapp/web/admin/replication/index.jsp | 378 +++ solr/src/webapp/web/admin/schema.jsp | 675 +++++ solr/src/webapp/web/admin/solr-admin.css | 215 ++ solr/src/webapp/web/admin/solr_small.png | Bin 0 -> 7926 bytes solr/src/webapp/web/admin/stats.jsp | 92 + solr/src/webapp/web/admin/stats.xsl | 220 ++ solr/src/webapp/web/admin/tabular.xsl | 141 + solr/src/webapp/web/admin/threaddump.jsp | 110 + solr/src/webapp/web/admin/threaddump.xsl | 103 + solr/src/webapp/web/favicon.ico | Bin 0 -> 1146 bytes solr/src/webapp/web/index.jsp | 49 + 1687 files changed, 228538 insertions(+) create mode 100644 solr/CHANGES.txt create mode 100644 solr/KEYS create mode 100644 solr/LICENSE.txt create mode 100644 solr/NOTICE.txt create mode 100644 solr/README.txt create mode 100644 solr/build.xml create mode 100644 solr/client/javascript/README.txt create mode 100644 solr/client/python/README.txt create mode 100644 solr/client/ruby/flare/README create mode 100644 solr/client/ruby/flare/Rakefile create mode 100644 solr/client/ruby/flare/app/controllers/application.rb create mode 100644 solr/client/ruby/flare/app/controllers/i18n_controller.rb create mode 100644 solr/client/ruby/flare/app/helpers/application_helper.rb create mode 100644 solr/client/ruby/flare/app/helpers/browse_helper.rb create mode 100755 solr/client/ruby/flare/app/helpers/simile_helper.rb create mode 100755 solr/client/ruby/flare/app/views/browse/_suggest.rhtml create mode 100755 solr/client/ruby/flare/app/views/browse/facet.rhtml create mode 100755 solr/client/ruby/flare/app/views/document/_document_delicious.rhtml create mode 100755 solr/client/ruby/flare/app/views/document/_document_tang.rhtml create mode 100755 
solr/client/ruby/flare/app/views/document/_document_uva.rhtml create mode 100644 solr/client/ruby/flare/app/views/i18n/index.rhtml create mode 100755 solr/client/ruby/flare/app/views/layouts/browse.rhtml create mode 100644 solr/client/ruby/flare/config/boot.rb create mode 100644 solr/client/ruby/flare/config/database.yml create mode 100644 solr/client/ruby/flare/config/environment.rb create mode 100644 solr/client/ruby/flare/config/environments/development.rb create mode 100644 solr/client/ruby/flare/config/environments/production.rb create mode 100644 solr/client/ruby/flare/config/environments/test.rb create mode 100644 solr/client/ruby/flare/config/routes.rb create mode 100644 solr/client/ruby/flare/db/schema.rb create mode 100755 solr/client/ruby/flare/lib/tasks/clear_database_prerequisites.rake create mode 100755 solr/client/ruby/flare/lib/tasks/routes.rake create mode 100644 solr/client/ruby/flare/public/.htaccess create mode 100644 solr/client/ruby/flare/public/404.html create mode 100644 solr/client/ruby/flare/public/500.html create mode 100755 solr/client/ruby/flare/public/dispatch.cgi create mode 100755 solr/client/ruby/flare/public/dispatch.fcgi create mode 100755 solr/client/ruby/flare/public/dispatch.rb create mode 100644 solr/client/ruby/flare/public/favicon.ico create mode 100644 solr/client/ruby/flare/public/images/flare.jpg create mode 100644 solr/client/ruby/flare/public/images/pie_0.png create mode 100644 solr/client/ruby/flare/public/images/pie_1.png create mode 100644 solr/client/ruby/flare/public/images/pie_10.png create mode 100644 solr/client/ruby/flare/public/images/pie_100.png create mode 100644 solr/client/ruby/flare/public/images/pie_11.png create mode 100644 solr/client/ruby/flare/public/images/pie_12.png create mode 100644 solr/client/ruby/flare/public/images/pie_13.png create mode 100644 solr/client/ruby/flare/public/images/pie_14.png create mode 100644 solr/client/ruby/flare/public/images/pie_15.png create mode 100644 
solr/client/ruby/flare/public/images/pie_16.png create mode 100644 solr/client/ruby/flare/public/images/pie_17.png create mode 100644 solr/client/ruby/flare/public/images/pie_18.png create mode 100644 solr/client/ruby/flare/public/images/pie_19.png create mode 100644 solr/client/ruby/flare/public/images/pie_2.png create mode 100644 solr/client/ruby/flare/public/images/pie_20.png create mode 100644 solr/client/ruby/flare/public/images/pie_21.png create mode 100644 solr/client/ruby/flare/public/images/pie_22.png create mode 100644 solr/client/ruby/flare/public/images/pie_23.png create mode 100644 solr/client/ruby/flare/public/images/pie_24.png create mode 100644 solr/client/ruby/flare/public/images/pie_25.png create mode 100644 solr/client/ruby/flare/public/images/pie_26.png create mode 100644 solr/client/ruby/flare/public/images/pie_27.png create mode 100644 solr/client/ruby/flare/public/images/pie_28.png create mode 100644 solr/client/ruby/flare/public/images/pie_29.png create mode 100644 solr/client/ruby/flare/public/images/pie_3.png create mode 100644 solr/client/ruby/flare/public/images/pie_30.png create mode 100644 solr/client/ruby/flare/public/images/pie_31.png create mode 100644 solr/client/ruby/flare/public/images/pie_32.png create mode 100644 solr/client/ruby/flare/public/images/pie_33.png create mode 100644 solr/client/ruby/flare/public/images/pie_34.png create mode 100644 solr/client/ruby/flare/public/images/pie_35.png create mode 100644 solr/client/ruby/flare/public/images/pie_36.png create mode 100644 solr/client/ruby/flare/public/images/pie_37.png create mode 100644 solr/client/ruby/flare/public/images/pie_38.png create mode 100644 solr/client/ruby/flare/public/images/pie_39.png create mode 100644 solr/client/ruby/flare/public/images/pie_4.png create mode 100644 solr/client/ruby/flare/public/images/pie_40.png create mode 100644 solr/client/ruby/flare/public/images/pie_41.png create mode 100644 solr/client/ruby/flare/public/images/pie_42.png create mode 
100644 solr/client/ruby/flare/public/images/pie_43.png create mode 100644 solr/client/ruby/flare/public/images/pie_44.png create mode 100644 solr/client/ruby/flare/public/images/pie_45.png create mode 100644 solr/client/ruby/flare/public/images/pie_46.png create mode 100644 solr/client/ruby/flare/public/images/pie_47.png create mode 100644 solr/client/ruby/flare/public/images/pie_48.png create mode 100644 solr/client/ruby/flare/public/images/pie_49.png create mode 100644 solr/client/ruby/flare/public/images/pie_5.png create mode 100644 solr/client/ruby/flare/public/images/pie_50.png create mode 100644 solr/client/ruby/flare/public/images/pie_51.png create mode 100644 solr/client/ruby/flare/public/images/pie_52.png create mode 100644 solr/client/ruby/flare/public/images/pie_53.png create mode 100644 solr/client/ruby/flare/public/images/pie_54.png create mode 100644 solr/client/ruby/flare/public/images/pie_55.png create mode 100644 solr/client/ruby/flare/public/images/pie_56.png create mode 100644 solr/client/ruby/flare/public/images/pie_57.png create mode 100644 solr/client/ruby/flare/public/images/pie_58.png create mode 100644 solr/client/ruby/flare/public/images/pie_59.png create mode 100644 solr/client/ruby/flare/public/images/pie_6.png create mode 100644 solr/client/ruby/flare/public/images/pie_60.png create mode 100644 solr/client/ruby/flare/public/images/pie_61.png create mode 100644 solr/client/ruby/flare/public/images/pie_62.png create mode 100644 solr/client/ruby/flare/public/images/pie_63.png create mode 100644 solr/client/ruby/flare/public/images/pie_64.png create mode 100644 solr/client/ruby/flare/public/images/pie_65.png create mode 100644 solr/client/ruby/flare/public/images/pie_66.png create mode 100644 solr/client/ruby/flare/public/images/pie_67.png create mode 100644 solr/client/ruby/flare/public/images/pie_68.png create mode 100644 solr/client/ruby/flare/public/images/pie_69.png create mode 100644 solr/client/ruby/flare/public/images/pie_7.png 
create mode 100644 solr/client/ruby/flare/public/images/pie_70.png create mode 100644 solr/client/ruby/flare/public/images/pie_71.png create mode 100644 solr/client/ruby/flare/public/images/pie_72.png create mode 100644 solr/client/ruby/flare/public/images/pie_73.png create mode 100644 solr/client/ruby/flare/public/images/pie_74.png create mode 100644 solr/client/ruby/flare/public/images/pie_75.png create mode 100644 solr/client/ruby/flare/public/images/pie_76.png create mode 100644 solr/client/ruby/flare/public/images/pie_77.png create mode 100644 solr/client/ruby/flare/public/images/pie_78.png create mode 100644 solr/client/ruby/flare/public/images/pie_79.png create mode 100644 solr/client/ruby/flare/public/images/pie_8.png create mode 100644 solr/client/ruby/flare/public/images/pie_80.png create mode 100644 solr/client/ruby/flare/public/images/pie_81.png create mode 100644 solr/client/ruby/flare/public/images/pie_82.png create mode 100644 solr/client/ruby/flare/public/images/pie_83.png create mode 100644 solr/client/ruby/flare/public/images/pie_84.png create mode 100644 solr/client/ruby/flare/public/images/pie_85.png create mode 100644 solr/client/ruby/flare/public/images/pie_86.png create mode 100644 solr/client/ruby/flare/public/images/pie_87.png create mode 100644 solr/client/ruby/flare/public/images/pie_88.png create mode 100644 solr/client/ruby/flare/public/images/pie_89.png create mode 100644 solr/client/ruby/flare/public/images/pie_9.png create mode 100644 solr/client/ruby/flare/public/images/pie_90.png create mode 100644 solr/client/ruby/flare/public/images/pie_91.png create mode 100644 solr/client/ruby/flare/public/images/pie_92.png create mode 100644 solr/client/ruby/flare/public/images/pie_93.png create mode 100644 solr/client/ruby/flare/public/images/pie_94.png create mode 100644 solr/client/ruby/flare/public/images/pie_95.png create mode 100644 solr/client/ruby/flare/public/images/pie_96.png create mode 100644 
solr/client/ruby/flare/public/images/pie_97.png create mode 100644 solr/client/ruby/flare/public/images/pie_98.png create mode 100644 solr/client/ruby/flare/public/images/pie_99.png create mode 100644 solr/client/ruby/flare/public/images/rails.png create mode 100644 solr/client/ruby/flare/public/images/simile-exhibit.png create mode 100644 solr/client/ruby/flare/public/images/solr_small.png create mode 100644 solr/client/ruby/flare/public/images/x-close.gif create mode 100644 solr/client/ruby/flare/public/javascripts/application.js create mode 100644 solr/client/ruby/flare/public/javascripts/controls.js create mode 100644 solr/client/ruby/flare/public/javascripts/dragdrop.js create mode 100644 solr/client/ruby/flare/public/javascripts/effects.js create mode 100644 solr/client/ruby/flare/public/javascripts/prototype.js create mode 100644 solr/client/ruby/flare/public/plugin_assets/README create mode 100644 solr/client/ruby/flare/public/stylesheets/flare.css create mode 100755 solr/client/ruby/flare/script/about create mode 100755 solr/client/ruby/flare/script/breakpointer create mode 100755 solr/client/ruby/flare/script/console create mode 100755 solr/client/ruby/flare/script/destroy create mode 100755 solr/client/ruby/flare/script/generate create mode 100755 solr/client/ruby/flare/script/performance/benchmarker create mode 100755 solr/client/ruby/flare/script/performance/profiler create mode 100755 solr/client/ruby/flare/script/pie.rb create mode 100755 solr/client/ruby/flare/script/plugin create mode 100755 solr/client/ruby/flare/script/process/inspector create mode 100755 solr/client/ruby/flare/script/process/reaper create mode 100755 solr/client/ruby/flare/script/process/spawner create mode 100755 solr/client/ruby/flare/script/runner create mode 100755 solr/client/ruby/flare/script/server create mode 100644 solr/client/ruby/flare/test/functional/browse_controller_test.rb create mode 100644 solr/client/ruby/flare/test/test_helper.rb create mode 100644 
solr/client/ruby/flare/vendor/plugins/engines/CHANGELOG create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/MIT-LICENSE create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/README create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/Rakefile create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/UPGRADING create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/about.yml create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/generators/plugin_migration/USAGE create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/generators/plugin_migration/plugin_migration_generator.rb create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/generators/plugin_migration/templates/plugin_migration.erb create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/init.rb create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/install.rb create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/lib/engines.rb create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/lib/engines/deprecated_config_support.rb create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/lib/engines/plugin.rb create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/lib/engines/plugin_list.rb create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/lib/engines/plugin_migrator.rb create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions.rb create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/active_record.rb create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/dependencies.rb create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/migrations.rb create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/public_asset_helpers.rb create mode 100644 
solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/rails.rb create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/rails_initializer.rb create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/routing.rb create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/templates.rb create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/lib/engines/testing.rb create mode 100644 solr/client/ruby/flare/vendor/plugins/engines/tasks/engines.rake create mode 100644 solr/client/ruby/flare/vendor/plugins/flare/README create mode 100644 solr/client/ruby/flare/vendor/plugins/flare/Rakefile create mode 100644 solr/client/ruby/flare/vendor/plugins/flare/app/controllers/browse_controller.rb create mode 100755 solr/client/ruby/flare/vendor/plugins/flare/app/controllers/document_controller.rb create mode 100755 solr/client/ruby/flare/vendor/plugins/flare/app/controllers/simile_controller.rb create mode 100755 solr/client/ruby/flare/vendor/plugins/flare/app/helpers/application_helper.rb create mode 100755 solr/client/ruby/flare/vendor/plugins/flare/app/views/browse/_suggest.rhtml create mode 100755 solr/client/ruby/flare/vendor/plugins/flare/app/views/browse/facet.rhtml create mode 100755 solr/client/ruby/flare/vendor/plugins/flare/app/views/browse/index.rhtml create mode 100755 solr/client/ruby/flare/vendor/plugins/flare/app/views/document/_document_development.rhtml create mode 100755 solr/client/ruby/flare/vendor/plugins/flare/app/views/layouts/browse.rhtml create mode 100644 solr/client/ruby/flare/vendor/plugins/flare/app/views/simile/exhibit.rhtml create mode 100755 solr/client/ruby/flare/vendor/plugins/flare/app/views/simile/timeline.rhtml create mode 100755 solr/client/ruby/flare/vendor/plugins/flare/app/views/simile/timeline.rxml create mode 100644 solr/client/ruby/flare/vendor/plugins/flare/init.rb create mode 100644 
solr/client/ruby/flare/vendor/plugins/flare/lib/flare.rb create mode 100644 solr/client/ruby/flare/vendor/plugins/flare/lib/flare/context.rb create mode 100755 solr/client/ruby/flare/vendor/plugins/flare/lib/flare/controller_extensions.rb create mode 100755 solr/client/ruby/flare/vendor/plugins/flare/routes.rb create mode 100755 solr/client/ruby/flare/vendor/plugins/flare/test/flare_context_test.rb create mode 100644 solr/client/ruby/solr-ruby/CHANGES.yml create mode 100644 solr/client/ruby/solr-ruby/LICENSE.txt create mode 100644 solr/client/ruby/solr-ruby/README create mode 100644 solr/client/ruby/solr-ruby/Rakefile create mode 100755 solr/client/ruby/solr-ruby/examples/delicious_library/dl_importer.rb create mode 100644 solr/client/ruby/solr-ruby/examples/delicious_library/sample_export.txt create mode 100755 solr/client/ruby/solr-ruby/examples/marc/marc_importer.rb create mode 100755 solr/client/ruby/solr-ruby/examples/tang/tang_importer.rb create mode 100755 solr/client/ruby/solr-ruby/lib/solr.rb create mode 100755 solr/client/ruby/solr-ruby/lib/solr/connection.rb create mode 100644 solr/client/ruby/solr-ruby/lib/solr/document.rb create mode 100644 solr/client/ruby/solr-ruby/lib/solr/exception.rb create mode 100644 solr/client/ruby/solr-ruby/lib/solr/field.rb create mode 100755 solr/client/ruby/solr-ruby/lib/solr/importer.rb create mode 100755 solr/client/ruby/solr-ruby/lib/solr/importer/array_mapper.rb create mode 100755 solr/client/ruby/solr-ruby/lib/solr/importer/delimited_file_source.rb create mode 100644 solr/client/ruby/solr-ruby/lib/solr/importer/hpricot_mapper.rb create mode 100755 solr/client/ruby/solr-ruby/lib/solr/importer/mapper.rb create mode 100755 solr/client/ruby/solr-ruby/lib/solr/importer/solr_source.rb create mode 100755 solr/client/ruby/solr-ruby/lib/solr/importer/xpath_mapper.rb create mode 100755 solr/client/ruby/solr-ruby/lib/solr/indexer.rb create mode 100755 solr/client/ruby/solr-ruby/lib/solr/request.rb create mode 100644 
solr/client/ruby/solr-ruby/lib/solr/request/add_document.rb create mode 100644 solr/client/ruby/solr-ruby/lib/solr/request/base.rb create mode 100644 solr/client/ruby/solr-ruby/lib/solr/request/commit.rb create mode 100644 solr/client/ruby/solr-ruby/lib/solr/request/delete.rb create mode 100644 solr/client/ruby/solr-ruby/lib/solr/request/dismax.rb create mode 100755 solr/client/ruby/solr-ruby/lib/solr/request/index_info.rb create mode 100755 solr/client/ruby/solr-ruby/lib/solr/request/modify_document.rb create mode 100755 solr/client/ruby/solr-ruby/lib/solr/request/optimize.rb create mode 100644 solr/client/ruby/solr-ruby/lib/solr/request/ping.rb create mode 100644 solr/client/ruby/solr-ruby/lib/solr/request/select.rb create mode 100644 solr/client/ruby/solr-ruby/lib/solr/request/spellcheck.rb create mode 100755 solr/client/ruby/solr-ruby/lib/solr/request/standard.rb create mode 100644 solr/client/ruby/solr-ruby/lib/solr/request/update.rb create mode 100755 solr/client/ruby/solr-ruby/lib/solr/response.rb create mode 100644 solr/client/ruby/solr-ruby/lib/solr/response/add_document.rb create mode 100644 solr/client/ruby/solr-ruby/lib/solr/response/base.rb create mode 100644 solr/client/ruby/solr-ruby/lib/solr/response/commit.rb create mode 100644 solr/client/ruby/solr-ruby/lib/solr/response/delete.rb create mode 100644 solr/client/ruby/solr-ruby/lib/solr/response/dismax.rb create mode 100755 solr/client/ruby/solr-ruby/lib/solr/response/index_info.rb create mode 100755 solr/client/ruby/solr-ruby/lib/solr/response/modify_document.rb create mode 100755 solr/client/ruby/solr-ruby/lib/solr/response/optimize.rb create mode 100644 solr/client/ruby/solr-ruby/lib/solr/response/ping.rb create mode 100644 solr/client/ruby/solr-ruby/lib/solr/response/ruby.rb create mode 100755 solr/client/ruby/solr-ruby/lib/solr/response/select.rb create mode 100644 solr/client/ruby/solr-ruby/lib/solr/response/spellcheck.rb create mode 100644 
solr/client/ruby/solr-ruby/lib/solr/response/standard.rb create mode 100644 solr/client/ruby/solr-ruby/lib/solr/response/xml.rb create mode 100755 solr/client/ruby/solr-ruby/lib/solr/solrtasks.rb create mode 100755 solr/client/ruby/solr-ruby/lib/solr/util.rb create mode 100644 solr/client/ruby/solr-ruby/lib/solr/xml.rb create mode 100755 solr/client/ruby/solr-ruby/script/setup.rb create mode 100755 solr/client/ruby/solr-ruby/script/solrshell create mode 100644 solr/client/ruby/solr-ruby/solr/conf/admin-extra.html create mode 100644 solr/client/ruby/solr-ruby/solr/conf/protwords.txt create mode 100755 solr/client/ruby/solr-ruby/solr/conf/schema.xml create mode 100644 solr/client/ruby/solr-ruby/solr/conf/scripts.conf create mode 100755 solr/client/ruby/solr-ruby/solr/conf/solrconfig.xml create mode 100644 solr/client/ruby/solr-ruby/solr/conf/stopwords.txt create mode 100644 solr/client/ruby/solr-ruby/solr/conf/synonyms.txt create mode 100644 solr/client/ruby/solr-ruby/solr/conf/xslt/example.xsl create mode 100644 solr/client/ruby/solr-ruby/test/conf/admin-extra.html create mode 100644 solr/client/ruby/solr-ruby/test/conf/protwords.txt create mode 100755 solr/client/ruby/solr-ruby/test/conf/schema.xml create mode 100644 solr/client/ruby/solr-ruby/test/conf/scripts.conf create mode 100755 solr/client/ruby/solr-ruby/test/conf/solrconfig.xml create mode 100644 solr/client/ruby/solr-ruby/test/conf/stopwords.txt create mode 100644 solr/client/ruby/solr-ruby/test/conf/synonyms.txt create mode 100644 solr/client/ruby/solr-ruby/test/functional/server_test.rb create mode 100644 solr/client/ruby/solr-ruby/test/functional/test_solr_server.rb create mode 100644 solr/client/ruby/solr-ruby/test/unit/add_document_test.rb create mode 100755 solr/client/ruby/solr-ruby/test/unit/array_mapper_test.rb create mode 100755 solr/client/ruby/solr-ruby/test/unit/changes_yaml_test.rb create mode 100644 solr/client/ruby/solr-ruby/test/unit/commit_test.rb create mode 100755 
solr/client/ruby/solr-ruby/test/unit/connection_test.rb create mode 100755 solr/client/ruby/solr-ruby/test/unit/data_mapper_test.rb create mode 100644 solr/client/ruby/solr-ruby/test/unit/delete_test.rb create mode 100755 solr/client/ruby/solr-ruby/test/unit/delimited_file_source_test.rb create mode 100644 solr/client/ruby/solr-ruby/test/unit/dismax_request_test.rb create mode 100644 solr/client/ruby/solr-ruby/test/unit/document_test.rb create mode 100644 solr/client/ruby/solr-ruby/test/unit/field_test.rb create mode 100644 solr/client/ruby/solr-ruby/test/unit/hpricot_mapper_test.rb create mode 100644 solr/client/ruby/solr-ruby/test/unit/hpricot_test_file.xml create mode 100755 solr/client/ruby/solr-ruby/test/unit/indexer_test.rb create mode 100755 solr/client/ruby/solr-ruby/test/unit/modify_document_test.rb create mode 100644 solr/client/ruby/solr-ruby/test/unit/ping_test.rb create mode 100755 solr/client/ruby/solr-ruby/test/unit/request_test.rb create mode 100644 solr/client/ruby/solr-ruby/test/unit/response_test.rb create mode 100755 solr/client/ruby/solr-ruby/test/unit/select_test.rb create mode 100755 solr/client/ruby/solr-ruby/test/unit/solr_mock_base.rb create mode 100644 solr/client/ruby/solr-ruby/test/unit/spellcheck_response_test.rb create mode 100644 solr/client/ruby/solr-ruby/test/unit/spellchecker_request_test.rb create mode 100755 solr/client/ruby/solr-ruby/test/unit/standard_request_test.rb create mode 100644 solr/client/ruby/solr-ruby/test/unit/standard_response_test.rb create mode 100755 solr/client/ruby/solr-ruby/test/unit/suite.rb create mode 100755 solr/client/ruby/solr-ruby/test/unit/tab_delimited.txt create mode 100755 solr/client/ruby/solr-ruby/test/unit/util_test.rb create mode 100755 solr/client/ruby/solr-ruby/test/unit/xpath_mapper_test.rb create mode 100644 solr/client/ruby/solr-ruby/test/unit/xpath_test_file.xml create mode 100644 solr/common-build.xml create mode 100644 solr/contrib/clustering/CHANGES.txt create mode 100644 
solr/contrib/clustering/README.txt create mode 100644 solr/contrib/clustering/build.xml create mode 100644 solr/contrib/clustering/lib/carrot2-mini-3.1.0.jar create mode 100644 solr/contrib/clustering/lib/commons-lang-2.4.jar create mode 100644 solr/contrib/clustering/lib/ehcache-1.6.2.jar create mode 100644 solr/contrib/clustering/lib/jackson-core-asl-0.9.9-6.jar create mode 100644 solr/contrib/clustering/lib/jackson-mapper-asl-0.9.9-6.jar create mode 100644 solr/contrib/clustering/lib/log4j-1.2.14.jar create mode 100644 solr/contrib/clustering/solr-clustering-pom.xml.template create mode 100644 solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/ClusteringComponent.java create mode 100644 solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/ClusteringEngine.java create mode 100644 solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/ClusteringParams.java create mode 100644 solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/DocumentClusteringEngine.java create mode 100644 solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/SearchClusteringEngine.java create mode 100644 solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngine.java create mode 100644 solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/carrot2/CarrotParams.java create mode 100644 solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/AbstractClusteringTest.java create mode 100644 solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/ClusteringComponentTest.java create mode 100644 solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/MockDocumentClusteringEngine.java create mode 100644 solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngineTest.java create mode 100644 
solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/carrot2/MockClusteringAlgorithm.java create mode 100644 solr/contrib/clustering/src/test/resources/solr/conf/mapping-ISOLatin1Accent.txt create mode 100644 solr/contrib/clustering/src/test/resources/solr/conf/protwords.txt create mode 100644 solr/contrib/clustering/src/test/resources/solr/conf/schema.xml create mode 100644 solr/contrib/clustering/src/test/resources/solr/conf/solrconfig.xml create mode 100644 solr/contrib/clustering/src/test/resources/solr/conf/spellings.txt create mode 100644 solr/contrib/clustering/src/test/resources/solr/conf/stopwords.txt create mode 100644 solr/contrib/clustering/src/test/resources/solr/conf/synonyms.txt create mode 100644 solr/contrib/dataimporthandler/CHANGES.txt create mode 100644 solr/contrib/dataimporthandler/build.xml create mode 100644 solr/contrib/dataimporthandler/lib/activation-1.1.jar create mode 100644 solr/contrib/dataimporthandler/lib/mail-1.4.1.jar create mode 100644 solr/contrib/dataimporthandler/solr-dataimporthandler-extras-pom.xml.template create mode 100644 solr/contrib/dataimporthandler/solr-dataimporthandler-pom.xml.template create mode 100644 solr/contrib/dataimporthandler/src/extras/main/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java create mode 100644 solr/contrib/dataimporthandler/src/extras/main/java/org/apache/solr/handler/dataimport/TikaEntityProcessor.java create mode 100644 solr/contrib/dataimporthandler/src/extras/test/java/org/apache/solr/handler/dataimport/TestMailEntityProcessor.java create mode 100644 solr/contrib/dataimporthandler/src/extras/test/java/org/apache/solr/handler/dataimport/TestTikaEntityProcessor.java create mode 100644 solr/contrib/dataimporthandler/src/extras/test/resources/solr/conf/dataimport-schema-no-unique-key.xml create mode 100644 solr/contrib/dataimporthandler/src/extras/test/resources/solr/conf/dataimport-solrconfig.xml create mode 100644 
solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTest.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinContentStreamDataSource.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinFileDataSource.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinURLDataSource.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/CachedSqlEntityProcessor.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/ClobTransformer.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/ContentStreamDataSource.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/Context.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/ContextImpl.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataConfig.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataImportHandler.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataImportHandlerException.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataImporter.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataSource.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DateFormatTransformer.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DebugLogger.java create mode 100644 
solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DocBuilder.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/EntityProcessor.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/EntityProcessorBase.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/EntityProcessorWrapper.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/Evaluator.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/EvaluatorBag.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/EventListener.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FieldReaderDataSource.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FieldStreamDataSource.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FileDataSource.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FileListEntityProcessor.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/HTMLStripTransformer.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/HttpDataSource.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/JdbcDataSource.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/LineEntityProcessor.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/LogTransformer.java create mode 100644 
solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/MockDataSource.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/NumberFormatTransformer.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/PlainTextEntityProcessor.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/RegexTransformer.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/ScriptTransformer.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/SolrWriter.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/SqlEntityProcessor.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/TemplateString.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/TemplateTransformer.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/ThreadedContext.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/ThreadedEntityProcessorWrapper.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/Transformer.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/URLDataSource.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/VariableResolver.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/VariableResolverImpl.java create mode 100644 solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java create mode 100644 
solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/XPathRecordReader.java create mode 100644 solr/contrib/dataimporthandler/src/main/webapp/admin/dataimport.jsp create mode 100644 solr/contrib/dataimporthandler/src/main/webapp/admin/debug.jsp create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/MockInitialContextFactory.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestCachedSqlEntityProcessor.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestClobTransformer.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestContentStreamDataSource.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDataConfig.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDateFormatTransformer.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDocBuilder.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDocBuilder2.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestEntityProcessorBase.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestErrorHandling.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestEvaluatorBag.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestFieldReader.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestFileListEntityProcessor.java create mode 100644 
solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestJdbcDataSource.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestLineEntityProcessor.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestNumberFormatTransformer.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestPlainTextEntityProcessor.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestRegexTransformer.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestScriptTransformer.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessor.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessor2.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta2.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestTemplateString.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestTemplateTransformer.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestThreaded.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestURLDataSource.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestVariableResolver.java create mode 100644 
solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java create mode 100644 solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestXPathRecordReader.java create mode 100644 solr/contrib/dataimporthandler/src/test/resources/solr/conf/contentstream-solrconfig.xml create mode 100644 solr/contrib/dataimporthandler/src/test/resources/solr/conf/data-config-with-datasource.xml create mode 100644 solr/contrib/dataimporthandler/src/test/resources/solr/conf/data-config-with-transformer.xml create mode 100644 solr/contrib/dataimporthandler/src/test/resources/solr/conf/dataconfig-contentstream.xml create mode 100644 solr/contrib/dataimporthandler/src/test/resources/solr/conf/dataimport-nodatasource-solrconfig.xml create mode 100644 solr/contrib/dataimporthandler/src/test/resources/solr/conf/dataimport-schema.xml create mode 100644 solr/contrib/dataimporthandler/src/test/resources/solr/conf/dataimport-solr_id-schema.xml create mode 100644 solr/contrib/dataimporthandler/src/test/resources/solr/conf/dataimport-solrconfig.xml create mode 100644 solr/contrib/dataimporthandler/src/test/resources/solr/conf/protwords.txt create mode 100644 solr/contrib/dataimporthandler/src/test/resources/solr/conf/single-entity-data-config.xml create mode 100644 solr/contrib/dataimporthandler/src/test/resources/solr/conf/stopwords.txt create mode 100644 solr/contrib/dataimporthandler/src/test/resources/solr/conf/synonyms.txt create mode 100644 solr/contrib/extraction/CHANGES.txt create mode 100644 solr/contrib/extraction/build.xml create mode 100644 solr/contrib/extraction/lib/asm-3.1.jar create mode 100644 solr/contrib/extraction/lib/commons-compress-1.0.jar create mode 100644 solr/contrib/extraction/lib/commons-logging-1.1.1.jar create mode 100644 solr/contrib/extraction/lib/dom4j-1.6.1.jar create mode 100644 solr/contrib/extraction/lib/fontbox-0.8.0-incubator.jar create mode 100644 
solr/contrib/extraction/lib/geronimo-stax-api_1.0_spec-1.0.1.jar create mode 100644 solr/contrib/extraction/lib/icu4j-4_2_1.jar create mode 100644 solr/contrib/extraction/lib/jempbox-0.8.0-incubator.jar create mode 100644 solr/contrib/extraction/lib/log4j-1.2.14.jar create mode 100644 solr/contrib/extraction/lib/metadata-extractor-2.4.0-beta-1.jar create mode 100644 solr/contrib/extraction/lib/pdfbox-0.8.0-incubating.jar create mode 100644 solr/contrib/extraction/lib/poi-3.6.jar create mode 100644 solr/contrib/extraction/lib/poi-ooxml-3.6.jar create mode 100644 solr/contrib/extraction/lib/poi-ooxml-schemas-3.6.jar create mode 100644 solr/contrib/extraction/lib/poi-scratchpad-3.6.jar create mode 100644 solr/contrib/extraction/lib/tagsoup-1.2.jar create mode 100644 solr/contrib/extraction/lib/tika-core-0.6.jar create mode 100644 solr/contrib/extraction/lib/tika-parsers-0.6.jar create mode 100644 solr/contrib/extraction/lib/xercesImpl-2.8.1.jar create mode 100644 solr/contrib/extraction/lib/xml-apis-1.0.b2.jar create mode 100644 solr/contrib/extraction/lib/xmlbeans-2.3.0.jar create mode 100644 solr/contrib/extraction/solr-cell-pom.xml.template create mode 100644 solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java create mode 100644 solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/ExtractingMetadataConstants.java create mode 100644 solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/ExtractingParams.java create mode 100644 solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java create mode 100644 solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/SolrContentHandler.java create mode 100644 solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/SolrContentHandlerFactory.java create mode 100644 solr/contrib/extraction/src/test/java/org/apache/solr/handler/ExtractingRequestHandlerTest.java 
create mode 100644 solr/contrib/extraction/src/test/resources/arabic.pdf create mode 100644 solr/contrib/extraction/src/test/resources/example.html create mode 100644 solr/contrib/extraction/src/test/resources/simple.html create mode 100644 solr/contrib/extraction/src/test/resources/solr-word.pdf create mode 100644 solr/contrib/extraction/src/test/resources/solr/conf/protwords.txt create mode 100644 solr/contrib/extraction/src/test/resources/solr/conf/schema.xml create mode 100644 solr/contrib/extraction/src/test/resources/solr/conf/solrconfig.xml create mode 100644 solr/contrib/extraction/src/test/resources/solr/conf/stopwords.txt create mode 100644 solr/contrib/extraction/src/test/resources/solr/conf/synonyms.txt create mode 100644 solr/contrib/extraction/src/test/resources/version_control.txt create mode 100644 solr/contrib/extraction/src/test/resources/version_control.xml create mode 100644 solr/contrib/velocity/build.xml create mode 100644 solr/contrib/velocity/solr-velocity-pom.xml.template create mode 100644 solr/contrib/velocity/src/main/java/footer.vm create mode 100644 solr/contrib/velocity/src/main/java/org/apache/solr/request/PageTool.java create mode 100644 solr/contrib/velocity/src/main/java/org/apache/solr/request/SolrParamResourceLoader.java create mode 100644 solr/contrib/velocity/src/main/java/org/apache/solr/request/SolrVelocityResourceLoader.java create mode 100644 solr/contrib/velocity/src/main/java/org/apache/solr/request/VelocityResponseWriter.java create mode 100644 solr/contrib/velocity/src/main/solr/conf/admin-extra.html create mode 100644 solr/contrib/velocity/src/main/solr/conf/elevate.xml create mode 100644 solr/contrib/velocity/src/main/solr/conf/protwords.txt create mode 100755 solr/contrib/velocity/src/main/solr/conf/schema.xml create mode 100644 solr/contrib/velocity/src/main/solr/conf/scripts.conf create mode 100755 solr/contrib/velocity/src/main/solr/conf/solrconfig.xml create mode 100644 
solr/contrib/velocity/src/main/solr/conf/spellings.txt create mode 100644 solr/contrib/velocity/src/main/solr/conf/stopwords.txt create mode 100644 solr/contrib/velocity/src/main/solr/conf/synonyms.txt create mode 100644 solr/contrib/velocity/src/main/solr/conf/velocity.properties create mode 100644 solr/contrib/velocity/src/main/solr/conf/velocity/VM_global_library.vm create mode 100644 solr/contrib/velocity/src/main/solr/conf/velocity/browse.vm create mode 100644 solr/contrib/velocity/src/main/solr/conf/velocity/debug.vm create mode 100644 solr/contrib/velocity/src/main/solr/conf/velocity/header.vm create mode 100644 solr/contrib/velocity/src/main/solr/conf/velocity/hit.vm create mode 100644 solr/contrib/velocity/src/main/solr/conf/velocity/main.css create mode 100644 solr/contrib/velocity/src/main/solr/conf/xslt/example.xsl create mode 100644 solr/contrib/velocity/src/main/solr/conf/xslt/example_atom.xsl create mode 100644 solr/contrib/velocity/src/main/solr/conf/xslt/example_rss.xsl create mode 100644 solr/contrib/velocity/src/main/solr/conf/xslt/luke.xsl create mode 100644 solr/contrib/velocity/src/main/solr/lib/commons-beanutils-1.7.0.jar create mode 100644 solr/contrib/velocity/src/main/solr/lib/commons-collections-3.2.1.jar create mode 100644 solr/contrib/velocity/src/main/solr/lib/commons-lang-2.4.jar create mode 100644 solr/contrib/velocity/src/main/solr/lib/velocity-1.6.1.jar create mode 100644 solr/contrib/velocity/src/main/solr/lib/velocity-tools-2.0-beta3.jar create mode 100644 solr/contrib/velocity/src/test/java/TODO.txt create mode 100644 solr/example/README.txt create mode 100755 solr/example/etc/jetty.xml create mode 100644 solr/example/etc/webdefault.xml create mode 100644 solr/example/example-DIH/README.txt create mode 100644 solr/example/example-DIH/hsqldb/ex.backup create mode 100644 solr/example/example-DIH/hsqldb/ex.data create mode 100644 solr/example/example-DIH/hsqldb/ex.log create mode 100644 solr/example/example-DIH/hsqldb/ex.properties 
create mode 100644 solr/example/example-DIH/hsqldb/ex.script create mode 100644 solr/example/example-DIH/solr/db/conf/admin-extra.html create mode 100644 solr/example/example-DIH/solr/db/conf/db-data-config.xml create mode 100644 solr/example/example-DIH/solr/db/conf/elevate.xml create mode 100644 solr/example/example-DIH/solr/db/conf/protwords.txt create mode 100644 solr/example/example-DIH/solr/db/conf/schema.xml create mode 100644 solr/example/example-DIH/solr/db/conf/scripts.conf create mode 100644 solr/example/example-DIH/solr/db/conf/solrconfig.xml create mode 100644 solr/example/example-DIH/solr/db/conf/stopwords.txt create mode 100644 solr/example/example-DIH/solr/db/conf/synonyms.txt create mode 100644 solr/example/example-DIH/solr/db/conf/xslt/example.xsl create mode 100644 solr/example/example-DIH/solr/db/conf/xslt/example_atom.xsl create mode 100644 solr/example/example-DIH/solr/db/conf/xslt/example_rss.xsl create mode 100644 solr/example/example-DIH/solr/db/conf/xslt/luke.xsl create mode 100644 solr/example/example-DIH/solr/db/lib/hsqldb-1.8.0.10.jar create mode 100644 solr/example/example-DIH/solr/mail/conf/data-config.xml create mode 100644 solr/example/example-DIH/solr/mail/conf/protwords.txt create mode 100644 solr/example/example-DIH/solr/mail/conf/schema.xml create mode 100644 solr/example/example-DIH/solr/mail/conf/solrconfig.xml create mode 100644 solr/example/example-DIH/solr/mail/conf/stopwords.txt create mode 100644 solr/example/example-DIH/solr/mail/conf/synonyms.txt create mode 100644 solr/example/example-DIH/solr/rss/conf/admin-extra.html create mode 100644 solr/example/example-DIH/solr/rss/conf/elevate.xml create mode 100644 solr/example/example-DIH/solr/rss/conf/protwords.txt create mode 100644 solr/example/example-DIH/solr/rss/conf/rss-data-config.xml create mode 100644 solr/example/example-DIH/solr/rss/conf/schema.xml create mode 100644 solr/example/example-DIH/solr/rss/conf/scripts.conf create mode 100644 
solr/example/example-DIH/solr/rss/conf/solrconfig.xml create mode 100644 solr/example/example-DIH/solr/rss/conf/stopwords.txt create mode 100644 solr/example/example-DIH/solr/rss/conf/synonyms.txt create mode 100644 solr/example/example-DIH/solr/solr.xml create mode 100644 solr/example/example-DIH/solr/tika/conf/schema.xml create mode 100644 solr/example/example-DIH/solr/tika/conf/solrconfig.xml create mode 100644 solr/example/example-DIH/solr/tika/conf/tika-data-config.xml create mode 100644 solr/example/exampledocs/books.csv create mode 100644 solr/example/exampledocs/hd.xml create mode 100644 solr/example/exampledocs/ipod_other.xml create mode 100644 solr/example/exampledocs/ipod_video.xml create mode 100644 solr/example/exampledocs/mem.xml create mode 100644 solr/example/exampledocs/monitor.xml create mode 100644 solr/example/exampledocs/monitor2.xml create mode 100644 solr/example/exampledocs/mp500.xml create mode 100644 solr/example/exampledocs/payload.xml create mode 100755 solr/example/exampledocs/post.sh create mode 100644 solr/example/exampledocs/sd500.xml create mode 100644 solr/example/exampledocs/solr.xml create mode 100755 solr/example/exampledocs/test_utf8.sh create mode 100644 solr/example/exampledocs/utf8-example.xml create mode 100644 solr/example/exampledocs/vidcard.xml create mode 100644 solr/example/lib/jetty-6.1.3.jar create mode 100644 solr/example/lib/jetty-util-6.1.3.jar create mode 100644 solr/example/lib/jsp-2.1/ant-1.6.5.jar create mode 100644 solr/example/lib/jsp-2.1/core-3.1.1.jar create mode 100644 solr/example/lib/jsp-2.1/jsp-2.1.jar create mode 100644 solr/example/lib/jsp-2.1/jsp-api-2.1.jar create mode 100644 solr/example/lib/servlet-api-2.5-6.1.3.jar create mode 100644 solr/example/multicore/README.txt create mode 100644 solr/example/multicore/core0/conf/schema.xml create mode 100644 solr/example/multicore/core0/conf/solrconfig.xml create mode 100644 solr/example/multicore/core1/conf/schema.xml create mode 100644 
solr/example/multicore/core1/conf/solrconfig.xml create mode 100644 solr/example/multicore/exampledocs/ipod_other.xml create mode 100644 solr/example/multicore/exampledocs/ipod_video.xml create mode 100644 solr/example/multicore/solr.xml create mode 100644 solr/example/solr/README.txt create mode 100644 solr/example/solr/conf/admin-extra.html create mode 100644 solr/example/solr/conf/elevate.xml create mode 100644 solr/example/solr/conf/mapping-ISOLatin1Accent.txt create mode 100644 solr/example/solr/conf/protwords.txt create mode 100755 solr/example/solr/conf/schema.xml create mode 100644 solr/example/solr/conf/scripts.conf create mode 100755 solr/example/solr/conf/solrconfig.xml create mode 100644 solr/example/solr/conf/spellings.txt create mode 100644 solr/example/solr/conf/stopwords.txt create mode 100644 solr/example/solr/conf/synonyms.txt create mode 100644 solr/example/solr/conf/xslt/example.xsl create mode 100644 solr/example/solr/conf/xslt/example_atom.xsl create mode 100644 solr/example/solr/conf/xslt/example_rss.xsl create mode 100644 solr/example/solr/conf/xslt/luke.xsl create mode 100644 solr/example/solr/solr.xml create mode 100755 solr/example/start.jar create mode 100644 solr/lib/README.committers.txt create mode 100644 solr/lib/commons-codec-1.3.jar create mode 100755 solr/lib/commons-csv-1.0-SNAPSHOT-r609327.jar create mode 100644 solr/lib/commons-fileupload-1.2.1.jar create mode 100644 solr/lib/commons-httpclient-3.1.jar create mode 100644 solr/lib/commons-io-1.4.jar create mode 100644 solr/lib/easymock.jar create mode 100755 solr/lib/geronimo-stax-api_1.0_spec-1.0.1.jar create mode 100644 solr/lib/google-collect-1.0.jar create mode 100644 solr/lib/jcl-over-slf4j-1.5.5.jar create mode 100644 solr/lib/junit-4.3.jar create mode 100644 solr/lib/lucene-analyzers-2.9.2.jar create mode 100644 solr/lib/lucene-collation-2.9.2.jar create mode 100644 solr/lib/lucene-core-2.9.2.jar create mode 100644 solr/lib/lucene-fast-vector-highlighter-2.9.2.jar create 
mode 100644 solr/lib/lucene-highlighter-2.9.2.jar create mode 100644 solr/lib/lucene-memory-2.9.2.jar create mode 100644 solr/lib/lucene-misc-2.9.2.jar create mode 100644 solr/lib/lucene-queries-2.9.2.jar create mode 100644 solr/lib/lucene-snowball-2.9.2.jar create mode 100644 solr/lib/lucene-spatial-2.9.2.jar create mode 100644 solr/lib/lucene-spellchecker-2.9.2.jar create mode 100755 solr/lib/servlet-api-2.4.jar create mode 100644 solr/lib/slf4j-api-1.5.5.jar create mode 100644 solr/lib/slf4j-jdk14-1.5.5.jar create mode 100644 solr/lib/solr-commons-csv-pom.xml.template create mode 100755 solr/lib/wstx-asl-3.2.7.jar create mode 100755 solr/site/.htaccess create mode 100755 solr/site/broken-links.xml create mode 100644 solr/site/doap.rdf create mode 100755 solr/site/features.html create mode 100755 solr/site/features.pdf create mode 100755 solr/site/images/built-with-forrest-button.png create mode 100755 solr/site/images/favicon.ico create mode 100755 solr/site/images/instruction_arrow.png create mode 100755 solr/site/images/lucene_green_150.gif create mode 100644 solr/site/images/lucidworks_reference_guide.png create mode 100755 solr/site/images/powered_by_solr.ai create mode 100755 solr/site/images/powered_by_solr.eps create mode 100755 solr/site/images/powered_by_solr.png create mode 100755 solr/site/images/powered_by_solr.svg create mode 100644 solr/site/images/solr-book-image.jpg create mode 100644 solr/site/images/solr.jpg create mode 100644 solr/site/images/solr.png create mode 100755 solr/site/index.html create mode 100755 solr/site/index.pdf create mode 100755 solr/site/issue_tracking.html create mode 100755 solr/site/issue_tracking.pdf create mode 100755 solr/site/linkmap.html create mode 100755 solr/site/linkmap.pdf create mode 100755 solr/site/mailing_lists.html create mode 100755 solr/site/mailing_lists.pdf create mode 100755 solr/site/skin/CommonMessages_de.xml create mode 100755 solr/site/skin/CommonMessages_en_US.xml create mode 100755 
solr/site/skin/CommonMessages_es.xml create mode 100755 solr/site/skin/CommonMessages_fr.xml create mode 100755 solr/site/skin/basic.css create mode 100755 solr/site/skin/breadcrumbs-optimized.js create mode 100755 solr/site/skin/breadcrumbs.js create mode 100755 solr/site/skin/fontsize.js create mode 100755 solr/site/skin/forrest.css.xslt create mode 100755 solr/site/skin/getBlank.js create mode 100755 solr/site/skin/getMenu.js create mode 100755 solr/site/skin/images/README.txt create mode 100755 solr/site/skin/images/add.jpg create mode 100755 solr/site/skin/images/built-with-forrest-button.png create mode 100755 solr/site/skin/images/chapter.gif create mode 100755 solr/site/skin/images/chapter_open.gif create mode 100755 solr/site/skin/images/corner-imports.svg.xslt create mode 100755 solr/site/skin/images/current.gif create mode 100755 solr/site/skin/images/dc.svg.xslt create mode 100755 solr/site/skin/images/error.png create mode 100755 solr/site/skin/images/external-link.gif create mode 100755 solr/site/skin/images/fix.jpg create mode 100755 solr/site/skin/images/forrest-credit-logo.png create mode 100755 solr/site/skin/images/hack.jpg create mode 100755 solr/site/skin/images/header_white_line.gif create mode 100755 solr/site/skin/images/info.png create mode 100755 solr/site/skin/images/instruction_arrow.png create mode 100755 solr/site/skin/images/label.gif create mode 100755 solr/site/skin/images/page.gif create mode 100755 solr/site/skin/images/pdfdoc.gif create mode 100755 solr/site/skin/images/poddoc.png create mode 100755 solr/site/skin/images/poddoc.svg.xslt create mode 100755 solr/site/skin/images/printer.gif create mode 100755 solr/site/skin/images/rc-b-l-15-1body-2menu-3menu.png create mode 100755 solr/site/skin/images/rc-b-r-15-1body-2menu-3menu.png create mode 100755 solr/site/skin/images/rc-b-r-5-1header-2tab-selected-3tab-selected.png create mode 100755 solr/site/skin/images/rc-t-l-5-1header-2searchbox-3searchbox.png create mode 100755 
solr/site/skin/images/rc-t-l-5-1header-2tab-selected-3tab-selected.png create mode 100755 solr/site/skin/images/rc-t-l-5-1header-2tab-unselected-3tab-unselected.png create mode 100755 solr/site/skin/images/rc-t-r-15-1body-2menu-3menu.png create mode 100755 solr/site/skin/images/rc-t-r-5-1header-2searchbox-3searchbox.png create mode 100755 solr/site/skin/images/rc-t-r-5-1header-2tab-selected-3tab-selected.png create mode 100755 solr/site/skin/images/rc-t-r-5-1header-2tab-unselected-3tab-unselected.png create mode 100755 solr/site/skin/images/rc.svg.xslt create mode 100755 solr/site/skin/images/remove.jpg create mode 100755 solr/site/skin/images/rss.png create mode 100755 solr/site/skin/images/spacer.gif create mode 100755 solr/site/skin/images/success.png create mode 100755 solr/site/skin/images/txtdoc.png create mode 100755 solr/site/skin/images/txtdoc.svg.xslt create mode 100755 solr/site/skin/images/update.jpg create mode 100755 solr/site/skin/images/valid-html401.png create mode 100755 solr/site/skin/images/vcss.png create mode 100755 solr/site/skin/images/warning.png create mode 100755 solr/site/skin/images/xmldoc.gif create mode 100755 solr/site/skin/menu.js create mode 100755 solr/site/skin/note.txt create mode 100755 solr/site/skin/print.css create mode 100755 solr/site/skin/profile.css create mode 100755 solr/site/skin/profile.css.xslt create mode 100755 solr/site/skin/prototype.js create mode 100755 solr/site/skin/screen.css create mode 100755 solr/site/skin/skinconf.xsl create mode 100755 solr/site/tutorial.html create mode 100755 solr/site/tutorial.pdf create mode 100755 solr/site/version_control.html create mode 100755 solr/site/version_control.pdf create mode 100755 solr/site/who.html create mode 100755 solr/site/who.pdf create mode 100644 solr/src/common/org/apache/solr/common/ResourceLoader.java create mode 100644 solr/src/common/org/apache/solr/common/SolrDocument.java create mode 100644 solr/src/common/org/apache/solr/common/SolrDocumentList.java 
create mode 100644 solr/src/common/org/apache/solr/common/SolrException.java create mode 100644 solr/src/common/org/apache/solr/common/SolrInputDocument.java create mode 100644 solr/src/common/org/apache/solr/common/SolrInputField.java create mode 100644 solr/src/common/org/apache/solr/common/luke/FieldFlag.java create mode 100644 solr/src/common/org/apache/solr/common/params/AnalysisParams.java create mode 100644 solr/src/common/org/apache/solr/common/params/AppendedSolrParams.java create mode 100755 solr/src/common/org/apache/solr/common/params/CommonParams.java create mode 100644 solr/src/common/org/apache/solr/common/params/CoreAdminParams.java create mode 100644 solr/src/common/org/apache/solr/common/params/DefaultSolrParams.java create mode 100755 solr/src/common/org/apache/solr/common/params/DisMaxParams.java create mode 100644 solr/src/common/org/apache/solr/common/params/EventParams.java create mode 100644 solr/src/common/org/apache/solr/common/params/FacetParams.java create mode 100644 solr/src/common/org/apache/solr/common/params/HighlightParams.java create mode 100644 solr/src/common/org/apache/solr/common/params/MapSolrParams.java create mode 100644 solr/src/common/org/apache/solr/common/params/ModifiableSolrParams.java create mode 100644 solr/src/common/org/apache/solr/common/params/MoreLikeThisParams.java create mode 100644 solr/src/common/org/apache/solr/common/params/MultiMapSolrParams.java create mode 100755 solr/src/common/org/apache/solr/common/params/RequiredSolrParams.java create mode 100644 solr/src/common/org/apache/solr/common/params/ShardParams.java create mode 100644 solr/src/common/org/apache/solr/common/params/SolrParams.java create mode 100644 solr/src/common/org/apache/solr/common/params/SpellingParams.java create mode 100644 solr/src/common/org/apache/solr/common/params/StatsParams.java create mode 100644 solr/src/common/org/apache/solr/common/params/TermVectorParams.java create mode 100644 
solr/src/common/org/apache/solr/common/params/TermsParams.java create mode 100644 solr/src/common/org/apache/solr/common/params/UpdateParams.java create mode 100644 solr/src/common/org/apache/solr/common/util/Base64.java create mode 100644 solr/src/common/org/apache/solr/common/util/ConcurrentLRUCache.java create mode 100755 solr/src/common/org/apache/solr/common/util/ContentStream.java create mode 100755 solr/src/common/org/apache/solr/common/util/ContentStreamBase.java create mode 100644 solr/src/common/org/apache/solr/common/util/DOMUtil.java create mode 100644 solr/src/common/org/apache/solr/common/util/DateUtil.java create mode 100755 solr/src/common/org/apache/solr/common/util/FastInputStream.java create mode 100755 solr/src/common/org/apache/solr/common/util/FastOutputStream.java create mode 100755 solr/src/common/org/apache/solr/common/util/FastWriter.java create mode 100644 solr/src/common/org/apache/solr/common/util/FileUtils.java create mode 100755 solr/src/common/org/apache/solr/common/util/Hash.java create mode 100644 solr/src/common/org/apache/solr/common/util/IteratorChain.java create mode 100755 solr/src/common/org/apache/solr/common/util/JavaBinCodec.java create mode 100644 solr/src/common/org/apache/solr/common/util/NamedList.java create mode 100755 solr/src/common/org/apache/solr/common/util/NamedListCodec.java create mode 100644 solr/src/common/org/apache/solr/common/util/RTimer.java create mode 100644 solr/src/common/org/apache/solr/common/util/RegexFileFilter.java create mode 100755 solr/src/common/org/apache/solr/common/util/SimpleOrderedMap.java create mode 100644 solr/src/common/org/apache/solr/common/util/StrUtils.java create mode 100644 solr/src/common/org/apache/solr/common/util/XML.java create mode 100755 solr/src/dev-tools/stub-analysis-factory-maker.pl create mode 100644 solr/src/java/org/apache/solr/analysis/ASCIIFoldingFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/ArabicLetterTokenizerFactory.java 
create mode 100644 solr/src/java/org/apache/solr/analysis/ArabicNormalizationFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/ArabicStemFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/BaseCharFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/BaseTokenFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/BaseTokenizerFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/BrazilianStemFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/BufferedTokenStream.java create mode 100644 solr/src/java/org/apache/solr/analysis/CJKTokenizerFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/CapitalizationFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/CharFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/ChineseFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/ChineseTokenizerFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/CollationKeyFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/CommonGramsFilter.java create mode 100644 solr/src/java/org/apache/solr/analysis/CommonGramsFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/CommonGramsQueryFilter.java create mode 100644 solr/src/java/org/apache/solr/analysis/CommonGramsQueryFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/DelimitedPayloadTokenFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/DictionaryCompoundWordTokenFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/DoubleMetaphoneFilter.java create mode 100644 solr/src/java/org/apache/solr/analysis/DoubleMetaphoneFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/DutchStemFilterFactory.java create mode 100644 
solr/src/java/org/apache/solr/analysis/EdgeNGramFilterFactory.java create mode 100755 solr/src/java/org/apache/solr/analysis/EdgeNGramTokenizerFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/ElisionFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/EnglishPorterFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/FrenchStemFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/GermanStemFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/GreekLowerCaseFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/HTMLStripCharFilter.java create mode 100644 solr/src/java/org/apache/solr/analysis/HTMLStripCharFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/HTMLStripReader.java create mode 100644 solr/src/java/org/apache/solr/analysis/HTMLStripStandardTokenizerFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/HTMLStripWhitespaceTokenizerFactory.java create mode 100755 solr/src/java/org/apache/solr/analysis/HyphenatedWordsFilter.java create mode 100755 solr/src/java/org/apache/solr/analysis/HyphenatedWordsFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/ISOLatin1AccentFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/KeepWordFilter.java create mode 100644 solr/src/java/org/apache/solr/analysis/KeepWordFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/KeywordTokenizerFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/LengthFilter.java create mode 100644 solr/src/java/org/apache/solr/analysis/LengthFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/LetterTokenizerFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/LowerCaseFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/LowerCaseTokenizerFactory.java create mode 
100644 solr/src/java/org/apache/solr/analysis/MappingCharFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/NGramFilterFactory.java create mode 100755 solr/src/java/org/apache/solr/analysis/NGramTokenizerFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/NumericPayloadTokenFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/PatternReplaceCharFilter.java create mode 100644 solr/src/java/org/apache/solr/analysis/PatternReplaceCharFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/PatternReplaceFilter.java create mode 100644 solr/src/java/org/apache/solr/analysis/PatternReplaceFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/PatternTokenizer.java create mode 100644 solr/src/java/org/apache/solr/analysis/PatternTokenizerFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/PersianNormalizationFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/PhoneticFilter.java create mode 100644 solr/src/java/org/apache/solr/analysis/PhoneticFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/PorterStemFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/PositionFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/RemoveDuplicatesTokenFilter.java create mode 100644 solr/src/java/org/apache/solr/analysis/RemoveDuplicatesTokenFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/ReverseStringFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/ReversedWildcardFilter.java create mode 100644 solr/src/java/org/apache/solr/analysis/ReversedWildcardFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/RussianCommon.java create mode 100644 solr/src/java/org/apache/solr/analysis/RussianLetterTokenizerFactory.java create mode 100644 
solr/src/java/org/apache/solr/analysis/RussianLowerCaseFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/RussianStemFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/ShingleFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/SnowballPorterFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/SolrAnalyzer.java create mode 100644 solr/src/java/org/apache/solr/analysis/StandardFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/StandardTokenizerFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/StopFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/SynonymFilter.java create mode 100644 solr/src/java/org/apache/solr/analysis/SynonymFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/SynonymMap.java create mode 100644 solr/src/java/org/apache/solr/analysis/ThaiWordFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/TokenFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/TokenOffsetPayloadTokenFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/TokenizerChain.java create mode 100644 solr/src/java/org/apache/solr/analysis/TokenizerFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/TrieTokenizerFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/TrimFilter.java create mode 100644 solr/src/java/org/apache/solr/analysis/TrimFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/TypeAsPayloadTokenFilterFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/WhitespaceTokenizerFactory.java create mode 100644 solr/src/java/org/apache/solr/analysis/WordDelimiterFilter.java create mode 100644 solr/src/java/org/apache/solr/analysis/WordDelimiterFilterFactory.java create mode 100644 
solr/src/java/org/apache/solr/core/AbstractSolrEventListener.java create mode 100644 solr/src/java/org/apache/solr/core/CloseHook.java create mode 100644 solr/src/java/org/apache/solr/core/Config.java create mode 100644 solr/src/java/org/apache/solr/core/CoreContainer.java create mode 100644 solr/src/java/org/apache/solr/core/CoreDescriptor.java create mode 100644 solr/src/java/org/apache/solr/core/DirectoryFactory.java create mode 100644 solr/src/java/org/apache/solr/core/IndexDeletionPolicyWrapper.java create mode 100644 solr/src/java/org/apache/solr/core/IndexReaderFactory.java create mode 100644 solr/src/java/org/apache/solr/core/JmxMonitoredMap.java create mode 100644 solr/src/java/org/apache/solr/core/PluginInfo.java create mode 100644 solr/src/java/org/apache/solr/core/QuerySenderListener.java create mode 100644 solr/src/java/org/apache/solr/core/RequestHandlers.java create mode 100644 solr/src/java/org/apache/solr/core/RunExecutableListener.java create mode 100644 solr/src/java/org/apache/solr/core/SolrConfig.java create mode 100644 solr/src/java/org/apache/solr/core/SolrCore.java create mode 100644 solr/src/java/org/apache/solr/core/SolrDeletionPolicy.java create mode 100644 solr/src/java/org/apache/solr/core/SolrEventListener.java create mode 100644 solr/src/java/org/apache/solr/core/SolrException.java create mode 100644 solr/src/java/org/apache/solr/core/SolrInfoMBean.java create mode 100644 solr/src/java/org/apache/solr/core/SolrInfoRegistry.java create mode 100644 solr/src/java/org/apache/solr/core/SolrResourceLoader.java create mode 100644 solr/src/java/org/apache/solr/core/StandardDirectoryFactory.java create mode 100644 solr/src/java/org/apache/solr/core/StandardIndexReaderFactory.java create mode 100644 solr/src/java/org/apache/solr/handler/AnalysisRequestHandler.java create mode 100644 solr/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java create mode 100644 solr/src/java/org/apache/solr/handler/BinaryUpdateRequestHandler.java 
create mode 100755 solr/src/java/org/apache/solr/handler/CSVRequestHandler.java create mode 100644 solr/src/java/org/apache/solr/handler/ContentStreamHandlerBase.java create mode 100644 solr/src/java/org/apache/solr/handler/ContentStreamLoader.java create mode 100644 solr/src/java/org/apache/solr/handler/DisMaxRequestHandler.java create mode 100644 solr/src/java/org/apache/solr/handler/DocumentAnalysisRequestHandler.java create mode 100644 solr/src/java/org/apache/solr/handler/DumpRequestHandler.java create mode 100644 solr/src/java/org/apache/solr/handler/FieldAnalysisRequestHandler.java create mode 100644 solr/src/java/org/apache/solr/handler/MoreLikeThisHandler.java create mode 100644 solr/src/java/org/apache/solr/handler/PingRequestHandler.java create mode 100644 solr/src/java/org/apache/solr/handler/ReplicationHandler.java create mode 100644 solr/src/java/org/apache/solr/handler/RequestHandlerBase.java create mode 100755 solr/src/java/org/apache/solr/handler/RequestHandlerUtils.java create mode 100644 solr/src/java/org/apache/solr/handler/SnapPuller.java create mode 100644 solr/src/java/org/apache/solr/handler/SnapShooter.java create mode 100644 solr/src/java/org/apache/solr/handler/SpellCheckerRequestHandler.java create mode 100644 solr/src/java/org/apache/solr/handler/StandardRequestHandler.java create mode 100644 solr/src/java/org/apache/solr/handler/SystemInfoRequestHandler.java create mode 100644 solr/src/java/org/apache/solr/handler/XMLLoader.java create mode 100644 solr/src/java/org/apache/solr/handler/XmlUpdateRequestHandler.java create mode 100644 solr/src/java/org/apache/solr/handler/admin/AdminHandlers.java create mode 100644 solr/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java create mode 100644 solr/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java create mode 100644 solr/src/java/org/apache/solr/handler/admin/PluginInfoHandler.java create mode 100644 
solr/src/java/org/apache/solr/handler/admin/PropertiesRequestHandler.java create mode 100644 solr/src/java/org/apache/solr/handler/admin/ShowFileRequestHandler.java create mode 100644 solr/src/java/org/apache/solr/handler/admin/SystemInfoHandler.java create mode 100644 solr/src/java/org/apache/solr/handler/admin/ThreadDumpHandler.java create mode 100644 solr/src/java/org/apache/solr/handler/component/DebugComponent.java create mode 100644 solr/src/java/org/apache/solr/handler/component/FacetComponent.java create mode 100644 solr/src/java/org/apache/solr/handler/component/FieldFacetStats.java create mode 100644 solr/src/java/org/apache/solr/handler/component/HighlightComponent.java create mode 100644 solr/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java create mode 100644 solr/src/java/org/apache/solr/handler/component/QueryComponent.java create mode 100644 solr/src/java/org/apache/solr/handler/component/QueryElevationComponent.java create mode 100644 solr/src/java/org/apache/solr/handler/component/ResponseBuilder.java create mode 100644 solr/src/java/org/apache/solr/handler/component/SearchComponent.java create mode 100644 solr/src/java/org/apache/solr/handler/component/SearchHandler.java create mode 100755 solr/src/java/org/apache/solr/handler/component/ShardDoc.java create mode 100755 solr/src/java/org/apache/solr/handler/component/ShardRequest.java create mode 100755 solr/src/java/org/apache/solr/handler/component/ShardResponse.java create mode 100644 solr/src/java/org/apache/solr/handler/component/SpellCheckComponent.java create mode 100644 solr/src/java/org/apache/solr/handler/component/StatsComponent.java create mode 100644 solr/src/java/org/apache/solr/handler/component/StatsValues.java create mode 100644 solr/src/java/org/apache/solr/handler/component/TermVectorComponent.java create mode 100644 solr/src/java/org/apache/solr/handler/component/TermsComponent.java create mode 100644 
solr/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java create mode 100644 solr/src/java/org/apache/solr/highlight/GapFragmenter.java create mode 100644 solr/src/java/org/apache/solr/highlight/HighlightingPluginBase.java create mode 100644 solr/src/java/org/apache/solr/highlight/HtmlFormatter.java create mode 100644 solr/src/java/org/apache/solr/highlight/MultiColoredScoreOrderFragmentsBuilder.java create mode 100644 solr/src/java/org/apache/solr/highlight/MultiColoredSimpleFragmentsBuilder.java create mode 100644 solr/src/java/org/apache/solr/highlight/RegexFragmenter.java create mode 100644 solr/src/java/org/apache/solr/highlight/ScoreOrderFragmentsBuilder.java create mode 100644 solr/src/java/org/apache/solr/highlight/SimpleFragListBuilder.java create mode 100644 solr/src/java/org/apache/solr/highlight/SimpleFragmentsBuilder.java create mode 100644 solr/src/java/org/apache/solr/highlight/SolrFormatter.java create mode 100644 solr/src/java/org/apache/solr/highlight/SolrFragListBuilder.java create mode 100644 solr/src/java/org/apache/solr/highlight/SolrFragmenter.java create mode 100644 solr/src/java/org/apache/solr/highlight/SolrFragmentsBuilder.java create mode 100644 solr/src/java/org/apache/solr/highlight/SolrHighlighter.java create mode 100644 solr/src/java/org/apache/solr/request/AppendedSolrParams.java create mode 100644 solr/src/java/org/apache/solr/request/BinaryQueryResponseWriter.java create mode 100644 solr/src/java/org/apache/solr/request/BinaryResponseWriter.java create mode 100644 solr/src/java/org/apache/solr/request/DefaultSolrParams.java create mode 100644 solr/src/java/org/apache/solr/request/DisMaxRequestHandler.java create mode 100644 solr/src/java/org/apache/solr/request/JSONResponseWriter.java create mode 100644 solr/src/java/org/apache/solr/request/LocalSolrQueryRequest.java create mode 100644 solr/src/java/org/apache/solr/request/MapSolrParams.java create mode 100644 solr/src/java/org/apache/solr/request/MultiMapSolrParams.java 
create mode 100644 solr/src/java/org/apache/solr/request/PHPResponseWriter.java create mode 100644 solr/src/java/org/apache/solr/request/PHPSerializedResponseWriter.java create mode 100644 solr/src/java/org/apache/solr/request/PythonResponseWriter.java create mode 100644 solr/src/java/org/apache/solr/request/QueryResponseWriter.java create mode 100644 solr/src/java/org/apache/solr/request/RawResponseWriter.java create mode 100644 solr/src/java/org/apache/solr/request/RequiredSolrParams.java create mode 100644 solr/src/java/org/apache/solr/request/RubyResponseWriter.java create mode 100644 solr/src/java/org/apache/solr/request/ServletSolrParams.java create mode 100644 solr/src/java/org/apache/solr/request/SimpleFacets.java create mode 100644 solr/src/java/org/apache/solr/request/SolrParams.java create mode 100644 solr/src/java/org/apache/solr/request/SolrQueryRequest.java create mode 100644 solr/src/java/org/apache/solr/request/SolrQueryRequestBase.java create mode 100644 solr/src/java/org/apache/solr/request/SolrQueryResponse.java create mode 100644 solr/src/java/org/apache/solr/request/SolrRequestHandler.java create mode 100644 solr/src/java/org/apache/solr/request/StandardRequestHandler.java create mode 100644 solr/src/java/org/apache/solr/request/TextResponseWriter.java create mode 100755 solr/src/java/org/apache/solr/request/UnInvertedField.java create mode 100644 solr/src/java/org/apache/solr/request/XMLResponseWriter.java create mode 100644 solr/src/java/org/apache/solr/request/XSLTResponseWriter.java create mode 100644 solr/src/java/org/apache/solr/response/BaseResponseWriter.java create mode 100755 solr/src/java/org/apache/solr/response/BinaryQueryResponseWriter.java create mode 100755 solr/src/java/org/apache/solr/response/BinaryResponseWriter.java create mode 100644 solr/src/java/org/apache/solr/response/GenericBinaryResponseWriter.java create mode 100644 solr/src/java/org/apache/solr/response/GenericTextResponseWriter.java create mode 100644 
solr/src/java/org/apache/solr/response/JSONResponseWriter.java create mode 100755 solr/src/java/org/apache/solr/response/PHPResponseWriter.java create mode 100755 solr/src/java/org/apache/solr/response/PHPSerializedResponseWriter.java create mode 100644 solr/src/java/org/apache/solr/response/PythonResponseWriter.java create mode 100644 solr/src/java/org/apache/solr/response/QueryResponseWriter.java create mode 100644 solr/src/java/org/apache/solr/response/RawResponseWriter.java create mode 100644 solr/src/java/org/apache/solr/response/RubyResponseWriter.java create mode 100644 solr/src/java/org/apache/solr/response/SolrQueryResponse.java create mode 100644 solr/src/java/org/apache/solr/response/TextResponseWriter.java create mode 100644 solr/src/java/org/apache/solr/response/XMLResponseWriter.java create mode 100644 solr/src/java/org/apache/solr/response/XMLWriter.java create mode 100644 solr/src/java/org/apache/solr/response/XSLTResponseWriter.java create mode 100644 solr/src/java/org/apache/solr/schema/AbstractSubTypeFieldType.java create mode 100644 solr/src/java/org/apache/solr/schema/BCDIntField.java create mode 100644 solr/src/java/org/apache/solr/schema/BCDLongField.java create mode 100644 solr/src/java/org/apache/solr/schema/BCDStrField.java create mode 100644 solr/src/java/org/apache/solr/schema/BinaryField.java create mode 100644 solr/src/java/org/apache/solr/schema/BoolField.java create mode 100644 solr/src/java/org/apache/solr/schema/ByteField.java create mode 100644 solr/src/java/org/apache/solr/schema/CompressableField.java create mode 100644 solr/src/java/org/apache/solr/schema/CoordinateFieldType.java create mode 100644 solr/src/java/org/apache/solr/schema/CopyField.java create mode 100644 solr/src/java/org/apache/solr/schema/DateField.java create mode 100644 solr/src/java/org/apache/solr/schema/DoubleField.java create mode 100755 solr/src/java/org/apache/solr/schema/ExternalFileField.java create mode 100644 
solr/src/java/org/apache/solr/schema/FieldProperties.java create mode 100644 solr/src/java/org/apache/solr/schema/FieldType.java create mode 100644 solr/src/java/org/apache/solr/schema/FloatField.java create mode 100644 solr/src/java/org/apache/solr/schema/GeoHashField.java create mode 100644 solr/src/java/org/apache/solr/schema/IndexSchema.java create mode 100644 solr/src/java/org/apache/solr/schema/IntField.java create mode 100644 solr/src/java/org/apache/solr/schema/LegacyDateField.java create mode 100644 solr/src/java/org/apache/solr/schema/LongField.java create mode 100644 solr/src/java/org/apache/solr/schema/PointType.java create mode 100644 solr/src/java/org/apache/solr/schema/RandomSortField.java create mode 100644 solr/src/java/org/apache/solr/schema/SchemaAware.java create mode 100644 solr/src/java/org/apache/solr/schema/SchemaField.java create mode 100644 solr/src/java/org/apache/solr/schema/ShortField.java create mode 100644 solr/src/java/org/apache/solr/schema/SimilarityFactory.java create mode 100644 solr/src/java/org/apache/solr/schema/SortableDoubleField.java create mode 100644 solr/src/java/org/apache/solr/schema/SortableFloatField.java create mode 100644 solr/src/java/org/apache/solr/schema/SortableIntField.java create mode 100644 solr/src/java/org/apache/solr/schema/SortableLongField.java create mode 100644 solr/src/java/org/apache/solr/schema/SpatialTileField.java create mode 100644 solr/src/java/org/apache/solr/schema/StrField.java create mode 100644 solr/src/java/org/apache/solr/schema/TextField.java create mode 100755 solr/src/java/org/apache/solr/schema/TrieDateField.java create mode 100755 solr/src/java/org/apache/solr/schema/TrieDoubleField.java create mode 100644 solr/src/java/org/apache/solr/schema/TrieField.java create mode 100755 solr/src/java/org/apache/solr/schema/TrieFloatField.java create mode 100755 solr/src/java/org/apache/solr/schema/TrieIntField.java create mode 100755 solr/src/java/org/apache/solr/schema/TrieLongField.java 
create mode 100644 solr/src/java/org/apache/solr/schema/UUIDField.java create mode 100644 solr/src/java/org/apache/solr/search/BitDocSet.java create mode 100755 solr/src/java/org/apache/solr/search/BoostQParserPlugin.java create mode 100644 solr/src/java/org/apache/solr/search/CacheConfig.java create mode 100644 solr/src/java/org/apache/solr/search/CacheRegenerator.java create mode 100644 solr/src/java/org/apache/solr/search/ConstantScorePrefixQuery.java create mode 100644 solr/src/java/org/apache/solr/search/DisMaxQParser.java create mode 100755 solr/src/java/org/apache/solr/search/DisMaxQParserPlugin.java create mode 100644 solr/src/java/org/apache/solr/search/DocIterator.java create mode 100644 solr/src/java/org/apache/solr/search/DocList.java create mode 100644 solr/src/java/org/apache/solr/search/DocListAndSet.java create mode 100644 solr/src/java/org/apache/solr/search/DocSet.java create mode 100644 solr/src/java/org/apache/solr/search/DocSetHitCollector.java create mode 100644 solr/src/java/org/apache/solr/search/DocSlice.java create mode 100755 solr/src/java/org/apache/solr/search/ExtendedDismaxQParserPlugin.java create mode 100644 solr/src/java/org/apache/solr/search/FastLRUCache.java create mode 100644 solr/src/java/org/apache/solr/search/FieldQParserPlugin.java create mode 100755 solr/src/java/org/apache/solr/search/FunctionQParser.java create mode 100644 solr/src/java/org/apache/solr/search/FunctionQParserPlugin.java create mode 100755 solr/src/java/org/apache/solr/search/FunctionRangeQParserPlugin.java create mode 100644 solr/src/java/org/apache/solr/search/HashDocSet.java create mode 100644 solr/src/java/org/apache/solr/search/LRUCache.java create mode 100755 solr/src/java/org/apache/solr/search/LuceneQParserPlugin.java create mode 100644 solr/src/java/org/apache/solr/search/LuceneQueryOptimizer.java create mode 100644 solr/src/java/org/apache/solr/search/MissingStringLastComparatorSource.java create mode 100755 
solr/src/java/org/apache/solr/search/NestedQParserPlugin.java create mode 100755 solr/src/java/org/apache/solr/search/OldLuceneQParserPlugin.java create mode 100644 solr/src/java/org/apache/solr/search/PrefixFilter.java create mode 100755 solr/src/java/org/apache/solr/search/PrefixQParserPlugin.java create mode 100755 solr/src/java/org/apache/solr/search/QParser.java create mode 100755 solr/src/java/org/apache/solr/search/QParserPlugin.java create mode 100644 solr/src/java/org/apache/solr/search/QueryParsing.java create mode 100644 solr/src/java/org/apache/solr/search/QueryResultKey.java create mode 100755 solr/src/java/org/apache/solr/search/QueryUtils.java create mode 100644 solr/src/java/org/apache/solr/search/RawQParserPlugin.java create mode 100644 solr/src/java/org/apache/solr/search/SolrCache.java create mode 100755 solr/src/java/org/apache/solr/search/SolrConstantScoreQuery.java create mode 100644 solr/src/java/org/apache/solr/search/SolrFieldCacheMBean.java create mode 100644 solr/src/java/org/apache/solr/search/SolrFilter.java create mode 100755 solr/src/java/org/apache/solr/search/SolrIndexReader.java create mode 100644 solr/src/java/org/apache/solr/search/SolrIndexSearcher.java create mode 100644 solr/src/java/org/apache/solr/search/SolrQueryParser.java create mode 100644 solr/src/java/org/apache/solr/search/SolrSimilarity.java create mode 100644 solr/src/java/org/apache/solr/search/SortSpec.java create mode 100755 solr/src/java/org/apache/solr/search/SortedIntDocSet.java create mode 100644 solr/src/java/org/apache/solr/search/Sorting.java create mode 100755 solr/src/java/org/apache/solr/search/ValueSourceParser.java create mode 100755 solr/src/java/org/apache/solr/search/WildcardFilter.java create mode 100755 solr/src/java/org/apache/solr/search/function/BoostedQuery.java create mode 100644 solr/src/java/org/apache/solr/search/function/ByteFieldSource.java create mode 100755 solr/src/java/org/apache/solr/search/function/ConstValueSource.java create 
mode 100755 solr/src/java/org/apache/solr/search/function/DivFloatFunction.java create mode 100644 solr/src/java/org/apache/solr/search/function/DocValues.java create mode 100644 solr/src/java/org/apache/solr/search/function/DoubleFieldSource.java create mode 100755 solr/src/java/org/apache/solr/search/function/DualFloatFunction.java create mode 100644 solr/src/java/org/apache/solr/search/function/FieldCacheSource.java create mode 100755 solr/src/java/org/apache/solr/search/function/FileFloatSource.java create mode 100644 solr/src/java/org/apache/solr/search/function/FloatFieldSource.java create mode 100644 solr/src/java/org/apache/solr/search/function/FunctionQuery.java create mode 100644 solr/src/java/org/apache/solr/search/function/IntFieldSource.java create mode 100644 solr/src/java/org/apache/solr/search/function/LinearFloatFunction.java create mode 100644 solr/src/java/org/apache/solr/search/function/LiteralValueSource.java create mode 100644 solr/src/java/org/apache/solr/search/function/LongFieldSource.java create mode 100644 solr/src/java/org/apache/solr/search/function/MaxFloatFunction.java create mode 100644 solr/src/java/org/apache/solr/search/function/MultiFloatFunction.java create mode 100644 solr/src/java/org/apache/solr/search/function/MultiValueSource.java create mode 100644 solr/src/java/org/apache/solr/search/function/OrdFieldSource.java create mode 100755 solr/src/java/org/apache/solr/search/function/PowFloatFunction.java create mode 100755 solr/src/java/org/apache/solr/search/function/ProductFloatFunction.java create mode 100755 solr/src/java/org/apache/solr/search/function/QueryValueSource.java create mode 100755 solr/src/java/org/apache/solr/search/function/RangeMapFloatFunction.java create mode 100644 solr/src/java/org/apache/solr/search/function/ReciprocalFloatFunction.java create mode 100644 solr/src/java/org/apache/solr/search/function/ReverseOrdFieldSource.java create mode 100755 
solr/src/java/org/apache/solr/search/function/ScaleFloatFunction.java create mode 100644 solr/src/java/org/apache/solr/search/function/ShortFieldSource.java create mode 100755 solr/src/java/org/apache/solr/search/function/SimpleFloatFunction.java create mode 100755 solr/src/java/org/apache/solr/search/function/SingleFunction.java create mode 100755 solr/src/java/org/apache/solr/search/function/StringIndexDocValues.java create mode 100755 solr/src/java/org/apache/solr/search/function/SumFloatFunction.java create mode 100755 solr/src/java/org/apache/solr/search/function/TopValueSource.java create mode 100644 solr/src/java/org/apache/solr/search/function/ValueSource.java create mode 100755 solr/src/java/org/apache/solr/search/function/ValueSourceRangeFilter.java create mode 100644 solr/src/java/org/apache/solr/search/function/VectorValueSource.java create mode 100644 solr/src/java/org/apache/solr/search/function/distance/Constants.java create mode 100644 solr/src/java/org/apache/solr/search/function/distance/DistanceUtils.java create mode 100644 solr/src/java/org/apache/solr/search/function/distance/GeohashFunction.java create mode 100644 solr/src/java/org/apache/solr/search/function/distance/GeohashHaversineFunction.java create mode 100644 solr/src/java/org/apache/solr/search/function/distance/HaversineFunction.java create mode 100644 solr/src/java/org/apache/solr/search/function/distance/SquaredEuclideanFunction.java create mode 100644 solr/src/java/org/apache/solr/search/function/distance/StringDistanceFunction.java create mode 100644 solr/src/java/org/apache/solr/search/function/distance/VectorDistanceFunction.java create mode 100644 solr/src/java/org/apache/solr/spelling/AbstractLuceneSpellChecker.java create mode 100644 solr/src/java/org/apache/solr/spelling/FileBasedSpellChecker.java create mode 100644 solr/src/java/org/apache/solr/spelling/IndexBasedSpellChecker.java create mode 100644 solr/src/java/org/apache/solr/spelling/QueryConverter.java create mode 
100644 solr/src/java/org/apache/solr/spelling/SolrSpellChecker.java create mode 100644 solr/src/java/org/apache/solr/spelling/SpellingQueryConverter.java create mode 100644 solr/src/java/org/apache/solr/spelling/SpellingResult.java create mode 100644 solr/src/java/org/apache/solr/tst/OldRequestHandler.java create mode 100644 solr/src/java/org/apache/solr/tst/TestRequestHandler.java create mode 100644 solr/src/java/org/apache/solr/update/AddUpdateCommand.java create mode 100644 solr/src/java/org/apache/solr/update/CommitUpdateCommand.java create mode 100644 solr/src/java/org/apache/solr/update/DeleteUpdateCommand.java create mode 100644 solr/src/java/org/apache/solr/update/DirectUpdateHandler.java create mode 100644 solr/src/java/org/apache/solr/update/DirectUpdateHandler2.java create mode 100644 solr/src/java/org/apache/solr/update/DocumentBuilder.java create mode 100644 solr/src/java/org/apache/solr/update/MergeIndexesCommand.java create mode 100644 solr/src/java/org/apache/solr/update/RollbackUpdateCommand.java create mode 100644 solr/src/java/org/apache/solr/update/SolrIndexConfig.java create mode 100644 solr/src/java/org/apache/solr/update/SolrIndexWriter.java create mode 100644 solr/src/java/org/apache/solr/update/UpdateCommand.java create mode 100644 solr/src/java/org/apache/solr/update/UpdateHandler.java create mode 100644 solr/src/java/org/apache/solr/update/processor/LogUpdateProcessorFactory.java create mode 100755 solr/src/java/org/apache/solr/update/processor/Lookup3Signature.java create mode 100755 solr/src/java/org/apache/solr/update/processor/MD5Signature.java create mode 100644 solr/src/java/org/apache/solr/update/processor/RunUpdateProcessorFactory.java create mode 100755 solr/src/java/org/apache/solr/update/processor/Signature.java create mode 100755 solr/src/java/org/apache/solr/update/processor/SignatureUpdateProcessorFactory.java create mode 100755 solr/src/java/org/apache/solr/update/processor/TextProfileSignature.java create mode 100644 
solr/src/java/org/apache/solr/update/processor/UpdateRequestProcessor.java create mode 100644 solr/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java create mode 100644 solr/src/java/org/apache/solr/update/processor/UpdateRequestProcessorFactory.java create mode 100644 solr/src/java/org/apache/solr/util/AbstractSolrTestCase.java create mode 100644 solr/src/java/org/apache/solr/util/ArraysUtils.java create mode 100644 solr/src/java/org/apache/solr/util/BCDUtils.java create mode 100644 solr/src/java/org/apache/solr/util/BitSetIterator.java create mode 100644 solr/src/java/org/apache/solr/util/BitUtil.java create mode 100644 solr/src/java/org/apache/solr/util/BoundedTreeSet.java create mode 100755 solr/src/java/org/apache/solr/util/CharArrayMap.java create mode 100644 solr/src/java/org/apache/solr/util/CommonParams.java create mode 100644 solr/src/java/org/apache/solr/util/ContentStream.java create mode 100644 solr/src/java/org/apache/solr/util/ContentStreamBase.java create mode 100644 solr/src/java/org/apache/solr/util/DOMUtil.java create mode 100644 solr/src/java/org/apache/solr/util/DateMathParser.java create mode 100644 solr/src/java/org/apache/solr/util/DisMaxParams.java create mode 100644 solr/src/java/org/apache/solr/util/HighFrequencyDictionary.java create mode 100644 solr/src/java/org/apache/solr/util/HighlightingUtils.java create mode 100644 solr/src/java/org/apache/solr/util/IteratorChain.java create mode 100644 solr/src/java/org/apache/solr/util/NamedList.java create mode 100644 solr/src/java/org/apache/solr/util/NumberUtils.java create mode 100644 solr/src/java/org/apache/solr/util/OpenBitSet.java create mode 100644 solr/src/java/org/apache/solr/util/RefCounted.java create mode 100644 solr/src/java/org/apache/solr/util/SimpleOrderedMap.java create mode 100644 solr/src/java/org/apache/solr/util/SimplePostTool.java create mode 100644 solr/src/java/org/apache/solr/util/SolrPluginUtils.java create mode 100644 
solr/src/java/org/apache/solr/util/StrUtils.java create mode 100644 solr/src/java/org/apache/solr/util/SuggestMissingFactories.java create mode 100644 solr/src/java/org/apache/solr/util/TestHarness.java create mode 100644 solr/src/java/org/apache/solr/util/UpdateParams.java create mode 100644 solr/src/java/org/apache/solr/util/VersionedFile.java create mode 100644 solr/src/java/org/apache/solr/util/XML.java create mode 100644 solr/src/java/org/apache/solr/util/doc-files/min-should-match.html create mode 100644 solr/src/java/org/apache/solr/util/plugin/AbstractPluginLoader.java create mode 100644 solr/src/java/org/apache/solr/util/plugin/MapInitializedPlugin.java create mode 100644 solr/src/java/org/apache/solr/util/plugin/MapPluginLoader.java create mode 100644 solr/src/java/org/apache/solr/util/plugin/NamedListInitializedPlugin.java create mode 100644 solr/src/java/org/apache/solr/util/plugin/NamedListPluginLoader.java create mode 100644 solr/src/java/org/apache/solr/util/plugin/PluginInfoInitialized.java create mode 100644 solr/src/java/org/apache/solr/util/plugin/ResourceLoaderAware.java create mode 100644 solr/src/java/org/apache/solr/util/plugin/SolrCoreAware.java create mode 100644 solr/src/java/org/apache/solr/util/xslt/TransformerProvider.java create mode 100644 solr/src/maven/solr-core-pom.xml.template create mode 100644 solr/src/maven/solr-parent-pom.xml.template create mode 100644 solr/src/maven/solr-solrj-pom.xml.template create mode 100755 solr/src/scripts/abc create mode 100755 solr/src/scripts/abo create mode 100755 solr/src/scripts/backup create mode 100755 solr/src/scripts/backupcleaner create mode 100755 solr/src/scripts/commit create mode 100755 solr/src/scripts/optimize create mode 100755 solr/src/scripts/readercycle create mode 100755 solr/src/scripts/rsyncd-disable create mode 100755 solr/src/scripts/rsyncd-enable create mode 100755 solr/src/scripts/rsyncd-start create mode 100755 solr/src/scripts/rsyncd-stop create mode 100755 
solr/src/scripts/scripts-util create mode 100755 solr/src/scripts/snapcleaner create mode 100755 solr/src/scripts/snapinstaller create mode 100755 solr/src/scripts/snappuller create mode 100755 solr/src/scripts/snappuller-disable create mode 100755 solr/src/scripts/snappuller-enable create mode 100755 solr/src/scripts/snapshooter create mode 100644 solr/src/site/README.txt create mode 100755 solr/src/site/forrest.properties create mode 100755 solr/src/site/src/documentation/classes/CatalogManager.properties create mode 100644 solr/src/site/src/documentation/content/.htaccess create mode 100755 solr/src/site/src/documentation/content/xdocs/features.xml create mode 100755 solr/src/site/src/documentation/content/xdocs/images/favicon.ico create mode 100644 solr/src/site/src/documentation/content/xdocs/images/lucene_green_150.gif create mode 100644 solr/src/site/src/documentation/content/xdocs/images/lucidworks_reference_guide.png create mode 100755 solr/src/site/src/documentation/content/xdocs/images/powered_by_solr.ai create mode 100755 solr/src/site/src/documentation/content/xdocs/images/powered_by_solr.eps create mode 100755 solr/src/site/src/documentation/content/xdocs/images/powered_by_solr.png create mode 100755 solr/src/site/src/documentation/content/xdocs/images/powered_by_solr.svg create mode 100644 solr/src/site/src/documentation/content/xdocs/images/solr-book-image.jpg create mode 100644 solr/src/site/src/documentation/content/xdocs/images/solr.jpg create mode 100644 solr/src/site/src/documentation/content/xdocs/images/solr_FC.eps create mode 100644 solr/src/site/src/documentation/content/xdocs/images/solr_FC.svg create mode 100755 solr/src/site/src/documentation/content/xdocs/index.xml create mode 100755 solr/src/site/src/documentation/content/xdocs/issue_tracking.xml create mode 100755 solr/src/site/src/documentation/content/xdocs/mailing_lists.xml create mode 100755 solr/src/site/src/documentation/content/xdocs/site.xml create mode 100755 
solr/src/site/src/documentation/content/xdocs/tabs.xml create mode 100755 solr/src/site/src/documentation/content/xdocs/tutorial.xml create mode 100755 solr/src/site/src/documentation/content/xdocs/version_control.xml create mode 100755 solr/src/site/src/documentation/content/xdocs/who.xml create mode 100644 solr/src/site/src/documentation/resources/schema/catalog.xcat create mode 100755 solr/src/site/src/documentation/skinconf.xml create mode 100644 solr/src/site/src/documentation/skins/common/css/forrest.css.xslt create mode 100644 solr/src/site/src/documentation/skins/common/images/README.txt create mode 100644 solr/src/site/src/documentation/skins/common/images/corner-imports.svg.xslt create mode 100644 solr/src/site/src/documentation/skins/common/images/dc.svg.xslt create mode 100644 solr/src/site/src/documentation/skins/common/images/poddoc.svg.xslt create mode 100644 solr/src/site/src/documentation/skins/common/images/rc.svg.xslt create mode 100644 solr/src/site/src/documentation/skins/common/images/txtdoc.svg.xslt create mode 100644 solr/src/site/src/documentation/skins/common/scripts/breadcrumbs-optimized.js create mode 100644 solr/src/site/src/documentation/skins/common/scripts/breadcrumbs.js create mode 100644 solr/src/site/src/documentation/skins/common/scripts/fontsize.js create mode 100644 solr/src/site/src/documentation/skins/common/scripts/getBlank.js create mode 100644 solr/src/site/src/documentation/skins/common/scripts/getMenu.js create mode 100644 solr/src/site/src/documentation/skins/common/scripts/menu.js create mode 100644 solr/src/site/src/documentation/skins/common/scripts/prototype.js create mode 100644 solr/src/site/src/documentation/skins/common/skinconf.xsl create mode 100644 solr/src/site/src/documentation/skins/common/translations/CommonMessages_de.xml create mode 100644 solr/src/site/src/documentation/skins/common/translations/CommonMessages_en_US.xml create mode 100644 
solr/src/site/src/documentation/skins/common/translations/CommonMessages_es.xml create mode 100644 solr/src/site/src/documentation/skins/common/translations/CommonMessages_fr.xml create mode 100644 solr/src/site/src/documentation/skins/common/xslt/fo/document-to-fo.xsl create mode 100644 solr/src/site/src/documentation/skins/common/xslt/fo/footerinfo.xsl create mode 100644 solr/src/site/src/documentation/skins/common/xslt/fo/pdfoutline.xsl create mode 100644 solr/src/site/src/documentation/skins/common/xslt/html/book-to-menu.xsl create mode 100644 solr/src/site/src/documentation/skins/common/xslt/html/document-to-html.xsl create mode 100644 solr/src/site/src/documentation/skins/common/xslt/html/dotdots.xsl create mode 100644 solr/src/site/src/documentation/skins/common/xslt/html/pathutils.xsl create mode 100644 solr/src/site/src/documentation/skins/common/xslt/html/renderlogo.xsl create mode 100644 solr/src/site/src/documentation/skins/common/xslt/html/site-to-xhtml.xsl create mode 100644 solr/src/site/src/documentation/skins/common/xslt/html/split.xsl create mode 100644 solr/src/site/src/documentation/skins/common/xslt/html/strip_namespaces.xsl create mode 100644 solr/src/site/src/documentation/skins/common/xslt/html/tab-to-menu.xsl create mode 100644 solr/src/site/src/documentation/skins/common/xslt/html/tabutils.xsl create mode 100644 solr/src/site/src/documentation/skins/common/xslt/svg/document-to-svg.xsl create mode 100644 solr/src/site/src/documentation/skins/lucene/css/basic.css create mode 100644 solr/src/site/src/documentation/skins/lucene/css/print.css create mode 100644 solr/src/site/src/documentation/skins/lucene/css/profile.css.xslt create mode 100644 solr/src/site/src/documentation/skins/lucene/css/screen.css create mode 100755 solr/src/site/src/documentation/skins/lucene/images/chapter.gif create mode 100755 solr/src/site/src/documentation/skins/lucene/images/chapter_open.gif create mode 100755 
solr/src/site/src/documentation/skins/lucene/images/current.gif create mode 100755 solr/src/site/src/documentation/skins/lucene/images/error.png create mode 100755 solr/src/site/src/documentation/skins/lucene/images/header_white_line.gif create mode 100755 solr/src/site/src/documentation/skins/lucene/images/info.png create mode 100755 solr/src/site/src/documentation/skins/lucene/images/instruction_arrow.png create mode 100755 solr/src/site/src/documentation/skins/lucene/images/label.gif create mode 100755 solr/src/site/src/documentation/skins/lucene/images/page.gif create mode 100755 solr/src/site/src/documentation/skins/lucene/images/pdfdoc.gif create mode 100755 solr/src/site/src/documentation/skins/lucene/images/printer.gif create mode 100755 solr/src/site/src/documentation/skins/lucene/images/success.png create mode 100755 solr/src/site/src/documentation/skins/lucene/images/warning.png create mode 100755 solr/src/site/src/documentation/skins/lucene/images/xmldoc.gif create mode 100644 solr/src/site/src/documentation/skins/lucene/note.txt create mode 100644 solr/src/site/src/documentation/skins/lucene/skinconf.xsl create mode 100644 solr/src/site/src/documentation/skins/lucene/xslt/fo/document-to-fo.xsl create mode 100644 solr/src/site/src/documentation/skins/lucene/xslt/html/book-to-menu.xsl create mode 100644 solr/src/site/src/documentation/skins/lucene/xslt/html/document-to-html.xsl create mode 100644 solr/src/site/src/documentation/skins/lucene/xslt/html/site-to-xhtml.xsl create mode 100644 solr/src/site/src/documentation/skins/lucene/xslt/html/tab-to-menu.xsl create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/ResponseParser.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/SolrQuery.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/SolrRequest.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/SolrResponse.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/SolrServer.java 
create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/SolrServerException.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/beans/DocumentObjectBinder.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/beans/Field.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/impl/BinaryRequestWriter.java create mode 100755 solr/src/solrj/org/apache/solr/client/solrj/impl/BinaryResponseParser.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/impl/CommonsHttpSolrServer.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/impl/LBHttpSolrServer.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/impl/StreamingUpdateSolrServer.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/impl/XMLResponseParser.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/request/AbstractUpdateRequest.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/request/ContentStreamUpdateRequest.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/request/CoreAdminRequest.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/request/DirectXmlRequest.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/request/DocumentAnalysisRequest.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/request/FieldAnalysisRequest.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/request/JavaBinUpdateRequestCodec.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/request/LukeRequest.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/request/QueryRequest.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/request/RequestWriter.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/request/SolrPing.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/request/UpdateRequest.java create mode 100644 
solr/src/solrj/org/apache/solr/client/solrj/response/AnalysisResponseBase.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/response/CoreAdminResponse.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/response/DocumentAnalysisResponse.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/response/FacetField.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/response/FieldAnalysisResponse.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/response/FieldStatsInfo.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/response/LukeResponse.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/response/QueryResponse.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/response/SolrPingResponse.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/response/SolrResponseBase.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/response/SpellCheckResponse.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/response/TermsResponse.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/response/UpdateResponse.java create mode 100644 solr/src/solrj/org/apache/solr/client/solrj/util/ClientUtils.java create mode 100644 solr/src/test/org/apache/solr/BaseDistributedSearchTestCase.java create mode 100644 solr/src/test/org/apache/solr/BasicFunctionalityTest.java create mode 100644 solr/src/test/org/apache/solr/ConvertedLegacyTest.java create mode 100644 solr/src/test/org/apache/solr/DisMaxRequestHandlerTest.java create mode 100644 solr/src/test/org/apache/solr/EchoParamsTest.java create mode 100644 solr/src/test/org/apache/solr/MinimalSchemaTest.java create mode 100644 solr/src/test/org/apache/solr/OutputWriterTest.java create mode 100644 solr/src/test/org/apache/solr/SampleTest.java create mode 100644 solr/src/test/org/apache/solr/SolrInfoMBeanTest.java create mode 100755 
solr/src/test/org/apache/solr/TestDistributedSearch.java create mode 100644 solr/src/test/org/apache/solr/TestPluginEnable.java create mode 100644 solr/src/test/org/apache/solr/TestSolrCoreProperties.java create mode 100644 solr/src/test/org/apache/solr/TestTrie.java create mode 100644 solr/src/test/org/apache/solr/analysis/BaseTokenTestCase.java create mode 100644 solr/src/test/org/apache/solr/analysis/CommonGramsFilterFactoryTest.java create mode 100644 solr/src/test/org/apache/solr/analysis/CommonGramsFilterTest.java create mode 100644 solr/src/test/org/apache/solr/analysis/CommonGramsQueryFilterFactoryTest.java create mode 100644 solr/src/test/org/apache/solr/analysis/DoubleMetaphoneFilterFactoryTest.java create mode 100644 solr/src/test/org/apache/solr/analysis/DoubleMetaphoneFilterTest.java create mode 100644 solr/src/test/org/apache/solr/analysis/EnglishPorterFilterFactoryTest.java create mode 100644 solr/src/test/org/apache/solr/analysis/HTMLStripCharFilterTest.java create mode 100644 solr/src/test/org/apache/solr/analysis/LengthFilterTest.java create mode 100644 solr/src/test/org/apache/solr/analysis/SnowballPorterFilterFactoryTest.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestArabicFilters.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestBrazilianStemFilterFactory.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestBufferedTokenStream.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestCJKTokenizerFactory.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestCapitalizationFilter.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestChineseFilterFactory.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestChineseTokenizerFactory.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestCollationKeyFilterFactory.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestDelimitedPayloadTokenFilterFactory.java create mode 100644 
solr/src/test/org/apache/solr/analysis/TestDictionaryCompoundWordTokenFilterFactory.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestDutchStemFilterFactory.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestElisionFilterFactory.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestFrenchStemFilterFactory.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestGermanStemFilterFactory.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestGreekLowerCaseFilterFactory.java create mode 100755 solr/src/test/org/apache/solr/analysis/TestHyphenatedWordsFilter.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestKeepFilterFactory.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestKeepWordFilter.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestMappingCharFilterFactory.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestMultiWordSynonyms.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestNGramFilters.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestPatternReplaceCharFilter.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestPatternReplaceFilter.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestPatternTokenizerFactory.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestPersianNormalizationFilterFactory.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestPhoneticFilter.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestPorterStemFilterFactory.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestRemoveDuplicatesTokenFilter.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestReverseStringFilterFactory.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestReversedWildcardFilterFactory.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestRussianFilters.java create mode 100644 
solr/src/test/org/apache/solr/analysis/TestShingleFilterFactory.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestStandardFactories.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestStopFilterFactory.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestSynonymFilter.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestSynonymMap.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestThaiWordFilterFactory.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestTrimFilter.java create mode 100644 solr/src/test/org/apache/solr/analysis/TestWordDelimiterFilter.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/LargeVolumeTestBase.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/MergeIndexesExampleTestBase.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/MultiCoreExampleTestBase.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/SolrExampleTestBase.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/SolrExampleTests.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/SolrExceptionTest.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/SolrQueryTest.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/StartSolrJetty.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/TestBatchUpdate.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/TestLBHttpSolrServer.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/beans/TestDocumentObjectBinder.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/embedded/JettyWebappTest.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/embedded/LargeVolumeBinaryJettyTest.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/embedded/LargeVolumeEmbeddedTest.java create mode 100644 
solr/src/test/org/apache/solr/client/solrj/embedded/LargeVolumeJettyTest.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/embedded/MergeIndexesEmbeddedTest.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/embedded/MultiCoreEmbeddedTest.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/embedded/MultiCoreExampleJettyTest.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/embedded/SolrExampleEmbeddedTest.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/embedded/SolrExampleJettyTest.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/embedded/SolrExampleStreamingTest.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/embedded/TestSolrProperties.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/request/TestUpdateRequestCodec.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/response/AnlysisResponseBaseTest.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/response/DocumentAnalysisResponseTest.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/response/FieldAnalysisResponseTest.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/response/QueryResponseTest.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/response/TermsResponseTest.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/response/TestSpellCheckResponse.java create mode 100644 solr/src/test/org/apache/solr/client/solrj/util/ClientUtilsTest.java create mode 100644 solr/src/test/org/apache/solr/common/SolrDocumentTest.java create mode 100644 solr/src/test/org/apache/solr/common/params/ModifiableSolrParamsTest.java create mode 100755 solr/src/test/org/apache/solr/common/params/SolrParamTest.java create mode 100755 solr/src/test/org/apache/solr/common/util/ContentStreamTest.java create mode 100644 solr/src/test/org/apache/solr/common/util/DOMUtilTest.java create mode 100644 
solr/src/test/org/apache/solr/common/util/FileUtilsTest.java create mode 100644 solr/src/test/org/apache/solr/common/util/IteratorChainTest.java create mode 100644 solr/src/test/org/apache/solr/common/util/NamedListTest.java create mode 100644 solr/src/test/org/apache/solr/common/util/TestFastInputStream.java create mode 100755 solr/src/test/org/apache/solr/common/util/TestHash.java create mode 100755 solr/src/test/org/apache/solr/common/util/TestNamedListCodec.java create mode 100644 solr/src/test/org/apache/solr/common/util/TestXMLEscaping.java create mode 100755 solr/src/test/org/apache/solr/core/AlternateDirectoryTest.java create mode 100644 solr/src/test/org/apache/solr/core/AlternateIndexReaderTest.java create mode 100644 solr/src/test/org/apache/solr/core/DummyValueSourceParser.java create mode 100644 solr/src/test/org/apache/solr/core/FakeDeletionPolicy.java create mode 100644 solr/src/test/org/apache/solr/core/IndexReaderFactoryTest.java create mode 100644 solr/src/test/org/apache/solr/core/MockQuerySenderListenerReqHandler.java create mode 100755 solr/src/test/org/apache/solr/core/RequestHandlersTest.java create mode 100644 solr/src/test/org/apache/solr/core/ResourceLoaderTest.java create mode 100644 solr/src/test/org/apache/solr/core/SOLR749Test.java create mode 100755 solr/src/test/org/apache/solr/core/SolrCoreTest.java create mode 100644 solr/src/test/org/apache/solr/core/TestArbitraryIndexDir.java create mode 100644 solr/src/test/org/apache/solr/core/TestBadConfig.java create mode 100644 solr/src/test/org/apache/solr/core/TestConfig.java create mode 100644 solr/src/test/org/apache/solr/core/TestJmxIntegration.java create mode 100644 solr/src/test/org/apache/solr/core/TestJmxMonitoredMap.java create mode 100644 solr/src/test/org/apache/solr/core/TestLegacyMergeSchedulerPolicyConfig.java create mode 100644 solr/src/test/org/apache/solr/core/TestPropInject.java create mode 100644 solr/src/test/org/apache/solr/core/TestQuerySenderListener.java create mode 
100644 solr/src/test/org/apache/solr/core/TestSolrDeletionPolicy1.java create mode 100644 solr/src/test/org/apache/solr/core/TestSolrDeletionPolicy2.java create mode 100644 solr/src/test/org/apache/solr/core/TestXIncludeConfig.java create mode 100644 solr/src/test/org/apache/solr/handler/AnalysisRequestHandlerTest.java create mode 100644 solr/src/test/org/apache/solr/handler/AnalysisRequestHandlerTestBase.java create mode 100644 solr/src/test/org/apache/solr/handler/DocumentAnalysisRequestHandlerTest.java create mode 100644 solr/src/test/org/apache/solr/handler/FieldAnalysisRequestHandlerTest.java create mode 100644 solr/src/test/org/apache/solr/handler/MoreLikeThisHandlerTest.java create mode 100644 solr/src/test/org/apache/solr/handler/SpellCheckerRequestHandlerTest.java create mode 100644 solr/src/test/org/apache/solr/handler/StandardRequestHandlerTest.java create mode 100755 solr/src/test/org/apache/solr/handler/TestCSVLoader.java create mode 100644 solr/src/test/org/apache/solr/handler/TestReplicationHandler.java create mode 100644 solr/src/test/org/apache/solr/handler/XmlUpdateRequestHandlerTest.java create mode 100644 solr/src/test/org/apache/solr/handler/admin/LukeRequestHandlerTest.java create mode 100644 solr/src/test/org/apache/solr/handler/admin/SystemInfoHandlerTest.java create mode 100644 solr/src/test/org/apache/solr/handler/component/DistributedSpellCheckComponentTest.java create mode 100644 solr/src/test/org/apache/solr/handler/component/DistributedTermsComponentTest.java create mode 100644 solr/src/test/org/apache/solr/handler/component/QueryElevationComponentTest.java create mode 100644 solr/src/test/org/apache/solr/handler/component/SearchHandlerTest.java create mode 100644 solr/src/test/org/apache/solr/handler/component/SpellCheckComponentTest.java create mode 100644 solr/src/test/org/apache/solr/handler/component/StatsComponentTest.java create mode 100644 solr/src/test/org/apache/solr/handler/component/TermVectorComponentTest.java create mode 
100644 solr/src/test/org/apache/solr/handler/component/TermsComponentTest.java create mode 100644 solr/src/test/org/apache/solr/highlight/DummyHighlighter.java create mode 100644 solr/src/test/org/apache/solr/highlight/FastVectorHighlighterTest.java create mode 100644 solr/src/test/org/apache/solr/highlight/HighlighterConfigTest.java create mode 100755 solr/src/test/org/apache/solr/highlight/HighlighterTest.java create mode 100644 solr/src/test/org/apache/solr/request/JSONWriterTest.java create mode 100755 solr/src/test/org/apache/solr/request/SimpleFacetsLegacySortTest.java create mode 100644 solr/src/test/org/apache/solr/request/SimpleFacetsTest.java create mode 100644 solr/src/test/org/apache/solr/request/TestBinaryResponseWriter.java create mode 100755 solr/src/test/org/apache/solr/request/TestFaceting.java create mode 100755 solr/src/test/org/apache/solr/request/TestWriterPerf.java create mode 100644 solr/src/test/org/apache/solr/schema/BadIndexSchemaTest.java create mode 100644 solr/src/test/org/apache/solr/schema/CopyFieldTest.java create mode 100644 solr/src/test/org/apache/solr/schema/CustomSimilarityFactory.java create mode 100644 solr/src/test/org/apache/solr/schema/DateFieldTest.java create mode 100644 solr/src/test/org/apache/solr/schema/IndexSchemaTest.java create mode 100644 solr/src/test/org/apache/solr/schema/LegacyDateFieldTest.java create mode 100644 solr/src/test/org/apache/solr/schema/MockConfigurableSimilarity.java create mode 100644 solr/src/test/org/apache/solr/schema/NotRequiredUniqueKeyTest.java create mode 100644 solr/src/test/org/apache/solr/schema/PolyFieldTest.java create mode 100644 solr/src/test/org/apache/solr/schema/RequiredFieldsTest.java create mode 100644 solr/src/test/org/apache/solr/schema/TestBinaryField.java create mode 100644 solr/src/test/org/apache/solr/schema/UUIDFieldTest.java create mode 100644 solr/src/test/org/apache/solr/search/DocSetPerf.java create mode 100755 
solr/src/test/org/apache/solr/search/FooQParserPlugin.java create mode 100644 solr/src/test/org/apache/solr/search/FunctionQParserTest.java create mode 100644 solr/src/test/org/apache/solr/search/QueryParsingTest.java create mode 100644 solr/src/test/org/apache/solr/search/TestDocSet.java create mode 100755 solr/src/test/org/apache/solr/search/TestExtendedDismaxParser.java create mode 100644 solr/src/test/org/apache/solr/search/TestFastLRUCache.java create mode 100755 solr/src/test/org/apache/solr/search/TestIndexSearcher.java create mode 100755 solr/src/test/org/apache/solr/search/TestQueryTypes.java create mode 100755 solr/src/test/org/apache/solr/search/TestQueryUtils.java create mode 100644 solr/src/test/org/apache/solr/search/TestRangeQuery.java create mode 100755 solr/src/test/org/apache/solr/search/TestSearchPerf.java create mode 100755 solr/src/test/org/apache/solr/search/TestSort.java create mode 100755 solr/src/test/org/apache/solr/search/function/NvlValueSourceParser.java create mode 100644 solr/src/test/org/apache/solr/search/function/SortByFunctionTest.java create mode 100755 solr/src/test/org/apache/solr/search/function/TestFunctionQuery.java create mode 100644 solr/src/test/org/apache/solr/search/function/distance/DistanceFunctionTest.java create mode 100644 solr/src/test/org/apache/solr/servlet/CacheHeaderTest.java create mode 100644 solr/src/test/org/apache/solr/servlet/CacheHeaderTestBase.java create mode 100644 solr/src/test/org/apache/solr/servlet/DirectSolrConnectionTest.java create mode 100644 solr/src/test/org/apache/solr/servlet/NoCacheHeaderTest.java create mode 100644 solr/src/test/org/apache/solr/servlet/SolrRequestParserTest.java create mode 100644 solr/src/test/org/apache/solr/spelling/FileBasedSpellCheckerTest.java create mode 100644 solr/src/test/org/apache/solr/spelling/IndexBasedSpellCheckerTest.java create mode 100644 solr/src/test/org/apache/solr/spelling/SimpleQueryConverter.java create mode 100644 
solr/src/test/org/apache/solr/spelling/SpellingQueryConverterTest.java create mode 100644 solr/src/test/org/apache/solr/update/AutoCommitTest.java create mode 100644 solr/src/test/org/apache/solr/update/DirectUpdateHandlerOptimizeTest.java create mode 100644 solr/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java create mode 100644 solr/src/test/org/apache/solr/update/DocumentBuilderTest.java create mode 100755 solr/src/test/org/apache/solr/update/TestIndexingPerformance.java create mode 100644 solr/src/test/org/apache/solr/update/processor/CustomUpdateRequestProcessorFactory.java create mode 100755 solr/src/test/org/apache/solr/update/processor/SignatureUpdateProcessorFactoryTest.java create mode 100644 solr/src/test/org/apache/solr/update/processor/UpdateRequestProcessorFactoryTest.java create mode 100644 solr/src/test/org/apache/solr/util/ArraysUtilsTest.java create mode 100644 solr/src/test/org/apache/solr/util/BitSetPerf.java create mode 100644 solr/src/test/org/apache/solr/util/DateMathParserTest.java create mode 100644 solr/src/test/org/apache/solr/util/SolrPluginUtilsTest.java create mode 100755 solr/src/test/org/apache/solr/util/TestCharArrayMap.java create mode 100644 solr/src/test/org/apache/solr/util/TestNumberUtils.java create mode 100644 solr/src/test/org/apache/solr/util/TestOpenBitSet.java create mode 100755 solr/src/test/org/apache/solr/util/TestUtils.java create mode 100644 solr/src/test/test-files/README create mode 100644 solr/src/test/test-files/books.csv create mode 100755 solr/src/test/test-files/htmlStripReaderTest.html create mode 100644 solr/src/test/test-files/lib-dirs/README create mode 100644 solr/src/test/test-files/lib-dirs/a/a1/empty-file-a1.txt create mode 100644 solr/src/test/test-files/lib-dirs/a/a2/empty-file-a2.txt create mode 100644 solr/src/test/test-files/lib-dirs/b/b1/empty-file-b1.txt create mode 100644 solr/src/test/test-files/lib-dirs/b/b2/empty-file-b2.txt create mode 100644 
solr/src/test/test-files/lib-dirs/c/c1/empty-file-c1.txt create mode 100644 solr/src/test/test-files/lib-dirs/c/c2/empty-file-c2.txt create mode 100644 solr/src/test/test-files/lib-dirs/d/d1/empty-file-d1.txt create mode 100644 solr/src/test/test-files/lib-dirs/d/d2/empty-file-d2.txt create mode 100755 solr/src/test/test-files/mailing_lists.pdf create mode 100644 solr/src/test/test-files/sampleDateFacetResponse.xml create mode 100644 solr/src/test/test-files/solr/conf/bad-schema.xml create mode 100644 solr/src/test/test-files/solr/conf/bad_solrconfig.xml create mode 100644 solr/src/test/test-files/solr/conf/compoundDictionary.txt create mode 100644 solr/src/test/test-files/solr/conf/elevate.xml create mode 100644 solr/src/test/test-files/solr/conf/frenchArticles.txt create mode 100644 solr/src/test/test-files/solr/conf/keep-1.txt create mode 100644 solr/src/test/test-files/solr/conf/keep-2.txt create mode 100644 solr/src/test/test-files/solr/conf/mapping-ISOLatin1Accent.txt create mode 100644 solr/src/test/test-files/solr/conf/old_synonyms.txt create mode 100644 solr/src/test/test-files/solr/conf/protwords.txt create mode 100644 solr/src/test/test-files/solr/conf/schema-binaryfield.xml create mode 100644 solr/src/test/test-files/solr/conf/schema-copyfield-test.xml create mode 100644 solr/src/test/test-files/solr/conf/schema-minimal.xml create mode 100644 solr/src/test/test-files/solr/conf/schema-not-required-unique-key.xml create mode 100644 solr/src/test/test-files/solr/conf/schema-replication1.xml create mode 100644 solr/src/test/test-files/solr/conf/schema-replication2.xml create mode 100644 solr/src/test/test-files/solr/conf/schema-required-fields.xml create mode 100644 solr/src/test/test-files/solr/conf/schema-reversed.xml create mode 100644 solr/src/test/test-files/solr/conf/schema-spellchecker.xml create mode 100644 solr/src/test/test-files/solr/conf/schema-stop-keep.xml create mode 100644 solr/src/test/test-files/solr/conf/schema-trie.xml create mode 100644 
solr/src/test/test-files/solr/conf/schema.xml create mode 100755 solr/src/test/test-files/solr/conf/schema11.xml create mode 100755 solr/src/test/test-files/solr/conf/schema12.xml create mode 100644 solr/src/test/test-files/solr/conf/solrconfig-SOLR-749.xml create mode 100755 solr/src/test/test-files/solr/conf/solrconfig-altdirectory.xml create mode 100644 solr/src/test/test-files/solr/conf/solrconfig-delpolicy1.xml create mode 100644 solr/src/test/test-files/solr/conf/solrconfig-delpolicy2.xml create mode 100644 solr/src/test/test-files/solr/conf/solrconfig-duh-optimize.xml create mode 100644 solr/src/test/test-files/solr/conf/solrconfig-elevate.xml create mode 100644 solr/src/test/test-files/solr/conf/solrconfig-enableplugin.xml create mode 100755 solr/src/test/test-files/solr/conf/solrconfig-facet-sort.xml create mode 100755 solr/src/test/test-files/solr/conf/solrconfig-functionquery.xml create mode 100644 solr/src/test/test-files/solr/conf/solrconfig-highlight.xml create mode 100644 solr/src/test/test-files/solr/conf/solrconfig-legacy.xml create mode 100644 solr/src/test/test-files/solr/conf/solrconfig-master.xml create mode 100644 solr/src/test/test-files/solr/conf/solrconfig-master1.xml create mode 100644 solr/src/test/test-files/solr/conf/solrconfig-master2.xml create mode 100644 solr/src/test/test-files/solr/conf/solrconfig-nocache.xml create mode 100644 solr/src/test/test-files/solr/conf/solrconfig-propinject-indexdefault.xml create mode 100644 solr/src/test/test-files/solr/conf/solrconfig-propinject.xml create mode 100644 solr/src/test/test-files/solr/conf/solrconfig-querysender.xml create mode 100644 solr/src/test/test-files/solr/conf/solrconfig-reqHandler.incl create mode 100644 solr/src/test/test-files/solr/conf/solrconfig-slave.xml create mode 100644 solr/src/test/test-files/solr/conf/solrconfig-slave1.xml create mode 100644 solr/src/test/test-files/solr/conf/solrconfig-solcoreproperties.xml create mode 100644 
solr/src/test/test-files/solr/conf/solrconfig-spellchecker.xml create mode 100644 solr/src/test/test-files/solr/conf/solrconfig-termindex.xml create mode 100644 solr/src/test/test-files/solr/conf/solrconfig-transformers.xml create mode 100644 solr/src/test/test-files/solr/conf/solrconfig-xinclude.xml create mode 100644 solr/src/test/test-files/solr/conf/solrconfig.xml create mode 100755 solr/src/test/test-files/solr/conf/solrconfig_perf.xml create mode 100644 solr/src/test/test-files/solr/conf/stop-1.txt create mode 100644 solr/src/test/test-files/solr/conf/stop-2.txt create mode 100644 solr/src/test/test-files/solr/conf/stopwords.txt create mode 100644 solr/src/test/test-files/solr/conf/synonyms.txt create mode 100644 solr/src/test/test-files/solr/conf/xslt/dummy.xsl create mode 100644 solr/src/test/test-files/solr/crazy-path-to-config.xml create mode 100644 solr/src/test/test-files/solr/crazy-path-to-schema.xml create mode 100644 solr/src/test/test-files/solr/lib/README create mode 100644 solr/src/test/test-files/solr/lib/classes/empty-file-main-lib.txt create mode 100644 solr/src/test/test-files/solr/shared/conf/schema.xml create mode 100644 solr/src/test/test-files/solr/shared/conf/solrconfig.xml create mode 100644 solr/src/test/test-files/solr/shared/conf/stopwords-en.txt create mode 100644 solr/src/test/test-files/solr/shared/conf/stopwords-fr.txt create mode 100644 solr/src/test/test-files/solr/shared/solr.xml create mode 100644 solr/src/test/test-files/spellings.txt create mode 100644 solr/src/webapp/src/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java create mode 100644 solr/src/webapp/src/org/apache/solr/client/solrj/embedded/JettySolrRunner.java create mode 100644 solr/src/webapp/src/org/apache/solr/servlet/DirectSolrConnection.java create mode 100644 solr/src/webapp/src/org/apache/solr/servlet/LogLevelSelection.java create mode 100644 solr/src/webapp/src/org/apache/solr/servlet/SolrDispatchFilter.java create mode 100644 
solr/src/webapp/src/org/apache/solr/servlet/SolrRequestParsers.java create mode 100644 solr/src/webapp/src/org/apache/solr/servlet/SolrServlet.java create mode 100644 solr/src/webapp/src/org/apache/solr/servlet/SolrServletRequest.java create mode 100644 solr/src/webapp/src/org/apache/solr/servlet/SolrUpdateServlet.java create mode 100644 solr/src/webapp/src/org/apache/solr/servlet/cache/HttpCacheHeaderUtil.java create mode 100644 solr/src/webapp/src/org/apache/solr/servlet/cache/Method.java create mode 100644 solr/src/webapp/web/WEB-INF/web.xml create mode 100755 solr/src/webapp/web/WEB-INF/weblogic.xml create mode 100644 solr/src/webapp/web/admin/_info.jsp create mode 100644 solr/src/webapp/web/admin/action.jsp create mode 100644 solr/src/webapp/web/admin/analysis.jsp create mode 100644 solr/src/webapp/web/admin/analysis.xsl create mode 100644 solr/src/webapp/web/admin/distributiondump.jsp create mode 100755 solr/src/webapp/web/admin/favicon.ico create mode 100644 solr/src/webapp/web/admin/form.jsp create mode 100644 solr/src/webapp/web/admin/get-file.jsp create mode 100644 solr/src/webapp/web/admin/get-properties.jsp create mode 100644 solr/src/webapp/web/admin/header.jsp create mode 100644 solr/src/webapp/web/admin/index.jsp create mode 100644 solr/src/webapp/web/admin/jquery-1.2.3.min.js create mode 100644 solr/src/webapp/web/admin/meta.xsl create mode 100644 solr/src/webapp/web/admin/ping.jsp create mode 100644 solr/src/webapp/web/admin/ping.xsl create mode 100644 solr/src/webapp/web/admin/raw-schema.jsp create mode 100644 solr/src/webapp/web/admin/registry.jsp create mode 100644 solr/src/webapp/web/admin/registry.xsl create mode 100644 solr/src/webapp/web/admin/replication/header.jsp create mode 100644 solr/src/webapp/web/admin/replication/index.jsp create mode 100644 solr/src/webapp/web/admin/schema.jsp create mode 100644 solr/src/webapp/web/admin/solr-admin.css create mode 100644 solr/src/webapp/web/admin/solr_small.png create mode 100644 
solr/src/webapp/web/admin/stats.jsp create mode 100644 solr/src/webapp/web/admin/stats.xsl create mode 100644 solr/src/webapp/web/admin/tabular.xsl create mode 100644 solr/src/webapp/web/admin/threaddump.jsp create mode 100644 solr/src/webapp/web/admin/threaddump.xsl create mode 100755 solr/src/webapp/web/favicon.ico create mode 100644 solr/src/webapp/web/index.jsp diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt new file mode 100644 index 00000000000..3ff79d79742 --- /dev/null +++ b/solr/CHANGES.txt @@ -0,0 +1,2267 @@ + + Apache Solr Release Notes + +Introduction +------------ +Apache Solr is an open source enterprise search server based on the Apache Lucene Java +search library, with XML/HTTP and JSON APIs, hit highlighting, faceted search, +caching, replication, and a web administration interface. It runs in a Java +servlet container such as Tomcat. + +See http://lucene.apache.org/solr for more information. + + +Getting Started +--------------- +You need a Java 1.5 VM or later installed. +In this release, there is an example Solr server including a bundled +servlet container in the directory named "example". +See the tutorial at http://lucene.apache.org/solr/tutorial.html + + +$Id$ + +================== 1.5.0-dev ================== +Upgrading from Solr 1.4 +---------------------- +* The experimental ALIAS command is removed in Solr 1.5 (SOLR-1637) + +* Using solr.xml is recommended for single cores also (SOLR-1621) + +* Old syntax of is deprecated (SOLR-1696) + + +Versions of Major Components +--------------------- +Apache Lucene 2.9.2 +Apache Tika 0.6 +Carrot2 3.1.0 + +Detailed Change List +---------------------- + +New Features +---------------------- + +* SOLR-1302: Added several new distance based functions, including + Great Circle (haversine), Manhattan, Euclidean and String (using the + StringDistance methods in the Lucene spellchecker). + Also added geohash(), deg() and rad() convenience functions. + See http://wiki.apache.org/solr/FunctionQuery. 
(gsingers) + +* SOLR-1553: New dismax parser implementation (accessible as "edismax") + that supports full lucene syntax, improved reserved char escaping, + fielded queries, improved proximity boosting, and improved stopword + handling. (yonik) + +* SOLR-1574: Add many new functions from java Math (e.g. sin, cos) (yonik) + +* SOLR-1569: Allow functions to take in literal strings by modifying the + FunctionQParser and adding LiteralValueSource (gsingers) + +* SOLR-1571: Added unicode collation support through Lucene's CollationKeyFilter + (Robert Muir via shalin) + +* SOLR-785: Distributed Search support for SpellCheckComponent + (Matthew Woytowitz, shalin) + +* SOLR-1625: Add regexp support for TermsComponent (Uri Boness via noble) + +* SOLR-1297: Add sort by Function capability (gsingers) + +* SOLR-1139: Add TermsComponent Query and Response Support in SolrJ (Matt Weber via shalin) + +* SOLR-1177: Distributed Search support for TermsComponent (Matt Weber via shalin) + +* SOLR-1621, SOLR-1722: Allow current single core deployments to be specified by solr.xml (Mark Miller, noble) + +* SOLR-1532: Allow StreamingUpdateSolrServer to use a provided HttpClient (Gabriele Renzi via shalin) + +* SOLR-1653: Add PatternReplaceCharFilter (koji) + +* SOLR-1131: FieldTypes can now output multiple Fields per Type and still be searched. This can be handy for hiding the details of a particular + implementation such as in the spatial case. (Chris Mattmann, shalin, noble, gsingers, yonik) + +* SOLR-1586: Add support for Geohash and Spatial Tile FieldType (Chris Mattmann, gsingers) + +* SOLR-1697: PluginInfo should load plugins w/o class attribute also (noble) + +* SOLR-1268: Incorporate FastVectorHighlighter (koji) + +* SOLR-1750: SystemInfoRequestHandler added for simpler programmatic access + to info currently available from registry.jsp and stats.jsp + (ehatcher, hossman) + +* SOLR-1815: SolrJ now preserves the order of facet queries. 
(yonik) + +Optimizations +---------------------- + +* SOLR-1679: Don't build up string messages in SolrCore.execute unless they + are necessary for the current log level. + (Fuad Efendi and hossman) + +Bug Fixes +---------------------- + +* SOLR-1432: Make the new ValueSource.getValues(context,reader) delegate + to the original ValueSource.getValues(reader) so custom sources + will work. (yonik) + +* SOLR-1572: FastLRUCache correctly implemented the LRU policy only + for the first 2B accesses. (yonik) + +* SOLR-1582: copyField was ignored for BinaryField types (gsingers) + +* SOLR-1563: Binary fields, including trie-based numeric fields, caused null + pointer exceptions in the luke request handler. (yonik) + +* SOLR-1577: The example solrconfig.xml defaulted to a solr data dir + relative to the current working directory, even if a different solr home + was being used. The new behavior changes the default to a zero length + string, which is treated the same as if no dataDir had been specified, + hence the "data" directory under the solr home will be used. (yonik) + +* SOLR-1584: SolrJ - SolrQuery.setIncludeScore() incorrectly added + fl=score to the parameter list instead of appending score to the + existing field list. (yonik) + +* SOLR-1580: Solr Configuration ignores 'mergeFactor' parameter, always + uses Lucene default. (Lance Norskog via Mark Miller) + +* SOLR-1593: ReverseWildcardFilter didn't work for surrogate pairs + (i.e. code points outside of the BMP), resulting in incorrect + matching. This change requires reindexing for any content with + such characters. (Robert Muir, yonik) + +* SOLR-1596: A rollback operation followed by the shutdown of Solr + or the close of a core resulted in a warning: + "SEVERE: SolrIndexWriter was not closed prior to finalize()" although + there were no other consequences. 
(yonik) + +* SOLR-1595: StreamingUpdateSolrServer used the platform default character + set when streaming updates, rather than using UTF-8 as the HTTP headers + indicated, leading to an encoding mismatch. (hossman, yonik) + +* SOLR-1587: A distributed search request with fl=score, didn't match + the behavior of a non-distributed request since it only returned + the id,score fields instead of all fields in addition to score. (yonik) + +* SOLR-1601: Schema browser does not indicate presence of charFilter. (koji) + +* SOLR-1615: Backslash escaping did not work in quoted strings + for local param arguments. (Wojtek Piaseczny, yonik) + +* SOLR-1628: log contains incorrect number of adds and deletes. + (Thijs Vonk via yonik) + +* SOLR-343: Date faceting now respects facet.mincount limiting + (Uri Boness, Raiko Eckstein via hossman) + +* SOLR-1624: Highlighter only highlights values from the first field value + in a multivalued field when term positions (term vectors) are stored. + (Chris Harris via yonik) + +* SOLR-1635: Fixed error message when numeric values can't be parsed by + DOMUtils - notably for plugin init params in solrconfig.xml. + (hossman) + +* SOLR-1651: Fixed Incorrect dataimport handler package name in SolrResourceLoader + (Akshay Ukey via shalin) + +* SOLR-1660: CapitalizationFilter crashes if you use the maxWordCountOption + (Robert Muir via shalin) + +* SOLR-1667: PatternTokenizer does not reset attributes such as positionIncrementGap + (Robert Muir via shalin) + +* SOLR-1711: SolrJ - StreamingUpdateSolrServer had a race condition that + could halt the streaming of documents. (Attila Babo via yonik) + +* SOLR-1748, SOLR-1747, SOLR-1746, SOLR-1745, SOLR-1744: Streams and Readers + retrieved from ContentStreams are not closed in various places, resulting + in file descriptor leaks. 
+ (Christoff Brill, Mark Miller) + +* SOLR-1753: StatsComponent throws NPE when getting statistics for facets in distributed search + (Janne Majaranta via koji) + +* SOLR-1736: In the slave, if 'mov'ing a file does not succeed, copy the file (noble) + +* SOLR-1579: Fixes to XML escaping in stats.jsp + (David Bowen and hossman) + +* SOLR-1777: fieldTypes with sortMissingLast=true or sortMissingFirst=true can + result in incorrectly sorted results. (yonik) + +* SOLR-1798: Small memory leak (~100 bytes) in fastLRUCache for every + commit. (yonik) + + +Other Changes +---------------------- + +* SOLR-1516: Addition of an abstract BaseResponseWriter class to simplify the + development of QueryResponseWriter implementations. + (Chris A. Mattmann via noble) + +* SOLR-1592: Refactor XMLWriter startTag to allow arbitrary attributes to be written + (Chris A. Mattmann via noble) + +* SOLR-1561: Added Lucene 2.9.1 spatial contrib jar to lib. (gsingers) + +* SOLR-1570: Log warnings if uniqueKey is multi-valued or not stored (hossman, shalin) + +* SOLR-1558: QueryElevationComponent only works if the uniqueKey field is + implemented using StrField. In previous versions of Solr no warning or + error would be generated if you attempted to use QueryElevationComponent, + it would just fail in unexpected ways. This has been changed so that it + will fail with a clear error message on initialization. (hossman) + +* SOLR-1611: Added Lucene 2.9.1 collation contrib jar to lib (shalin) + +* SOLR-1608: Extract base class from TestDistributedSearch to make + it easy to write test cases for other distributed components. 
(shalin) + +* Upgraded to Lucene 2.9-dev r888785 (shalin) + +* SOLR-1610: Generify SolrCache (Jason Rutherglen via shalin) + +* SOLR-1637: Remove ALIAS command + +* SOLR-1662: Added Javadocs in BufferedTokenStream and fixed incorrect cloning + in TestBufferedTokenStream (Robert Muir, Uwe Schindler via shalin) + +* SOLR-1674: Improve analysis tests and cut over to new TokenStream API. + (Robert Muir via Mark Miller) + +* SOLR-1661: Remove adminCore from CoreContainer . removed deprecated methods setAdminCore(), getAdminCore() (noble) + +* SOLR-1704: Google collections moved from clustering to core (noble) + +* SOLR-1268: Add Lucene 2.9-dev r888785 FastVectorHighlighter contrib jar to lib. (koji) + +* SOLR-1538: Reordering of object allocations in ConcurrentLRUCache to eliminate + (an extremely small) potential for deadlock. + (gabriele renzi via hossman) + +* SOLR-1588: Removed some very old dead code. + (Chris A. Mattmann via hossman) + +* SOLR-1696 : Deprecate old syntax and move configuration to HighlightComponent (noble) + +* SOLR-1727: SolrEventListener should extend NamedListInitializedPlugin (noble) + +* SOLR-1771: Improved error message when StringIndex cannot be initialized + for a function query (hossman) + +* SOLR-1695: Improved error messages when adding a document that does not + contain exactly one value for the uniqueKey field (hossman) + +* SOLR-1776: DismaxQParser and ExtendedDismaxQParser now use the schema.xml + "defaultSearchField" as the default value for the "qf" param instead of failing + with an error when "qf" is not specified. (hossman) + +Build +---------------------- + +* SOLR-1522: Automated release signing process. (gsingers) + +Documentation +---------------------- + +* SOLR-1590: Javadoc for XMLWriter#startTag + (Chris A. 
Mattmann via hossman) + +* SOLR-1792: Documented peculiar behavior of TestHarness.LocalRequestFactory + (hossman) + +================== Release 1.4.0 ================== +Release Date: See http://lucene.apache.org/solr for the official release date. + +Upgrading from Solr 1.3 +----------------------- + +There is a new default faceting algorithm for multiValued fields that should be +faster for most cases. One can revert to the previous algorithm (which has +also been improved somewhat) by adding facet.method=enum to the request. + +Searching and sorting is now done on a per-segment basis, meaning that +the FieldCache entries used for sorting and for function queries are +created and used per-segment and can be reused for segments that don't +change between index updates. While generally beneficial, this can lead +to increased memory usage over 1.3 in certain scenarios: + 1) A single valued field that was used for both sorting and faceting +in 1.3 would have used the same top level FieldCache entry. In 1.4, +sorting will use entries at the segment level while faceting will still +use entries at the top reader level, leading to increased memory usage. + 2) Certain function queries such as ord() and rord() require a top level +FieldCache instance and can thus lead to increased memory usage. Consider +replacing ord() and rord() with alternatives, such as function queries +based on ms() for date boosting. + +If you use custom Tokenizer or TokenFilter components in a chain specified in +schema.xml, they must support reusability. If your Tokenizer or TokenFilter +maintains state, it should implement reset(). If your TokenFilterFactory does +not return a subclass of TokenFilter, then it should implement reset() and call +reset() on its input TokenStream. TokenizerFactory implementations must +now return a Tokenizer rather than a TokenStream. 
+ +New users of Solr 1.4 will have omitTermFreqAndPositions enabled for non-text +indexed fields by default, which avoids indexing term frequency, positions, and +payloads, making the index smaller and faster. If you are upgrading from an +earlier Solr release and want to enable omitTermFreqAndPositions by default, +change the schema version from 1.1 to 1.2 in schema.xml. Remove any existing +index and restart Solr to ensure that omitTermFreqAndPositions completely takes +effect. + +The default QParserPlugin used by the QueryComponent for parsing the "q" param +has been changed, to remove support for the deprecated use of ";" as a separator +between the query string and the sort options when no "sort" param was used. +Users who wish to continue using the semi-colon based method of specifying the +sort options should explicitly set the defType param to "lucenePlusSort" on all +requests. (The simplest way to do this is by specifying it as a default param +for your request handlers in solrconfig.xml, see the example solrconfig.xml for +sample syntax.) + +If spellcheck.extendedResults=true, the response format for suggestions +has changed, see SOLR-1071. + +Use of the "charset" option when configuring the following Analysis +Factories has been deprecated and will cause a warning to be logged. +In future versions of Solr attempting to use this option will cause an +error. See SOLR-1410 for more information. + * GreekLowerCaseFilterFactory + * RussianStemFilterFactory + * RussianLowerCaseFilterFactory + * RussianLetterTokenizerFactory + +Versions of Major Components +---------------------------- +Apache Lucene 2.9.1 (r832363 on 2.9 branch) +Apache Tika 0.4 +Carrot2 3.1.0 + +Lucene Information +---------------- + +Since Solr is built on top of Lucene, many people add customizations to Solr +that are dependent on Lucene. 
Please see http://lucene.apache.org/java/2_9_0/, +especially http://lucene.apache.org/java/2_9_0/changes/Changes.html for more +information on the version of Lucene used in Solr. + +Detailed Change List +---------------------- + +New Features +---------------------- + 1. SOLR-560: Use SLF4J logging API rather then JDK logging. The packaged .war file is + shipped with a JDK logging implementation, so logging configuration for the .war should + be identical to solr 1.3. However, if you are using the .jar file, you can select + which logging implementation to use by dropping a different binding. + See: http://www.slf4j.org/ (ryan) + + 2. SOLR-617: Allow configurable index deletion policy and provide a default implementation which + allows deletion of commit points on various criteria such as number of commits, age of commit + point and optimized status. + See http://lucene.apache.org/java/2_3_2/api/org/apache/lucene/index/IndexDeletionPolicy.html + (yonik, Noble Paul, Akshay Ukey via shalin) + + 3. SOLR-658: Allow Solr to load index from arbitrary directory in dataDir + (Noble Paul, Akshay Ukey via shalin) + + 4. SOLR-793: Add 'commitWithin' argument to the update add command. This behaves + similar to the global autoCommit maxTime argument except that it is set for + each request. (ryan) + + 5. SOLR-670: Add support for rollbacks in UpdateHandler. This allows user to rollback all changes + since the last commit. (Noble Paul, koji via shalin) + + 6. SOLR-813: Adding DoubleMetaphone Filter and Factory. Similar to the PhoneticFilter, + but this uses DoubleMetaphone specific calls (including alternate encoding) + (Todd Feak via ryan) + + 7. SOLR-680: Add StatsComponent. This gets simple statistics on matched numeric fields, + including: min, max, mean, median, stddev. (koji, ryan) + + 7.1 SOLR-1380: Added support for multi-valued fields (Harish Agarwal via gsingers) + + 8. SOLR-561: Added Replication implemented in Java as a request handler. 
Supports index replication + as well as configuration replication and exposes detailed statistics and progress information + on the Admin page. Works on all platforms. (Noble Paul, yonik, Akshay Ukey, shalin) + + 9. SOLR-746: Added "omitHeader" request parameter to omit the header from the response. + (Noble Paul via shalin) + +10. SOLR-651: Added TermVectorComponent for serving up term vector information, plus IDF. + See http://wiki.apache.org/solr/TermVectorComponent (gsingers, Vaijanath N. Rao, Noble Paul) + +12. SOLR-795: SpellCheckComponent supports building indices on optimize if configured in solrconfig.xml + (Jason Rennie, shalin) + +13. SOLR-667: A LRU cache implementation based upon ConcurrentHashMap and other techniques to reduce + contention and synchronization overhead, to utilize multiple CPU cores more effectively. + (Fuad Efendi, Noble Paul, yonik via shalin) + +14. SOLR-465: Add configurable DirectoryProvider so that alternate Directory + implementations can be specified via solrconfig.xml. The default + DirectoryProvider will use NIOFSDirectory for better concurrency + on non Windows platforms. (Mark Miller, TJ Laurenzo via yonik) + +15. SOLR-822: Add CharFilter so that characters can be filtered (e.g. character normalization) + before Tokenizer/TokenFilters. (koji) + +16. SOLR-829: Allow slaves to request compressed files from master during replication + (Simon Collins, Noble Paul, Akshay Ukey via shalin) + +17. SOLR-877: Added TermsComponent for accessing Lucene's TermEnum capabilities. + Useful for auto suggest and possibly distributed search. Not distributed search compliant. (gsingers) + - Added mincount and maxcount options (Khee Chin via gsingers) + +18. SOLR-538: Add maxChars attribute for copyField function so that the length limit for destination + can be specified. + (Georgios Stamatis, Lars Kotthoff, Chris Harris via koji) + +19. 
SOLR-284: Added support for extracting content from binary documents like MS Word and PDF using Apache Tika. See also contrib/extraction/CHANGES.txt (Eric Pugh, Chris Harris, yonik, gsingers) + +20. SOLR-819: Added factories for Arabic support (gsingers) + +21. SOLR-781: Distributed search ability to sort field.facet values + lexicographically. facet.sort values "true" and "false" are + also deprecated and replaced with "count" and "lex". + (Lars Kotthoff via yonik) + +22. SOLR-821: Add support for replication to copy conf file to slave with a different name. This allows replication + of solrconfig.xml + (Noble Paul, Akshay Ukey via shalin) + +23. SOLR-911: Add support for multi-select faceting by allowing filters to be + tagged and facet commands to exclude certain filters. This patch also + added the ability to change the output key for facets in the response, and + optimized distributed faceting refinement by lowering parsing overhead and + by making requests and responses smaller. + +24. SOLR-876: WordDelimiterFilter now supports a splitOnNumerics + option, as well as a list of protected terms. + (Dan Rosher via hossman) + +25. SOLR-928: SolrDocument and SolrInputDocument now implement the Map + interface. This should make plugging into other standard tools easier. (ryan) + +26. SOLR-847: Enhance the snappull command in ReplicationHandler to accept masterUrl. + (Noble Paul, Preetam Rao via shalin) + +27. SOLR-540: Add support for globbing in field names to highlight. + For example, hl.fl=*_text will highlight all fieldnames ending with + _text. (Lars Kotthoff via yonik) + +28. SOLR-906: Adding a StreamingUpdateSolrServer that writes update commands to + an open HTTP connection. If you are using solrj for bulk update requests + you should consider switching to this implementaion. However, note that + the error handling is not immediate as it is with the standard SolrServer. + (ryan) + +29. 
SOLR-865: Adding support for document updates in binary format and corresponding support in Solrj client. + (Noble Paul via shalin) + +30. SOLR-763: Add support for Lucene's PositionFilter (Mck SembWever via shalin) + +31. SOLR-966: Enhance the map() function query to take in an optional default value (Noble Paul, shalin) + +32. SOLR-820: Support replication on startup of master with new index. (Noble Paul, Akshay Ukey via shalin) + +33. SOLR-943: Make it possible to specify dataDir in solr.xml and accept the dataDir as a request parameter for + the CoreAdmin create command. (Noble Paul via shalin) + +34. SOLR-850: Addition of timeouts for distributed searching. Configurable through 'shard-socket-timeout' and + 'shard-connection-timeout' parameters in SearchHandler. (Patrick O'Leary via shalin) + +35. SOLR-799: Add support for hash based exact/near duplicate document + handling. (Mark Miller, yonik) + +36. SOLR-1026: Add protected words support to SnowballPorterFilterFactory (ehatcher) + +37. SOLR-739: Add support for OmitTf (Mark Miller via yonik) + +38. SOLR-1046: Nested query support for the function query parser + and lucene query parser (the latter existed as an undocumented + feature in 1.3) (yonik) + +39. SOLR-940: Add support for Lucene's Trie Range Queries by providing new FieldTypes in + schema for int, float, long, double and date. Single-valued Trie based + fields with a precisionStep will index multiple precisions and enable + faster range queries. (Uwe Schindler, yonik, shalin) + +40. SOLR-1038: Enhance CommonsHttpSolrServer to add docs in batch using an iterator API (Noble Paul via shalin) + +41. SOLR-844: A SolrServer implementation to front-end multiple solr servers and provides load balancing and failover + support (Noble Paul, Mark Miller, hossman via shalin) + +42. SOLR-939: ValueSourceRangeFilter/Query - filter based on values in a FieldCache entry or on any arbitrary function of field values. (yonik) + +43. 
SOLR-1095: Fixed performance problem in the StopFilterFactory and simplified code. Added tests as well. (gsingers) + +44. SOLR-1096: Introduced httpConnTimeout and httpReadTimeout in replication slave configuration to avoid stalled + replication. (Jeff Newburn, Noble Paul, shalin) + +45. SOLR-1115: on and yes work as expected in solrconfig.xml. (koji) + +46. SOLR-1099: A FieldAnalysisRequestHandler which provides the analysis functionality of the web admin page as + a service. The AnalysisRequestHandler is renamed to DocumentAnalysisRequestHandler which is enhanced with + query analysis and showMatch support. AnalysisRequestHandler is now deprecated. Support for both + FieldAnalysisRequestHandler and DocumentAnalysisRequestHandler is also provided in the Solrj client. + (Uri Boness, shalin) + +47. SOLR-1106: Made CoreAdminHandler Actions pluggable so that additional actions may be plugged in or the existing + ones can be overridden if needed. (Kay Kay, Noble Paul, shalin) + +48. SOLR-1124: Add a top() function query that causes it's argument to + have it's values derived from the top level IndexReader, even when + invoked from a sub-reader. top() is implicitly used for the + ord() and rord() functions. (yonik) + +49. SOLR-1110: Support sorting on trie fields with Distributed Search. (Mark Miller, Uwe Schindler via shalin) + +50. SOLR-1121: CoreAdminhandler should not need a core . This makes it possible to start a Solr server w/o a core .(noble) + +51. SOLR-769: Added support for clustering in contrib/clustering. See http://wiki.apache.org/solr/ClusteringComponent for more info. (gsingers, Stanislaw Osinski) + +52. SOLR-1175: disable/enable replication on master side. added two commands 'enableReplication' and 'disableReplication' (noble) + +53. SOLR-1179: DocSets can now be used as Lucene Filters via + DocSet.getTopFilter() (yonik) + +54. SOLR-1116: Add a Binary FieldType (noble) + +55. 
SOLR-1051: Support the merge of multiple indexes as a CoreAdmin and an update command (Ning Li via shalin) + +56. SOLR-1152: Snapshoot on ReplicationHandler should accept location as a request parameter (shalin) + +57. SOLR-1204: Enhance SpellingQueryConverter to handle UTF-8 instead of ASCII only. + Use the NMTOKEN syntax for matching field names. + (Michael Ludwig, shalin) + +58. SOLR-1189: Support providing username and password for basic HTTP authentication in Java replication + (Matthew Gregg, shalin) + +59. SOLR-243: Add configurable IndexReaderFactory so that alternate IndexReader implementations + can be specified via solrconfig.xml. Note that using a custom IndexReader may be incompatible + with ReplicationHandler (see comments in SOLR-1366). This should be treated as an experimental feature. + (Andrzej Bialecki, hossman, Mark Miller, John Wang) + +60. SOLR-1214: differentiate between solr home and instanceDir .deprecates the method SolrResourceLoader#locateInstanceDir() + and it is renamed to locateSolrHome (noble) + +61. SOLR-1216 : disambiguate the replication command names. 'snappull' becomes 'fetchindex' 'abortsnappull' becomes 'abortfetch' (noble) + +62. SOLR-1145: Add capability to specify an infoStream log file for the underlying Lucene IndexWriter in solrconfig.xml. + This is an advanced debug log file that can be used to aid developers in fixing IndexWriter bugs. See the commented + out example in the example solrconfig.xml under the indexDefaults section. + (Chris Harris, Mark Miller) + +63. SOLR-1256: Show the output of CharFilters in analysis.jsp. (koji) + +64. SOLR-1266: Added stemEnglishPossessive option (default=true) to WordDelimiterFilter + that allows disabling of english possessive stemming (removal of trailing 's from tokens) + (Robert Muir via yonik) + +65. SOLR-1237: firstSearcher and newSearcher can now be identified via the CommonParams.EVENT (evt) parameter + in a request. 
This allows a RequestHandler or SearchComponent to know when a newSearcher or firstSearcher + event happened. QuerySenderListender is the only implementation in Solr that implements this, but outside + implementations may wish to. See the AbstractSolrEventListener for a helper method. (gsingers) + +66. SOLR-1343: Added HTMLStripCharFilter and marked HTMLStripReader, HTMLStripWhitespaceTokenizerFactory and + HTMLStripStandardTokenizerFactory deprecated. To strip HTML tags, HTMLStripCharFilter can be used + with an arbitrary Tokenizer. (koji) + +67. SOLR-1275: Add expungeDeletes to DirectUpdateHandler2 (noble) + +68. SOLR-1372: Enhance FieldAnalysisRequestHandler to accept field value from content stream (ehatcher) + +69. SOLR-1370: Show the output of CharFilters in FieldAnalysisRequestHandler (koji) + +70. SOLR-1373: Add Filter query to admin/form.jsp + (Jason Rutherglen via hossman) + +71. SOLR-1368: Add ms() function query for getting milliseconds from dates and for + high precision date subtraction, add sub() for subtracting other arguments. + (yonik) + +72. SOLR-1156: Sort TermsComponent results by frequency (Matt Weber via yonik) + +73. SOLR-1335 : load core properties from a properties file (noble) + +74. SOLR-1385 : Add an 'enable' attribute to all plugins (noble) + +75. SOLR-1414 : implicit core properties are not set for single core (noble) + +76. SOLR-659 : Adds shards.start and shards.rows to distributed search + to allow more efficient bulk queries (those that retrieve many or all + documents). (Brian Whitman via yonik) + +77. SOLR-1321: Add better support for efficient wildcard handling (Andrzej Bialecki, Robert Muir, gsingers) + +78. SOLR-1326 : New interface PluginInfoInitialized for all types of plugin (noble) + +79. SOLR-1447 : Simple property injection. & syntaxes are now deprecated + (Jason Rutherglen, noble) + +80. 
SOLR-908 : CommonGramsFilterFactory/CommonGramsQueryFilterFactory for + speeding up phrase queries containing common words by indexing + n-grams and using them at query time. + (Tom Burton-West, Jason Rutherglen via yonik) + +81. SOLR-1292: Add FieldCache introspection to stats.jsp and JMX Monitoring via + a new SolrFieldCacheMBean. (hossman) + +82. SOLR-1167: Solr Config now supports XInclude for XML engines that can support it. (Bryan Talbot via gsingers) + +83. SOLR-1478: Enable sort by Lucene docid. (ehatcher) + +84. SOLR-1449: Add elements to solrconfig.xml to specifying additional + classpath directories and regular expressions. (hossman via yonik) + + +Optimizations +---------------------- + 1. SOLR-374: Use IndexReader.reopen to save resources by re-using parts of the + index that haven't changed. (Mark Miller via yonik) + + 2. SOLR-808: Write string keys in Maps as extern strings in the javabin format. (Noble Paul via shalin) + + 3. SOLR-475: New faceting method with better performance and smaller memory usage for + multi-valued fields with many unique values but relatively few values per document. + Controllable via the facet.method parameter - "fc" is the new default method and "enum" + is the original method. (yonik) + + 4. SOLR-970: Use an ArrayList in SolrPluginUtils.parseQueryStrings + since we know exactly how long the List will be in advance. + (Kay Kay via hossman) + + 5. SOLR-1002: Change SolrIndexSearcher to use insertWithOverflow + with reusable priority queue entries to reduce the amount of + generated garbage during searching. (Mark Miller via yonik) + + 6. SOLR-971: Replace StringBuffer with StringBuilder for instances that do not require thread-safety. + (Kay Kay via shalin) + + 7. SOLR-921: SolrResourceLoader must cache short class name vs fully qualified classname + (Noble Paul, hossman via shalin) + + 8. SOLR-973: CommonsHttpSolrServer writes the xml directly to the server. + (Noble Paul via shalin) + + 9. 
SOLR-1108: Remove un-needed synchronization in SolrCore constructor. + (Noble Paul via shalin) + +10. SOLR-1166: Speed up docset/filter generation by avoiding top-level + score() call and iterating over leaf readers with TermDocs. (yonik) + +11. SOLR-1169: SortedIntDocSet - a new small set implementation + that saves memory over HashDocSet, is faster to construct, + is ordered for easier implementation of skipTo, and is faster + in the general case. (yonik) + +12. SOLR-1165: Use Lucene Filters and pass them down to the Lucene + search methods to filter earlier and improve performance. (yonik) + +13. SOLR-1111: Use per-segment sorting to share fieldcache elements + across unchanged segments. This saves memory and reduces + commit times for incremental updates to the index. (yonik) + +14. SOLR-1188: Minor efficiency improvement in TermVectorComponent related to ignoring positions or offsets (gsingers) + +15. SOLR-1150: Load Documents for Highlighting one at a time rather than + all at once to avoid OOM with many large Documents. (Siddharth Gargate via Mark Miller) + +16. SOLR-1353: Implement and use reusable token streams for analysis. (Robert Muir, yonik) + +17. SOLR-1296: Enables setting IndexReader's termInfosIndexDivisor via a new attribute to StandardIndexReaderFactory. Enables + setting termIndexInterval to IndexWriter via SolrIndexConfig. (Jason Rutherglen, hossman, gsingers) + +Bug Fixes +---------------------- + 1. SOLR-774: Fixed logging level display (Sean Timm via Otis Gospodnetic) + + 2. SOLR-771: CoreAdminHandler STATUS should display 'normalized' paths (koji, hossman, shalin) + + 3. SOLR-532: WordDelimiterFilter now respects payloads and other attributes of the original Token by + using Token.clone() (Tricia Williams, gsingers) + + 4. SOLR-805: DisMax queries are not being cached in QueryResultCache (Todd Feak via koji) + + 5. 
SOLR-751: WordDelimiterFilter didn't adjust the start offset of single + tokens that started with delimiters, leading to incorrect highlighting. + (Stefan Oestreicher via yonik) + + 7. SOLR-843: SynonymFilterFactory cannot handle multiple synonym files correctly (koji) + + 8. SOLR-840: BinaryResponseWriter does not handle incompatible data in fields (Noble Paul via shalin) + + 9. SOLR-803: CoreAdminRequest.createCore fails because name parameter isn't set (Sean Colombo via ryan) + +10. SOLR-869: Fix file descriptor leak in SolrResourceLoader#getLines (Mark Miller, shalin) + +11. SOLR-872: Better error message for incorrect copyField destination (Noble Paul via shalin) + +12. SOLR-879: Enable position increments in the query parser and fix the + example schema to enable position increments for the stop filter in + both the index and query analyzers to fix the bug with phrase queries + with stopwords. (yonik) + +13. SOLR-836: Add missing "a" to the example stopwords.txt (yonik) + +14. SOLR-892: Fix serialization of booleans for PHPSerializedResponseWriter + (yonik) + +15. SOLR-898: Fix null pointer exception for the JSON response writer + based formats when nl.json=arrarr with null keys. (yonik) + +16. SOLR-901: FastOutputStream ignores write(byte[]) call. (Noble Paul via shalin) + +17. SOLR-807: BinaryResponseWriter writes fieldType.toExternal if it is not a supported type, + otherwise it writes fieldType.toObject. This fixes the bug with encoding/decoding UUIDField. + (koji, Noble Paul, shalin) + +18. SOLR-863: SolrCore.initIndex should close the directory it gets for clearing the lock and + use the DirectoryFactory. (Mark Miller via shalin) + +19. SOLR-802: Fix a potential null pointer error in the distributed FacetComponent + (David Bowen via ryan) + +20. SOLR-346: Use perl regex to improve accuracy of finding latest snapshot in snapinstaller (billa) + +21. SOLR-830: Use perl regex to improve accuracy of finding latest snapshot in snappuller (billa) + +22. 
SOLR-897: Fixed Argument list too long error when there are lots of snapshots/backups (Dan Rosher via billa) + +23. SOLR-925: Fixed highlighting on fields with multiValued="true" and termOffsets="true" (koji) + +24. SOLR-902: FastInputStream#read(byte b[], int off, int len) gives incorrect results when amount left to read is less + than buffer size (Noble Paul via shalin) + +25. SOLR-978: Old files are not removed from slaves after replication (Jaco, Noble Paul, shalin) + +26. SOLR-883: Implicit properties are not set for Cores created through CoreAdmin (Noble Paul via shalin) + +27. SOLR-991: Better error message when parsing solrconfig.xml fails due to malformed XML. Error message notes the name + of the file being parsed. (Michael Henson via shalin) + +28. SOLR-1008: Fix stats.jsp XML encoding for item entries with ampersands in their names. (ehatcher) + +29. SOLR-976: deleteByQuery is ignored when deleteById is placed prior to deleteByQuery in a . + Now both delete by id and delete by query can be specified at the same time as follows. (koji) + + 0599106000 + office:Bridgewateroffice:Osaka + + +30. SOLR-1016: HTTP 503 error changes 500 in SolrCore (koji) + +31. SOLR-1015: Incomplete information in replication admin page and http command response when server + is both master and slave i.e. when server is a repeater (Akshay Ukey via shalin) + +32. SOLR-1018: Slave is unable to replicate when server acts as repeater (as both master and slave) + (Akshay Ukey, Noble Paul via shalin) + +33. SOLR-1031: Fix XSS vulnerability in schema.jsp (Paul Lovvik via ehatcher) + +34. SOLR-1064: registry.jsp incorrectly displaying info for last core initialized + regardless of what the current core is. (hossman) + +35. SOLR-1072: absolute paths used in sharedLib attribute were + incorrectly treated as relative paths. (hossman) + +36. SOLR-1104: Fix some rounding errors in LukeRequestHandler's histogram (hossman) + +37. 
SOLR-1125: Use query analyzer rather than index analyzer for queryFieldType in QueryElevationComponent + (koji) + +38. SOLR-1126: Replicated files have incorrect timestamp (Jian Han Guo, Jeff Newburn, Noble Paul via shalin) + +39. SOLR-1094: Incorrect value of correctlySpelled attribute in some cases (David Smiley, Mark Miller via shalin) + +40. SOLR-965: Better error message when is not configured. + (Mark Miller via hossman) + +41. SOLR-1135: Java replication creates Snapshot in the directory where Solr was launched (Jianhan Guo via shalin) + +42. SOLR-1138: Query Elevation Component now gracefully handles missing queries. (gsingers) + +43. SOLR-929: LukeRequestHandler should return "dynamicBase" only if the field is dynamic. + (Peter Wolanin, koji) + +44. SOLR-1141: NullPointerException during snapshoot command in java based replication (Jian Han Guo, shalin) + +45. SOLR-1078: Fixes to WordDelimiterFilter to avoid splitting or dropping + international non-letter characters such as non spacing marks. (yonik) + +46. SOLR-825, SOLR-1221: Enables highlighting for range/wildcard/fuzzy/prefix queries if using hl.usePhraseHighlighter=true + and hl.highlightMultiTerm=true. Also make both options default to true. (Mark Miller, yonik) + +47. SOLR-1174: Fix Logging admin form submit url for multicore. (Jacob Singh via shalin) + +48. SOLR-1182: Fix bug in OrdFieldSource#equals which could cause a bug with OrdFieldSource caching + on OrdFieldSource#hashcode collisions. (Mark Miller) + +49. SOLR-1207: equals method should compare this and other of DocList in DocSetBase (koji) + +50. SOLR-1242: Human readable JVM info from system handler does integer cutoff rounding, even when dealing + with GB. Fixed to round to one decimal place. (Jay Hill, Mark Miller) + +51. SOLR-1243: Admin RequestHandlers should not be cached over HTTP. (Mark Miller) + +52. SOLR-1260: Fix implementations of set operations for DocList subclasses + and fix a bug in HashDocSet construction when offset != 0. 
These bugs + never manifested in normal Solr use and only potentially affect + custom code. (yonik) + +53. SOLR-1171: Fix LukeRequestHandler so it doesn't rely on SolrQueryParser + and report incorrect stats when field names contain characters + SolrQueryParser considers special. + (hossman) + +54. SOLR-1317: Fix CapitalizationFilterFactory to work when keep parameter is not specified. + (ehatcher) + +55. SOLR-1342: CapitalizationFilterFactory uses incorrect term length calculations. + (Robert Muir via Mark Miller) + +56. SOLR-1359: DoubleMetaphoneFilter didn't index original tokens if there was no + alternative, and could incorrectly skip or reorder tokens. (yonik) + +57. SOLR-1360: Prevent PhoneticFilter from producing duplicate tokens. (yonik) + +58. SOLR-1371: LukeRequestHandler/schema.jsp errored if schema had no + uniqueKey field. The new test for this also (hopefully) adds some + future proofing against similar bugs in the future. As a side + effect QueryElevationComponentTest was refactored, and a bug in + that test was found. (hossman) + +59. SOLR-914: General finalize() improvements. No finalizer delegates + to the respective close/destroy method w/o first checking if it's + already been closed/destroyed; if it hasn't a, SEVERE error is + logged first. (noble, hossman) + +60. SOLR-1362: WordDelimiterFilter had inconsistent behavior when setting + the position increment of tokens following a token consisting of all + delimiters, and could additionally lose big position increments. + (Robert Muir, yonik) + +61. SOLR-1091: Jetty's use of CESU-8 for code points outside the BMP + resulted in invalid output from the serialized PHP writer. (yonik) + +62. SOLR-1103: LukeRequestHandler (and schema.jsp) have been fixed to + include the "1" (ie: 2**0) bucket in the term histogram data. + (hossman) + +63. SOLR-1398: Add offset corrections in PatternTokenizerFactory. + (Anders Melchiorsen, koji) + +64. SOLR-1400: Properly handle zero-length tokens in TrimFilter. 
This + was not a bug in any released version. (Peter Wolanin, gsingers) + +65. SOLR-1071: spellcheck.extendedResults returns an invalid JSON response + when count > 1. To fix, the extendedResults format was changed. + (Uri Boness, yonik) + +66. SOLR-1381: Fixed improper handling of fields that have only term positions and not term offsets during Highlighting (Thorsten Fischer, gsingers) + +67. SOLR-1427: Fixed registry.jsp issue with MBeans (gsingers) + +68. SOLR-1468: SolrJ's XML response parsing threw an exception for null + names, such as those produced when facet.missing=true (yonik) + +69. SOLR-1471: Fixed issue with calculating missing values for facets in single valued cases in Stats Component. + This is not correctly calculated for the multivalued case. (James Miller, gsingers) + +70. SOLR-1481: Fixed omitHeader parameter for PHP ResponseWriter. (Jun Ohtani via billa) + +71. SOLR-1448: Add weblogic.xml to solr webapp to enable correct operation in + WebLogic. (Ilan Rabinovitch via yonik) + +72. SOLR-1504: empty char mapping can cause ArrayIndexOutOfBoundsException in analysis.jsp and co. + (koji) + +73. SOLR-1394: HTMLStripCharFilter split tokens that contained entities and + often calculated offsets incorrectly for entities. + (Anders Melchiorsen via yonik) + +74. SOLR-1517: Admin pages could stall waiting for localhost name resolution + if reverse DNS wasn't configured; this was changed so the DNS resolution + is attempted only once the first time an admin page is loaded. + (hossman) + +75. SOLR-1529: More than 8 deleteByQuery commands in a single request + caused an error to be returned, although the deletes were + still executed. (asmodean via yonik) + +Other Changes +---------------------- + 1. Upgraded to Lucene 2.4.0 (yonik) + + 2. SOLR-805: Upgraded to Lucene 2.9-dev (r707499) (koji) + + 3. DumpRequestHandler (/debug/dump): changed 'fieldName' to 'sourceInfo'. (ehatcher) + + 4. 
SOLR-852: Refactored common code in CSVRequestHandler and XMLUpdateRequestHandler (gsingers, ehatcher) + + 5. SOLR-871: Removed dependency on stax-utils.jar. If you using solr.jar and running + java 6, you can also remove woodstox and geronimo. (ryan) + + 6. SOLR-465: Upgraded to Lucene 2.9-dev (r719351) (shalin) + + 7. SOLR-889: Upgraded to commons-io-1.4.jar and commons-fileupload-1.2.1.jar (ryan) + + 8. SOLR-875: Upgraded to Lucene 2.9-dev (r723985) and consolidated the BitSet implementations (Michael Busch, gsingers) + + 9. SOLR-819: Upgraded to Lucene 2.9-dev (r724059) to get access to Arabic public constructors (gsingers) + and +10. SOLR-900: Moved solrj into /src/solrj. The contents of solr-common.jar is now included + in the solr-solrj.jar. (ryan) + +11. SOLR-924: Code cleanup: make all existing finalize() methods call + super.finalize() in a finally block. All current instances extend + Object, so this doesn't fix any bugs, but helps protect against + future changes. (Kay Kay via hossman) + +12. SOLR-885: NamedListCodec is renamed to JavaBinCodec and returns Object instead of NamedList. + (Noble Paul, yonik via shalin) + +13. SOLR-84: Use new Solr logo in admin (Michiel via koji) + +14. SOLR-981: groupId for Woodstox dependency in maven solrj changed to org.codehaus.woodstox (Tim Taranov via shalin) + +15. Upgraded to Lucene 2.9-dev r738218 (yonik) + +16. SOLR-959: Refactored TestReplicationHandler to remove hardcoded port numbers (hossman, Akshay Ukey via shalin) + +17. Upgraded to Lucene 2.9-dev r742220 (yonik) + +18. SOLR-1022: Better "ignored" field in example schema.xml (Peter Wolanin via hossman) + +19. SOLR-967: New type-safe constructor for NamedList (Kay Kay via hossman) + +20. SOLR-1036: Change default QParser from "lucenePlusSort" to "lucene" to + reduce confusion of semicolon splitting behavior when no sort param is + specified (hossman) + +21. Upgraded to Lucene 2.9-dev r752164 (shalin) + +22. 
SOLR-1068: Use fsync on replicated index and configuration files (yonik, Noble Paul, shalin) + +23. SOLR-952: Cleanup duplicated code in deprecated HighlightingUtils (hossman) + +24. Upgraded to Lucene 2.9-dev r764281 (shalin) + +25. SOLR-1079: Rename omitTf to omitTermFreqAndPositions (shalin) + +26. SOLR-804: Added Lucene's misc contrib JAR (rev 764281). (gsingers) + +27. Upgraded to Lucene 2.9-dev r768228 (shalin) + +28. Upgraded to Lucene 2.9-dev r768336 (shalin) + +29. SOLR-997: Wait for a longer time for slave to complete replication in TestReplicationHandler + (Mark Miller via shalin) + +30. SOLR-748: FacetComponent helper classes are made public as an experimental API. + (Wojtek Piaseczny via shalin) + +31. Upgraded to Lucene 2.9-dev 773862 (Mark Miller) + +32. Upgraded to Lucene 2.9-dev r776177 (shalin) + +33. SOLR-1149: Made QParserPlugin and related classes extendible as an experimental API. + (Kaktu Chakarabati via shalin) + +34. Upgraded to Lucene 2.9-dev r779312 (yonik) + +35. SOLR-786: Refactor DisMaxQParser to allow overriding certain features of DisMaxQParser + (Wojciech Biela via shalin) + +36. SOLR-458: Add equals and hashCode methods to NamedList (Stefan Rinner, shalin) + +37. SOLR-1184: Add option in solrconfig to open a new IndexReader rather than + using reopen. Done mainly as a fail-safe in the case that a user runs into + a reopen bug/issue. (Mark Miller) + +38. SOLR-1215 use double quotes to enclose attributes in solr.xml (noble) + +39. SOLR-1151: add dynamic copy field and maxChars example to example schema.xml. + (Peter Wolanin, Mark Miller) + +40. SOLR-1233: remove /select?qt=/whatever restriction on /-prefixed request handlers. + (ehatcher) + +41. SOLR-1257: logging.jsp has been removed and now passes through to the + hierarchical log level tool added in Solr 1.3. Users still + hitting "/admin/logging.jsp" should switch to "/admin/logging". + (hossman) + +42. Upgraded to Lucene 2.9-dev r794238. 
Other changes include: + LUCENE-1614 - Use Lucene's DocIdSetIterator.NO_MORE_DOCS as the sentinel value. + LUCENE-1630 - Add acceptsDocsOutOfOrder method to Collector implementations. + LUCENE-1673, LUCENE-1701 - Trie has moved to Lucene core and renamed to NumericRangeQuery. + LUCENE-1662, LUCENE-1687 - Replace usage of ExtendedFieldCache by FieldCache. + (shalin) + +42. SOLR-1241: Solr's CharFilter has been moved to Lucene. Remove CharFilter and related classes + from Solr and use Lucene's corresponding code (koji via shalin) + +43. SOLR-1261: Lucene trunk renamed RangeQuery & Co to TermRangeQuery (Uwe Schindler via shalin) + +44. Upgraded to Lucene 2.9-dev r801856 (Mark Miller) + +45. SOLR-1276: Added StatsComponentTest (Rafał Kuć, gsingers) + +46. SOLR-1377: The TokenizerFactory API has changed to explicitly return a Tokenizer + rather than a TokenStream (that may or may not be a Tokenizer). This change + is required to take advantage of the Token reuse improvements in lucene 2.9. (ryan) + +47. SOLR-1410: Log a warning if the deprecated charset option is used + on GreekLowerCaseFilterFactory, RussianStemFilterFactory, + RussianLowerCaseFilterFactory or RussianLetterTokenizerFactory. + (Robert Muir via hossman) + +48. SOLR-1423: Due to LUCENE-1906, Solr's tokenizer should use Tokenizer.correctOffset() instead of CharStream.correctOffset(). + (Uwe Schindler via koji) + +49. SOLR-1319, SOLR-1345: Upgrade Solr Highlighter classes to new Lucene Highlighter API. This upgrade has + resulted in a back compat break in the DefaultSolrHighlighter class - getQueryScorer is no longer + protected. If you happened to be overriding that method in custom code, override getHighlighter instead. + Also, HighlightingUtils#getQueryScorer has been removed as it was deprecated and backcompat has been + broken with it anyway. (Mark Miller) + +50. SOLR-1357 SolrInputDocument cannot process dynamic fields (Lars Grote via noble) + +Build +---------------------- + 1. 
SOLR-776: Added in ability to sign artifacts via Ant for releases (gsingers) + + 2. SOLR-854: Added run-example target (Mark Miller via ehatcher) + + 3. SOLR-1054: Fix dist-src target for DataImportHandler (Ryuuichi Kumai via shalin) + + 4. SOLR-1219: Added proxy.setup target (koji) + + 5. SOLR-1386: In build.xml, use longfile="gnu" in tar task to avoid warnings about long file names + (Mark Miller via shalin) + + 6. SOLR-1441: Make it possible to run all tests in a package (shalin) + + +Documentation +---------------------- + 1. SOLR-789: The javadoc of RandomSortField is not readable (Nicolas Lalevée via koji) + + 2. SOLR-962: Note about null handling in ModifiableSolrParams.add javadoc + (Kay Kay via hossman) + + 3. SOLR-1409: Added Solr Powered By Logos + +================== Release 1.3.0 20080915 ================== + +Upgrading from Solr 1.2 +----------------------- +IMPORTANT UPGRADE NOTE: In a master/slave configuration, all searchers/slaves +should be upgraded before the master! If the master were to be updated +first, the older searchers would not be able to read the new index format. + +The Porter snowball based stemmers in Lucene were updated (LUCENE-1142), +and are not guaranteed to be backward compatible at the index level +(the stem of certain words may have changed). Re-indexing is recommended. + +Older Apache Solr installations can be upgraded by replacing +the relevant war file with the new version. No changes to configuration +files should be needed. + +This version of Solr contains a new version of Lucene implementing +an updated index format. This version of Solr/Lucene can still read +and update indexes in the older formats, and will convert them to the new +format on the first index change. Be sure to backup your index before +upgrading in case you need to downgrade. + +Solr now recognizes HTTP Request headers related to HTTP Caching (see +RFC 2616 sec13) and will by default respond with "304 Not Modified" +when appropriate. 
This should only affect users who access Solr via +an HTTP Cache, or via a Web-browser that has an internal cache, but if +you wish to suppress this behavior an '<httpCaching never304="true"/>' +option can be added to your solrconfig.xml. See the wiki (or the +example solrconfig.xml) for more details... + http://wiki.apache.org/solr/SolrConfigXml#HTTPCaching + +In Solr 1.2, DateField did not enforce the canonical representation of +the ISO 8601 format when parsing incoming data, and did not generate +the canonical format when generating dates from "Date Math" strings +(particularly as it pertains to milliseconds ending in trailing zeros) +-- As a result equivalent dates could not always be compared properly. +This problem is corrected in Solr 1.3, but DateField users that might +have been affected by indexing inconsistent formats of equivalent +dates (ie: 1995-12-31T23:59:59Z vs 1995-12-31T23:59:59.000Z) may want +to consider reindexing to correct these inconsistencies. Users who +depend on some of the "broken" behavior of DateField in Solr 1.2 +(specifically: accepting any input that ends in a 'Z') should consider +using the LegacyDateField class as a possible alternative. Users that +desire 100% backwards compatibility should consider using the Solr 1.2 +version of DateField. + +Due to some changes in the lifecycle of TokenFilterFactories, users of +Solr 1.2 who have written Java code which constructs new instances of +StopFilterFactory, SynonymFilterFactory, or EnglishPorterFilterFactory +will need to modify their code by adding a line like the following +prior to using the factory object... + factory.inform(SolrCore.getSolrCore().getSolrConfig().getResourceLoader()); +These lifecycle changes do not affect people who use Solr "out of the +box" or who have developed their own TokenFilterFactory plugins. More +info can be found in SOLR-594. + +The python client that used to ship with Solr is no longer included in +the distribution (see client/python/README.txt). 
+ +Detailed Change List +-------------------- + +New Features + 1. SOLR-69: Adding MoreLikeThisHandler to search for similar documents using + lucene contrib/queries MoreLikeThis. MoreLikeThis is also available from + the StandardRequestHandler using ?mlt=true. (bdelacretaz, ryan) + + 2. SOLR-253: Adding KeepWordFilter and KeepWordFilterFactory. A TokenFilter + that keeps tokens with text in the registered keeplist. This behaves like + the inverse of StopFilter. (ryan) + + 3. SOLR-257: WordDelimiterFilter has a new parameter splitOnCaseChange, + which can be set to 0 to disable splitting "PowerShot" => "Power" "Shot". + (klaas) + + 4. SOLR-193: Adding SolrDocument and SolrInputDocument to represent documents + outside of the lucene Document infrastructure. This class will be used + by clients and for processing documents. (ryan) + + 5. SOLR-244: Added ModifiableSolrParams - a SolrParams implementation that + help you change values after initialization. (ryan) + + 6. SOLR-20: Added a java client interface with two implementations. One + implementation uses commons httpclient to connect to solr via HTTP. The + other connects to solr directly. Check client/java/solrj. This addition + also includes tests that start jetty and test a connection using the full + HTTP request cycle. (Darren Erik Vengroff, Will Johnson, ryan) + + 7. SOLR-133: Added StaxUpdateRequestHandler that uses StAX for XML parsing. + This implementation has much better error checking and lets you configure + a custom UpdateRequestProcessor that can selectively process update + requests depending on the request attributes. This class will likely + replace XmlUpdateRequestHandler. (Thorsten Scherler, ryan) + + 8. SOLR-264: Added RandomSortField, a utility field with a random sort order. + The seed is based on a hash of the field name, so a dynamic field + of this type is useful for generating different random sequences. 
+ This field type should only be used for sorting or as a value source + in a FunctionQuery (ryan, hossman, yonik) + + 9. SOLR-266: Adding show=schema to LukeRequestHandler to show the parsed + schema fields and field types. (ryan) + +10. SOLR-133: The UpdateRequestHandler now accepts multiple delete options + within a single request. For example, sending: + 12 will delete both 1 and 2. (ryan) + +11. SOLR-269: Added UpdateRequestProcessor plugin framework. This provides + a reasonable place to process documents after they are parsed and + before they are committed to the index. This is a good place for custom + document manipulation or document based authorization. (yonik, ryan) + +12. SOLR-260: Converting to a standard PluginLoader framework. This reworks + RequestHandlers, FieldTypes, and QueryResponseWriters to share the same + base code for loading and initializing plugins. This adds a new + configuration option to define the default RequestHandler and + QueryResponseWriter in XML using default="true". (ryan) + +13. SOLR-225: Enable pluggable highlighting classes. Allow configurable + highlighting formatters and Fragmenters. (ryan) + +14. SOLR-273/376/452/516: Added hl.maxAnalyzedChars highlighting parameter, defaulting + to 50k, hl.alternateField, which allows the specification of a backup + field to use as summary if no keywords are matched, and hl.mergeContiguous, + which combines fragments if they are adjacent in the source document. + (klaas, Grant Ingersoll, Koji Sekiguchi via klaas) + +15. SOLR-291: Control maximum number of documents to cache for any entry + in the queryResultCache via queryResultMaxDocsCached solrconfig.xml + entry. (Koji Sekiguchi via yonik) + +16. SOLR-240: New configuration setting in and + blocks supports all Lucene builtin LockFactories. + 'single' is recommended setting, but 'simple' is default for total + backwards compatibility. + (Will Johnson via hossman) + +17. 
SOLR-248: Added CapitalizationFilterFactory that creates tokens with + normalized capitalization. This filter is useful for facet display, + but will not work with a prefix query. (ryan) + SOLR-468: Change to the semantics to keep the original token, not the + token in the Map. Also switched to use Lucene's new reusable token + capabilities. (gsingers) + +18. SOLR-307: Added NGramFilterFactory and EdgeNGramFilterFactory. + (Thomas Peuss via Otis Gospodnetic) + +19. SOLR-305: analysis.jsp can be given a fieldtype instead of a field + name. (hossman) + +20. SOLR-102: Added RegexFragmenter, which splits text for highlighting + based on a given pattern. (klaas) + +21. SOLR-258: Date Faceting added to SimpleFacets. Facet counts + computed for ranges of size facet.date.gap (a DateMath expression) + between facet.date.start and facet.date.end. (hossman) + +22. SOLR-196: A PHP serialized "phps" response writer that returns a + serialized array that can be used with the PHP function unserialize, + and a PHP response writer "php" that may be used by eval. + (Nick Jenkin, Paul Borgermans, Pieter Berkel via yonik) + +23. SOLR-308: A new UUIDField class which accepts UUID string values, + as well as the special value of "NEW" which triggers generation of + a new random UUID. + (Thomas Peuss via hossman) + +24. SOLR-349: New FunctionQuery functions: sum, product, div, pow, log, + sqrt, abs, scale, map. Constants may now be used as a value source. + (yonik) + +25. SOLR-359: Add field type className to Luke response, and enabled access + to the detailed field information from the solrj client API. + (Grant Ingersoll via ehatcher) + +26. SOLR-334: Pluggable query parsers. Allows specification of query + type and arguments as a prefix on a query string. (yonik) + +27. SOLR-351: External Value Source. An external file may be used + to specify the values of a field, currently usable as + a ValueSource in a FunctionQuery. (yonik) + +28. 
SOLR-395: Many new features for the spell checker implementation, including + an extended response mode with much richer output, multi-word spell checking, + and a bevy of new and renamed options (see the wiki). + (Mike Krimerman, Scott Taber via klaas). + +29. SOLR-408: Added PingRequestHandler and deprecated SolrCore.getPingQueryRequest(). + Ping requests should be configured using standard RequestHandler syntax in + solrconfig.xml rather than using the <pingQuery> syntax. + (Karsten Sperling via ryan) + +30. SOLR-281: Added a 'Search Component' interface and converted StandardRequestHandler + and DisMaxRequestHandler to use this framework. + (Sharad Agarwal, Henri Biestro, yonik, ryan) + +31. SOLR-176: Add detailed timing data to query response output. The SearchHandler + interface now returns how long each section takes. (klaas) + +32. SOLR-414: Plugin initialization now supports SolrCore and ResourceLoader "Aware" + plugins. Plugins that implement SolrCoreAware or ResourceLoaderAware are + informed about the SolrCore/ResourceLoader. (Henri Biestro, ryan) + +33. SOLR-350: Support multiple SolrCores running in the same solr instance and allows + runtime management for any running SolrCore. If a solr.xml file exists + in solr.home, this file is used to instantiate multiple cores and enables runtime + core manipulation. For more information see: http://wiki.apache.org/solr/CoreAdmin + (Henri Biestro, ryan) + +34. SOLR-447: Added a single request handler that will automatically register all + standard admin request handlers. This replaces the need to register (and maintain) + the set of admin request handlers. Assuming solrconfig.xml includes: + + This will register: Luke/SystemInfo/PluginInfo/ThreadDump/PropertiesRequestHandler. + (ryan) + +35. SOLR-142: Added RawResponseWriter and ShowFileRequestHandler. This returns config + files directly. If AdminHandlers are configured, this will be added automatically. 
+ The jsp files /admin/get-file.jsp and /admin/raw-schema.jsp have been deprecated. + The deprecated will be automatically registered with + a ShowFileRequestHandler instance for backwards compatibility. (ryan) + +36. SOLR-446: TextResponseWriter can write SolrDocuments and SolrDocumentLists the + same way it writes Document and DocList. (yonik, ryan) + +37. SOLR-418: Adding a query elevation component. This is an optional component to + elevate some documents to the top positions (or exclude them) for a given query. + (ryan) + +38. SOLR-478: Added ability to get back unique key information from the LukeRequestHandler. + (gsingers) + +39. SOLR-127: HTTP Caching awareness. Solr now recognizes HTTP Request + headers related to HTTP Caching (see RFC 2616 sec13) and will respond + with "304 Not Modified" when appropriate. New options have been added + to solrconfig.xml to influence this behavior. + (Thomas Peuss via hossman) + +40. SOLR-303: Distributed Search over HTTP. Specification of shards + argument causes Solr to query those shards and merge the results + into a single response. Querying, field faceting (sorted only), + query faceting, highlighting, and debug information are supported + in distributed mode. + (Sharad Agarwal, Patrick O'Leary, Sabyasachi Dalal, Stu Hood, + Jayson Minard, Lars Kotthoff, ryan, yonik) + +41. SOLR-356: Pluggable functions (value sources) that allow + registration of new functions via solrconfig.xml + (Doug Daniels via yonik) + +42. SOLR-494: Added cool admin Ajaxed schema explorer. + (Greg Ludington via ehatcher) + +43. SOLR-497: Added date faceting to the QueryResponse in SolrJ + and QueryResponseTest (Shalin Shekhar Mangar via gsingers) + +44. SOLR-486: Binary response format, faster and smaller + than XML and JSON response formats (use wt=javabin). + BinaryResponseParser for utilizing the binary format via SolrJ + and is now the default. + (Noble Paul, yonik) + +45. 
SOLR-521: StopFilterFactory support for "enablePositionIncrements" + (Walter Ferrara via hossman) + +46. SOLR-557: Added SolrCore.getSearchComponents() to return an unmodifiable Map. (gsingers) + +47. SOLR-516: Added hl.maxAlternateFieldLength parameter, to set max length for hl.alternateField + (Koji Sekiguchi via klaas) + +48. SOLR-319: Changed SynonymFilterFactory to "tokenize" synonyms file. + To use a tokenizer, specify "tokenizerFactory" attribute in . + For example: + + + (koji) + +49. SOLR-515: Added SimilarityFactory capability to schema.xml, + making config file parameters usable in the construction of + the global Lucene Similarity implementation. + (ehatcher) + +50. SOLR-536: Add a DocumentObjectBinder to solrj that converts Objects to and + from SolrDocuments. (Noble Paul via ryan) + +51. SOLR-595: Add support for Field level boosting in the MoreLikeThis Handler. + (Tom Morton, gsingers) + +52. SOLR-572: Added SpellCheckComponent and org.apache.solr.spelling package to support more spell + checking functionality. Also includes ability to add your own SolrSpellChecker implementation that + plugs in. See http://wiki.apache.org/solr/SpellCheckComponent for more details + (Shalin Shekhar Mangar, Bojan Smid, gsingers) + +53. SOLR-679: Added accessor methods to Lucene based spell checkers (gsingers) + +54. SOLR-423: Added Request Handler close hook notification so that RequestHandlers can be notified + when a core is closing. (gsingers, ryan) + +55. SOLR-603: Added ability to partially optimize. (gsingers) + +56. SOLR-483: Add byte/short sorting support (gsingers) + +57. SOLR-14: Add preserveOriginal flag to WordDelimiterFilter + (Geoffrey Young, Trey Hyde, Ankur Madnani, yonik) + +58. SOLR-502: Add search timeout support. (Sean Timm via yonik) + +59. SOLR-605: Add the ability to register callbacks programatically (ryan, Noble Paul) + +60. SOLR-610: hl.maxAnalyzedChars can be -1 to highlight everything (Lars Kotthoff via klaas) + +61. 
SOLR-522: Make analysis.jsp show payloads. (Tricia Williams via yonik) + +62. SOLR-611: Expose sort_values returned by QueryComponent in SolrJ's QueryResponse + (Dan Rosher via shalin) + +63. SOLR-256: Support exposing Solr statistics through JMX (Sharad Agrawal, shalin) + +64. SOLR-666: Expose warmup time in statistics for SolrIndexSearcher and LRUCache (shalin) + +65. SOLR-663: Allow multiple files for stopwords, keepwords, protwords and synonyms + (Otis Gospodnetic, shalin) + +66. SOLR-469: Added DataImportHandler as a contrib project which makes indexing data from Databases, + XML files and HTTP data sources into Solr quick and easy. Includes API and implementations for + supporting multiple data sources, processors and transformers for importing data. Supports full + data imports as well as incremental (delta) indexing. See http://wiki.apache.org/solr/DataImportHandler + for more details. (Noble Paul, shalin) + +67. SOLR-622: SpellCheckComponent supports auto-loading indices on startup and optionally, (re)builds + indices on newSearcher event, if configured in solrconfig.xml (shalin) + +68. SOLR-554: Hierarchical JDK log level selector for SOLR Admin replaces logging.jsp + (Sean Timm via shalin) + +69. SOLR-506: Emitting HTTP Cache headers can be enabled or disabled through configuration on a + per-handler basis (shalin) + +70. SOLR-716: Added support for properties in configuration files. Properties can be specified in + solr.xml and can be used in solrconfig.xml and schema.xml (Henri Biestro, hossman, ryan, shalin) + +71. SOLR-1129 : Support binding dynamic fields to beans in SolrJ (Avlesh Singh , noble) + +72. SOLR-920 : Cache and reuse IndexSchema . A new attribute added in solr.xml called 'shareSchema' (noble) + +Changes in runtime behavior + 1. SOLR-559: use Lucene updateDocument, deleteDocuments methods. This + removes the maxBufferedDeletes parameter added by SOLR-310 as Lucene + now manages the deletes. 
This provides slightly better indexing + performance and makes overwrites atomic, eliminating the possibility of + a crash causing duplicates. (yonik) + + 2. SOLR-689 / SOLR-695: If you have used "MultiCore" functionality in an unreleased + version of 1.3-dev, many classes and configs have been renamed for the official + 1.3 release. Specifically, solr.xml has replaced multicore.xml, and uses a slightly + different syntax. The solrj classes: MultiCore{Request/Response/Params} have been + renamed: CoreAdmin{Request/Response/Params} (hossman, ryan, Henri Biestro) + + 3. SOLR-647: reference count the SolrCore uses to prevent a premature + close while a core is still in use. (Henri Biestro, Noble Paul, yonik) + + 4. SOLR-737: SolrQueryParser now uses a ConstantScoreQuery for wildcard + queries that prevent an exception from being thrown when the number + of matching terms exceeds the BooleanQuery clause limit. (yonik) + +Optimizations + 1. SOLR-276: improve JSON writer speed. (yonik) + + 2. SOLR-310: bound and reduce memory usage by providing parameter, + which flushes deleted without forcing the user to use for this purpose. + (klaas) + + 3. SOLR-348: short-circuit faceting if less than mincount docs match. (yonik) + + 4. SOLR-354: Optimize removing all documents. Now when a delete by query + of *:* is issued, the current index is removed. (yonik) + + 5. SOLR-377: Speed up response writers. (yonik) + + 6. SOLR-342: Added support into the SolrIndexWriter for using several new features of the new + LuceneIndexWriter, including: setRAMBufferSizeMB(), setMergePolicy(), setMergeScheduler. + Also, added support to specify Lucene's autoCommit functionality (not to be confused with Solr's + similarly named autoCommit functionality) via the config. item. See the test + and example solrconfig.xml section for usage. Performance during indexing should + be significantly increased by moving up to 2.3 due to Lucene's new indexing capabilities. 
+ Furthermore, the setRAMBufferSizeMB makes it more logical to decide on tuning factors related to + indexing. For best performance, leave the mergePolicy and mergeScheduler as the defaults and set + ramBufferSizeMB instead of maxBufferedDocs. The best value for this depends on the types of + documents in use. 32 should be a good starting point, but reports have shown up to 48 MB provides + good results. Note, it is acceptable to set both ramBufferSizeMB and maxBufferedDocs, and Lucene + will flush based on whichever limit is reached first. (gsingers) + + 7. SOLR-330: Converted TokenStreams to use Lucene's new char array based + capabilities. (gsingers) + + 8. SOLR-624: Only take snapshots if there are differences to the index (Richard Trey Hyde via gsingers) + + 9. SOLR-587: Delete by Query performance greatly improved by using + new underlying Lucene IndexWriter implementation. (yonik) + +10. SOLR-730: Use read-only IndexReaders that don't synchronize + isDeleted(). This will speed up function queries and *:* queries + as well as improve their scalability on multi-CPU systems. + (Mark Miller via yonik) + +Bug Fixes + 1. Make TextField respect sortMissingFirst and sortMissingLast fields. + (J.J. Larrea via yonik) + + 2. autoCommit/maxDocs was not working properly when large autoCommit/maxTime + was specified (klaas) + + 3. SOLR-283: autoCommit was not working after delete. (ryan) + + 4. SOLR-286: ContentStreamBase was not using default encoding for getBytes() + (Toru Matsuzawa via ryan) + + 5. SOLR-292: Fix MoreLikeThis facet counting. (Pieter Berkel via ryan) + + 6. SOLR-297: Fix bug in RequiredSolrParams where requiring a field + specific param would fail if a general default value had been supplied. + (hossman) + + 7. SOLR-331: Fix WordDelimiterFilter handling of offsets for synonyms or + other injected tokens that can break highlighting. (yonik) + + 8. 
SOLR-282: Snapshooter does not work on Solaris and OS X since the cp command + there does not have the -l option. Also updated commit/optimize related + scripts to handle both old and new response format. (bill) + + 9. SOLR-294: Logging of elapsed time broken on Solaris because the date command + there does not support the %s output format. (bill) + +10. SOLR-136: Snappuller - "date -d" and locales don't mix. (Jürgen Hermann via bill) + +11. SOLR-333: Changed distributiondump.jsp to use Solr HOME instead of CWD to set path. + +12. SOLR-393: Removed duplicate contentType from raw-schema.jsp. (bill) + +13. SOLR-413: Requesting a large number of documents to be returned (limit) + can result in an out-of-memory exception, even for a small index. (yonik) + +14. The CSV loader incorrectly threw an exception when given + header=true (the default). (ryan, yonik) + +15. SOLR-449: the python and ruby response writers are now able to correctly + output NaN and Infinity in their respective languages. (klaas) + +16. SOLR-42: HTMLStripReader tokenizers now preserve correct source + offsets for highlighting. (Grant Ingersoll via yonik) + +17. SOLR-481: Handle UnknownHostException in _info.jsp (gsingers) + +18. SOLR-324: Add proper support for Long and Doubles in sorting, etc. (gsingers) + +19. SOLR-496: Cache-Control max-age changed to Long so Expires + calculation won't cause overflow. (Thomas Peuss via hossman) + +20. SOLR-535: Fixed typo (Tokenzied -> Tokenized) in schema.jsp (Thomas Peuss via billa) + +21. SOLR-529: Better error messages from SolrQueryParser when field isn't + specified and there is no defaultSearchField in schema.xml + (Lars Kotthoff via hossman) + +22. SOLR-530: Better error messages/warnings when parsing schema.xml: + field using bogus fieldtype and multiple copyFields to a non-multiValue + field. (Shalin Shekhar Mangar via hossman) + +23. SOLR-528: Better error message when defaultSearchField is bogus or not + indexed. 
(Lars Kotthoff via hossman) + +24. SOLR-533: Fixed tests so they don't use hardcoded port numbers. + (hossman) + +25. SOLR-400: SolrExceptionTest should now handle using OpenDNS as a DNS provider (gsingers) + +26. SOLR-541: Legacy XML update support (provided by SolrUpdateServlet + when no RequestHandler is mapped to "/update") now logs error correctly. + (hossman) + +27. SOLR-267: Changed logging to report number of hits, and also provide a mechanism to add log + messages to be output by the SolrCore via a NamedList toLog member variable. + (Will Johnson, yseeley, gsingers) + + SOLR-267: Removed adding values to the HTTP headers in SolrDispatchFilter (gsingers) + +28. SOLR-509: Moved firstSearcher event notification to the end of the SolrCore constructor + (Koji Sekiguchi via gsingers) + +29. SOLR-470, SOLR-552, SOLR-544, SOLR-701: Multiple fixes to DateField + regarding lenient parsing of optional milliseconds, and correct + formatting using the canonical representation. LegacyDateField has + been added for people who have come to depend on the existing + broken behavior. (hossman, Stefan Oestreicher) + +30. SOLR-539: Fix for non-atomic long counters and a cast fix to avoid divide + by zero. (Sean Timm via Otis Gospodnetic) + +31. SOLR-514: Added explicit media-type with UTF* charset to *.xsl files that + don't already have one. (hossman) + +32. SOLR-505: Give RequestHandlers the possibility to suppress the generation + of HTTP caching headers. (Thomas Peuss via Otis Gospodnetic) + +33. SOLR-553: Handle highlighting of phrase terms better when + hl.usePhraseHighlighter=true URL param is used. + (Bojan Smid via Otis Gospodnetic) + +34. SOLR-590: Limitation in pgrep on Linux platform breaks script-utils fixUser. + (Hannes Schmidt via billa) + +35. SOLR-597: SolrServlet no longer "caches" SolrCore. This was causing + problems in Resin, and could potentially cause problems for customized + usages of SolrServlet. + +36. 
SOLR-585: Now sets the QParser on the ResponseBuilder (gsingers) + +37. SOLR-604: If the spellchecking path is relative, make it relative to the Solr Data Directory. + (Shalin Shekhar Mangar via gsingers) + +38. SOLR-584: Make stats.jsp and stats.xsl more robust. + (Yousef Ourabi and hossman) + +39. SOLR-443: SolrJ: Declare UTF-8 charset on POSTed parameters + to avoid problems with servlet containers that default to latin-1 + and allow switching of the exact POST mechanism for parameters + via useMultiPartPost in CommonsHttpSolrServer. + (Lars Kotthoff, Andrew Schurman, ryan, yonik) + +40. SOLR-556: multi-valued fields always highlighted in disparate snippets + (Lars Kotthoff via klaas) + +41. SOLR-501: Fix admin/analysis.jsp UTF-8 input for some other servlet + containers such as Tomcat. (Hiroaki Kawai, Lars Kotthoff via yonik) + +42. SOLR-616: SpellChecker accuracy configuration is not applied for FileBasedSpellChecker. + Apply it for FileBasedSpellChecker and IndexBasedSpellChecker both. + (shalin) + +43. SOLR-648: SpellCheckComponent throws NullPointerException on using spellcheck.q request + parameter after restarting Solr, if reload is called but build is not called. + (Jonathan Lee, shalin) + +44. SOLR-598: DebugComponent now always occurs last in the SearchHandler list unless the + components are explicitly declared. (gsingers) + +45. SOLR-676: DataImportHandler should use UpdateRequestProcessor API instead of directly + using UpdateHandler. (shalin) + +46. SOLR-696: Fixed bug in NamedListCodec in regards to serializing Iterable objects. (gsingers) + +47. SOLR-669: snappuller fix for FreeBSD/Darwin (Richard "Trey" Hyde via Otis Gospodnetic) + +48. SOLR-606: Fixed spell check collation offset issue. (Stefan Oestreicher, Geoffrey Young, gsingers) + +49. SOLR-589: Improved handling of badly formatted query strings (Sean Timm via Otis Gospodnetic) + +50. 
SOLR-749: Allow QParser and ValueSourceParsers to be extended with same name (hossman, gsingers) + +Other Changes + 1. SOLR-135: Moved common classes to org.apache.solr.common and altered the + build scripts to make two jars: apache-solr-1.3.jar and + apache-solr-1.3-common.jar. This common.jar can be used in client code; + It does not have lucene or junit dependencies. The original classes + have been replaced with a @Deprecated extended class and are scheduled + to be removed in a later release. While this change does not affect API + compatibility, it is recommended to update references to these + deprecated classes. (ryan) + + 2. SOLR-268: Tweaks to post.jar so it prints the error message from Solr. + (Brian Whitman via hossman) + + 3. Upgraded to Lucene 2.2.0; June 18, 2007. + + 4. SOLR-215: Static access to SolrCore.getSolrCore() and SolrConfig.config + have been deprecated in order to support multiple loaded cores. + (Henri Biestro via ryan) + + 5. SOLR-367: The create method in all TokenFilter and Tokenizer Factories + provided by Solr now declare their specific return types instead of just + using "TokenStream" (hossman) + + 6. SOLR-396: Hooks added to build system for automatic generation of (stub) + Tokenizer and TokenFilter Factories. + Also: new Factories for all Tokenizers and TokenFilters provided by the + lucene-analyzers-2.2.0.jar -- includes support for German, Chinese, + Russian, Dutch, Greek, Brazilian, Thai, and French. (hossman) + + 7. Upgraded to commons-CSV r609327, which fixes escaping bugs and + introduces new escaping and whitespace handling options to + increase compatibility with different formats. (yonik) + + 8. Upgraded to Lucene 2.3.0; Jan 23, 2008. + + 9. SOLR-451: Changed analysis.jsp to use POST instead of GET, also made the input area a + bit bigger (gsingers) + +10. Upgrade to Lucene 2.3.1 + +11. SOLR-531: Different exit code for rsyncd-start and snappuller if disabled (Thomas Peuss via billa) + +12. 
SOLR-550: Clarified DocumentBuilder addField javadocs (gsingers) + +13. Upgrade to Lucene 2.3.2 + +14. SOLR-518: Changed luke.xsl to use divs w/css for generating histograms + instead of SVG (Thomas Peuss via hossman) + +15. SOLR-592: Added ShardParams interface and changed several string literals + to references to constants in CommonParams. + (Lars Kotthoff via Otis Gospodnetic) + +16. SOLR-520: Deprecated unused LengthFilter since already core in + Lucene-Java (hossman) + +17. SOLR-645: Refactored SimpleFacetsTest (Lars Kotthoff via hossman) + +18. SOLR-591: Changed Solrj default value for facet.sort to true (Lars Kotthoff via Shalin) + +19. Upgraded to Lucene 2.4-dev (r669476) to support SOLR-572 (gsingers) + +20. SOLR-636: Improve/simplify example configs; and make index.jsp + links more resilient to configs loaded via an InputStream + (Lars Kotthoff, hossman) + +21. SOLR-682: Scripts now support FreeBSD (Richard Trey Hyde via gsingers) + +22. SOLR-489: Added in deprecation comments. (Sean Timm, Lars Kothoff via gsingers) + +23. SOLR-692: Migrated to stable released builds of StAX API 1.0.1 and StAX 1.2.0 (shalin) +24. Upgraded to Lucene 2.4-dev (r686801) (yonik) +25. Upgraded to Lucene 2.4-dev (r688745) 27-Aug-2008 (yonik) +26. Upgraded to Lucene 2.4-dev (r691741) 03-Sep-2008 (yonik) +27. Replaced the StAX reference implementation with the geronimo + StAX API jar, and the Woodstox StAX implementation. (yonik) + +Build + 1. SOLR-411. Changed the names of the Solr JARs to use the defacto standard JAR names based on + project-name-version.jar. This yields, for example: + apache-solr-common-1.3-dev.jar + apache-solr-solrj-1.3-dev.jar + apache-solr-1.3-dev.jar + + 2. SOLR-479: Added clover code coverage targets for committers and the nightly build. Requires + the Clover library, as licensed to Apache and only available privately. To run: + ant -Drun.clover=true clean clover test generate-clover-reports + + 3. SOLR-510: Nightly release includes client sources. 
(koji) + + 4. SOLR-563: Modified the build process to build contrib projects + (Shalin Shekhar Mangar via Otis Gospodnetic) + + 5. SOLR-673: Modify build file to create javadocs for core, solrj, contrib and "all inclusive" (shalin) + + 6. SOLR-672: Nightly release includes contrib sources. (Jeremy Hinegardner, shalin) + + 7. SOLR-586: Added ant target and POM files for building maven artifacts of the Solr core, common, + client and contrib. The target can publish artifacts with source and javadocs. + (Spencer Crissman, Craig McClanahan, shalin) + +================== Release 1.2, 20070602 ================== + +Upgrading from Solr 1.1 +------------------------------------- +IMPORTANT UPGRADE NOTE: In a master/slave configuration, all searchers/slaves +should be upgraded before the master! If the master were to be updated +first, the older searchers would not be able to read the new index format. + +Older Apache Solr installations can be upgraded by replacing +the relevant war file with the new version. No changes to configuration +files should be needed. + +This version of Solr contains a new version of Lucene implementing +an updated index format. This version of Solr/Lucene can still read +and update indexes in the older formats, and will convert them to the new +format on the first index change. One change in the new index format +is that all "norms" are kept in a single file, greatly reducing the number +of files per segment. Users of compound file indexes will want to consider +converting to the non-compound format for faster indexing and slightly better +search concurrency. + +The JSON response format for facets has changed to make it easier for +clients to retain sorted order. Use json.nl=map explicitly in clients +to get the old behavior, or add it as a default to the request handler +in solrconfig.xml + +The Lucene based Solr query syntax is slightly more strict. +A ':' in a field value must be escaped or the whole value must be quoted. 
+ +The Solr "Request Handler" framework has been updated in two key ways: +First, if a Request Handler is registered in solrconfig.xml with a name +starting with "/" then it can be accessed using path-based URL, instead of +using the legacy "/select?qt=name" URL structure. Second, the Request +Handler framework has been extended making it possible to write Request +Handlers that process streams of data for doing updates, and there is a +new-style Request Handler for XML updates given the name of "/update" in +the example solrconfig.xml. Existing installations without this "/update" +handler will continue to use the old update servlet and should see no +changes in behavior. For new-style update handlers, errors are now +reflected in the HTTP status code, Content-type checking is more strict, +and the response format has changed and is controllable via the wt +parameter. + + + +Detailed Change List +-------------------- + +New Features + 1. SOLR-82: Default field values can be specified in the schema.xml. + (Ryan McKinley via hossman) + + 2. SOLR-89: Two new TokenFilters with corresponding Factories... + * TrimFilter - Trims leading and trailing whitespace from Tokens + * PatternReplaceFilter - applies a Pattern to each token in the + stream, replacing match occurances with a specified replacement. + (hossman) + + 3. SOLR-91: allow configuration of a limit of the number of searchers + that can be warming in the background. This can be used to avoid + out-of-memory errors, or contention caused by more and more searchers + warming in the background. An error is thrown if the limit specified + by maxWarmingSearchers in solrconfig.xml is exceeded. (yonik) + + 4. SOLR-106: New faceting parameters that allow specification of a + minimum count for returned facets (facet.mincount), paging through facets + (facet.offset, facet.limit), and explicit sorting (facet.sort). + facet.zeros is now deprecated. (yonik) + + 5. SOLR-80: Negative queries are now allowed everywhere. 
Negative queries + are generated and cached as their positive counterpart, speeding + generation and generally resulting in smaller sets to cache. + Set intersections in SolrIndexSearcher are more efficient, + starting with the smallest positive set, subtracting all negative + sets, then intersecting with all other positive sets. (yonik) + + 6. SOLR-117: Limit a field faceting to constraints with a prefix specified + by facet.prefix or f..facet.prefix. (yonik) + + 7. SOLR-107: JAVA API: Change NamedList to use Java5 generics + and implement Iterable (Ryan McKinley via yonik) + + 8. SOLR-104: Support for "Update Plugins" -- RequestHandlers that want + access to streams of data for doing updates. ContentStreams can come + from the raw POST body, multi-part form data, or remote URLs. + Included in this change is a new SolrDispatchFilter that allows + RequestHandlers registered with names that begin with a "/" to be + accessed using a URL structure based on that name. + (Ryan McKinley via hossman) + + 9. SOLR-126: DirectUpdateHandler2 supports autocommitting after a specified time + (in ms), using 10000. + (Ryan McKinley via klaas). + +10. SOLR-116: IndexInfoRequestHandler added. (Erik Hatcher) + +11. SOLR-79: Add system property ${[:]} substitution for + configuration files loaded, including schema.xml and solrconfig.xml. + (Erik Hatcher with inspiration from Andrew Saar) + +12. SOLR-149: Changes to make Solr more easily embeddable, in addition + to logging which request handler handled each request. + (Ryan McKinley via yonik) + +13. SOLR-86: Added standalone Java-based command-line updater. + (Erik Hatcher via Bertrand Delecretaz) + +14. SOLR-152: DisMaxRequestHandler now supports configurable alternate + behavior when q is not specified. A "q.alt" param can be specified + using SolrQueryParser syntax as a mechanism for specifying what query + the dismax handler should execute if the main user query (q) is blank. + (Ryan McKinley via hossman) + +15. 
SOLR-158: new "qs" (Query Slop) param for DisMaxRequestHandler + allows for specifying the amount of default slop to use when parsing + explicit phrase queries from the user. + (Adam Hiatt via hossman) + +16. SOLR-81: SpellCheckerRequestHandler that uses the SpellChecker from + the Lucene contrib. + (Otis Gospodnetic and Adam Hiatt) + +17. SOLR-182: allow lazy loading of request handlers on first request. + (Ryan McKinley via yonik) + +18. SOLR-81: More SpellCheckerRequestHandler enhancements, inlcluding + support for relative or absolute directory path configurations, as + well as RAM based directory. (hossman) + +19. SOLR-197: New parameters for input: stream.contentType for specifying + or overriding the content type of input, and stream.file for reading + local files. (Ryan McKinley via yonik) + +20. SOLR-66: CSV data format for document additions and updates. (yonik) + +21. SOLR-184: add echoHandler=true to responseHeader, support echoParams=all + (Ryan McKinley via ehatcher) + +22. SOLR-211: Added a regex PatternTokenizerFactory. This extracts tokens + from the input string using a regex Pattern. (Ryan McKinley) + +23. SOLR-162: Added a "Luke" request handler and other admin helpers. + This exposes the system status through the standard requestHandler + framework. (ryan) + +24. SOLR-212: Added a DirectSolrConnection class. This lets you access + solr using the standard request/response formats, but does not require + an HTTP connection. It is designed for embedded applications. (ryan) + +25. SOLR-204: The request dispatcher (added in SOLR-104) can handle + calls to /select. This offers uniform error handling for /update and + /select. To enable this behavior, you must add: + to your solrconfig.xml + See the example solrconfig.xml for details. (ryan) + +26. SOLR-170: StandardRequestHandler now supports a "sort" parameter. + Using the ';' syntax is still supported, but it is recommended to + transition to the new syntax. (ryan) + +27. 
SOLR-181: The index schema now supports "required" fields. Attempts + to add a document without a required field will fail, returning a + descriptive error message. By default, the uniqueKey field is + a required field. This can be disabled by setting required=false + in schema.xml. (Greg Ludington via ryan) + +28. SOLR-217: Fields configured in the schema to be neither indexed or + stored will now be quietly ignored by Solr when Documents are added. + The example schema has a comment explaining how this can be used to + ignore any "unknown" fields. + (Will Johnson via hossman) + +29. SOLR-227: If schema.xml defines multiple fieldTypes, fields, or + dynamicFields with the same name, a severe error will be logged rather + then quietly continuing. Depending on the + settings, this may halt the server. Likewise, if solrconfig.xml + defines multiple RequestHandlers with the same name it will also add + an error. (ryan) + +30. SOLR-226: Added support for dynamic field as the destination of a + copyField using glob (*) replacement. (ryan) + +31. SOLR-224: Adding a PhoneticFilterFactory that uses apache commons codec + language encoders to build phonetically similar tokens. This currently + supports: DoubleMetaphone, Metaphone, Soundex, and RefinedSoundex (ryan) + +32. SOLR-199: new n-gram tokenizers available via NGramTokenizerFactory + and EdgeNGramTokenizerFactory. (Adam Hiatt via yonik) + +33. SOLR-234: TrimFilter can update the Token's startOffset and endOffset + if updateOffsets="true". By default the Token offsets are unchanged. + (ryan) + +34. SOLR-208: new example_rss.xsl and example_atom.xsl to provide more + examples for people about the Solr XML response format and how they + can transform it to suit different needs. + (Brian Whitman via hossman) + +35. SOLR-249: Deprecated SolrException( int, ... ) constructors in favor + of constructors that takes an ErrorCode enum. This will ensure that + all SolrExceptions use a valid HTTP status code. (ryan) + +36. 
SOLR-386: Abstracted SolrHighlighter and moved existing implementation + to DefaultSolrHighlighter. Adjusted SolrCore and solrconfig.xml so + that highlighter is configurable via a class attribute. Allows users + to use their own highlighter implementation. (Tricia Williams via klaas) + +Changes in runtime behavior + 1. Highlighting using DisMax will only pick up terms from the main + user query, not boost or filter queries (klaas). + + 2. SOLR-125: Change default of json.nl to flat, change so that + json.nl only affects items where order matters (facet constraint + listings). Fix JSON output bug for null values. Internal JAVA API: + change most uses of NamedList to SimpleOrderedMap. (yonik) + + 3. A new method "getSolrQueryParser" has been added to the IndexSchema + class for retrieving a new SolrQueryParser instance with all options + specified in the schema.xml's block set. The + documentation for the SolrQueryParser constructor and it's use of + IndexSchema have also been clarified. + (Erik Hatcher and hossman) + + 4. DisMaxRequestHandler's bq, bf, qf, and pf parameters can now accept + multiple values (klaas). + + 5. Query are re-written before highlighting is performed. This enables + proper highlighting of prefix and wildcard queries (klaas). + + 6. A meaningful exception is raised when attempting to add a doc missing + a unique id if it is declared in the schema and allowDups=false. + (ryan via klaas) + + 7. SOLR-183: Exceptions with error code 400 are raised when + numeric argument parsing fails. RequiredSolrParams class added + to facilitate checking for parameters that must be present. + (Ryan McKinley, J.J. Larrea via yonik) + + 8. SOLR-179: By default, solr will abort after any severe initalization + errors. This behavior can be disabled by setting: + false + in solrconfig.xml (ryan) + + 9. The example solrconfig.xml maps /update to XmlUpdateRequestHandler using + the new request dispatcher (SOLR-104). 
This requires posted content to + have a valid contentType: curl -H 'Content-type:text/xml; charset=utf-8' + The response format matches that of /select and returns standard error + codes. To enable solr1.1 style /update, do not map "/update" to any + handler in solrconfig.xml (ryan) + +10. SOLR-231: If a charset is not specified in the contentType, + ContentStream.getReader() will use UTF-8 encoding. (ryan) + +11. SOLR-230: More options for post.jar to support stdin, xml on the + commandline, and defering commits. Tutorial modified to take + advantage of these options so there is no need for curl. + (hossman) + +12. SOLR-128: Upgraded Jetty to the latest stable release 6.1.3 (ryan) + +Optimizations + 1. SOLR-114: HashDocSet specific implementations of union() and andNot() + for a 20x performance improvement for those set operations, and a new + hash algorithm speeds up exists() by 10% and intersectionSize() by 8%. + (yonik) + + 2. SOLR-115: Solr now uses BooleanQuery.clauses() instead of + BooleanQuery.getClauses() in any situation where there is no risk of + modifying the original query. + (hossman) + + 3. SOLR-221: Speed up sorted faceting on multivalued fields by ~60% + when the base set consists of a relatively large portion of the + index. (yonik) + + 4. SOLR-221: Added a facet.enum.cache.minDf parameter which avoids + using the filterCache for terms that match few documents, trading + decreased memory usage for increased query time. (yonik) + +Bug Fixes + 1. SOLR-87: Parsing of synonym files did not correctly handle escaped + whitespace such as \r\n\t\b\f. (yonik) + + 2. SOLR-92: DOMUtils.getText (used when parsing config files) did not + work properly with many DOM implementations when dealing with + "Attributes". (Ryan McKinley via hossman) + + 3. SOLR-9,SOLR-99: Tighten up sort specification error checking, throw + exceptions for missing sort specifications or a sort on a non-indexed + field. (Ryan McKinley via yonik) + + 4. 
SOLR-145: Fix for bug introduced in SOLR-104 where some Exceptions + were being ignored by all "out of the box" RequestHandlers. (hossman) + + 5. SOLR-166: JNDI solr.home code refactoring. SOLR-104 moved + some JNDI related code to the init method of a Servlet Filter - + according to the Servlet Spec, all Filter's should be initialized + prior to initializing any Servlets, but this is not the case in at + least one Servlet Container (Resin). This "bug fix" refactors + this JNDI code so that it should be executed the first time any + attempt is made to use the solr.home dir. + (Ryan McKinley via hossman) + + 6. SOLR-173: Bug fix to SolrDispatchFilter to reduce "too many open + files" problem was that SolrDispatchFilter was not closing requests + when finished. Also modified ResponseWriters to only fetch a Searcher + reference if necessary for writing out DocLists. + (Ryan McKinley via hossman) + + 7. SOLR-168: Fix display positioning of multiple tokens at the same + position in analysis.jsp (yonik) + + 8. SOLR-167: The SynonymFilter sometimes generated incorrect offsets when + multi token synonyms were mached in the source text. (yonik) + + 9. SOLR-188: bin scripts do not support non-default webapp names. Added "-U" + option to specify a full path to the update url, overriding the + "-h" (hostname), "-p" (port) and "-w" (webapp name) parameters. + (Jeff Rodenburg via billa) + +10. SOLR-198: RunExecutableListener always waited for the process to + finish, even when wait="false" was set. (Koji Sekiguchi via yonik) + +11. SOLR-207: Changed distribution scripts to remove recursive find + and avoid use of "find -maxdepth" on platforms where it is not + supported. (yonik) + +12. SOLR-222: Changing writeLockTimeout in solrconfig.xml did not + change the effective timeout. (Koji Sekiguchi via yonik) + +13. Changed the SOLR-104 RequestDispatcher so that /select?qt=xxx can not + access handlers that start with "/". 
This makes path based authentication + possible for path based request handlers. (ryan) + +14. SOLR-214: Some servlet containers (including Tomcat and Resin) do not + obey the specified charset. Rather then letting the the container handle + it solr now uses the charset from the header contentType to decode posted + content. Using the contentType: "text/xml; charset=utf-8" will force + utf-8 encoding. If you do not specify a contentType, it will use the + platform default. (Koji Sekiguchi via ryan) + +15. SOLR-241: Undefined system properties used in configuration files now + cause a clear message to be logged rather than an obscure exception thrown. + (Koji Sekiguchi via ehatcher) + +Other Changes + 1. Updated to Lucene 2.1 + + 2. Updated to Lucene 2007-05-20_00-04-53 + +================== Release 1.1.0, 20061222 ================== + +Status +------ +This is the first release since Solr joined the Incubator, and brings many +new features and performance optimizations including highlighting, +faceted browsing, and JSON/Python/Ruby response formats. + + +Upgrading from previous Solr versions +------------------------------------- +Older Apache Solr installations can be upgraded by replacing +the relevant war file with the new version. No changes to configuration +files are needed and the index format has not changed. + +The default version of the Solr XML response syntax has been changed to 2.2. +Behavior can be preserved for those clients not explicitly specifying a +version by adding a default to the request handler in solrconfig.xml + +By default, Solr will no longer use a searcher that has not fully warmed, +and requests will block in the meantime. To change back to the previous +behavior of using a cold searcher in the event there is no other +warm searcher, see the useColdSearcher config item in solrconfig.xml + +The XML response format when adding multiple documents to the collection +in a single command has changed to return a single . 
+ + +Detailed Change List +-------------------- + +New Features + 1. added support for setting Lucene's positionIncrementGap + 2. Admin: new statistics for SolrIndexSearcher + 3. Admin: caches now show config params on stats page + 3. max() function added to FunctionQuery suite + 4. postOptimize hook, mirroring the functionallity of the postCommit hook, + but only called on an index optimize. + 5. Ability to HTTP POST query requests to /select in addition to HTTP-GET + 6. The default search field may now be overridden by requests to the + standard request handler using the df query parameter. (Erik Hatcher) + 7. Added DisMaxRequestHandler and SolrPluginUtils. (Chris Hostetter) + 8. Support for customizing the QueryResponseWriter per request + (Mike Baranczak / SOLR-16 / hossman) + 9. Added KeywordTokenizerFactory (hossman) +10. copyField accepts dynamicfield-like names as the source. + (Darren Erik Vengroff via yonik, SOLR-21) +11. new DocSet.andNot(), DocSet.andNotSize() (yonik) +12. Ability to store term vectors for fields. (Mike Klaas via yonik, SOLR-23) +13. New abstract BufferedTokenStream for people who want to write + Tokenizers or TokenFilters that require arbitrary buffering of the + stream. (SOLR-11 / yonik, hossman) +14. New RemoveDuplicatesToken - useful in situations where + synonyms, stemming, or word-deliminater-ing produce identical tokens at + the same position. (SOLR-11 / yonik, hossman) +15. Added highlighting to SolrPluginUtils and implemented in StandardRequestHandler + and DisMaxRequestHandler (SOLR-24 / Mike Klaas via hossman,yonik) +16. SnowballPorterFilterFactory language is configurable via the "language" + attribute, with the default being "English". (Bertrand Delacretaz via yonik, SOLR-27) +17. ISOLatin1AccentFilterFactory, instantiates ISOLatin1AccentFilter to remove accents. + (Bertrand Delacretaz via yonik, SOLR-28) +18. JSON, Python, Ruby QueryResponseWriters: use wt="json", "python" or "ruby" + (yonik, SOLR-31) +19. 
Make web admin pages return UTF-8, change Content-type declaration to include a + space between the mime-type and charset (Philip Jacob, SOLR-35) +20. Made query parser default operator configurable via schema.xml: + + The default operator remains "OR". +21. JAVA API: new version of SolrIndexSearcher.getDocListAndSet() which takes + flags (Greg Ludington via yonik, SOLR-39) +22. A HyphenatedWordsFilter, a text analysis filter used during indexing to rejoin + words that were hyphenated and split by a newline. (Boris Vitez via yonik, SOLR-41) +23. Added a CompressableField base class which allows fields of derived types to + be compressed using the compress=true setting. The field type also gains the + ability to specify a size threshold at which field data is compressed. + (klaas, SOLR-45) +24. Simple faceted search support for fields (enumerating terms) + and arbitrary queries added to both StandardRequestHandler and + DisMaxRequestHandler. (hossman, SOLR-44) +25. In addition to specifying default RequestHandler params in the + solrconfig.xml, support has been added for configuring values to be + appended to the multi-val request params, as well as for configuring + invariant params that can not overridden in the query. (hossman, SOLR-46) +26. Default operator for query parsing can now be specified with q.op=AND|OR + from the client request, overriding the schema value. (ehatcher) +27. New XSLTResponseWriter does server side XSLT processing of XML Response. + In the process, an init(NamedList) method was added to QueryResponseWriter + which works the same way as SolrRequestHandler. + (Bertrand Delacretaz / SOLR-49 / hossman) +28. json.wrf parameter adds a wrapper-function around the JSON response, + useful in AJAX with dynamic script tags for specifying a JavaScript + callback function. (Bertrand Delacretaz via yonik, SOLR-56) +29. autoCommit can be specified every so many documents added (klaas, SOLR-65) +30. 
${solr.home}/lib directory can now be used for specifying "plugin" jars + (hossman, SOLR-68) +31. Support for "Date Math" relative "NOW" when specifying values of a + DateField in a query -- or when adding a document. + (hossman, SOLR-71) +32. useColdSearcher control in solrconfig.xml prevents the first searcher + from being used before it's done warming. This can help prevent + thrashing on startup when multiple requests hit a cold searcher. + The default is "false", preventing use before warm. (yonik, SOLR-77) + +Changes in runtime behavior + 1. classes reorganized into different packages, package names changed to Apache + 2. force read of document stored fields in QuerySenderListener + 3. Solr now looks in ./solr/conf for config, ./solr/data for data + configurable via solr.solr.home system property + 4. Highlighter params changed to be prefixed with "hl."; allow fragmentsize + customization and per-field overrides on many options + (Andrew May via klaas, SOLR-37) + 5. Default param values for DisMaxRequestHandler should now be specified + using a '...' init param, for backwards + compatability all init prams will be used as defaults if an init param + with that name does not exist. (hossman, SOLR-43) + 6. The DisMaxRequestHandler now supports multiple occurances of the "fq" + param. (hossman, SOLR-44) + 7. FunctionQuery.explain now uses ComplexExplanation to provide more + accurate score explanations when composed in a BooleanQuery. + (hossman, SOLR-25) + 8. Document update handling locking is much sparser, allowing performance gains + through multiple threads. Large commits also might be faster (klaas, SOLR-65) + 9. Lazy field loading can be enabled via a solrconfig directive. This will be faster when + not all stored fields are needed from a document (klaas, SOLR-52) +10. Made admin JSPs return XML and transform them with new XSL stylesheets + (Otis Gospodnetic, SOLR-58) +11. 
If the "echoParams=explicit" request parameter is set, request parameters are copied + to the output. In an XML output, they appear in new list inside + the new element, which replaces the old . + Adding a version=2.1 parameter to the request produces the old format, for backwards + compatibility (bdelacretaz and yonik, SOLR-59). + +Optimizations + 1. getDocListAndSet can now generate both a DocList and a DocSet from a + single lucene query. + 2. BitDocSet.intersectionSize(HashDocSet) no longer generates an intermediate + set + 3. OpenBitSet completed, replaces BitSet as the implementation for BitDocSet. + Iteration is faster, and BitDocSet.intersectionSize(BitDocSet) and unionSize + is between 3 and 4 times faster. (yonik, SOLR-15) + 4. much faster unionSize when one of the sets is a HashDocSet: O(smaller_set_size) + 5. Optimized getDocSet() for term queries resulting in a 36% speedup of facet.field + queries where DocSets aren't cached (for example, if the number of terms in the field + is larger than the filter cache.) (yonik) + 6. Optimized facet.field faceting by as much as 500 times when the field has + a single token per document (not multiValued & not tokenized) by using the + Lucene FieldCache entry for that field to tally term counts. The first request + utilizing the FieldCache will take longer than subsequent ones. + +Bug Fixes + 1. Fixed delete-by-id for field types who's indexed form is different + from the printable form (mainly sortable numeric types). + 2. Added escaping of attribute values in the XML response (Erik Hatcher) + 3. Added empty extractTerms() to FunctionQuery to enable use in + a MultiSearcher (Yonik) + 4. WordDelimiterFilter sometimes lost token positionIncrement information + 5. Fix reverse sorting for fields were sortMissingFirst=true + (Rob Staveley, yonik) + 6. Worked around a Jetty bug that caused invalid XML responses for fields + containing non ASCII chars. (Bertrand Delacretaz via yonik, SOLR-32) + 7. 
WordDelimiterFilter can throw exceptions if configured with both + generate and catenate off. (Mike Klaas via yonik, SOLR-34) + 8. Escape '>' in XML output (because ]]> is illegal in CharData) + 9. field boosts weren't being applied and doc boosts were being applied to fields (klaas) +10. Multiple-doc update generates well-formed xml (klaas, SOLR-65) +11. Better parsing of pingQuery from solrconfig.xml (hossman, SOLR-70) +12. Fixed bug with "Distribution" page introduced when Versions were + added to "Info" page (hossman) +13. Fixed HTML escaping issues with user input to analysis.jsp and action.jsp + (hossman, SOLR-74) + +Other Changes + 1. Upgrade to Lucene 2.0 nightly build 2006-06-22, lucene SVN revision 416224, + http://svn.apache.org/viewvc/lucene/java/trunk/CHANGES.txt?view=markup&pathrev=416224 + 2. Modified admin styles to improve display in Internet Explorer (Greg Ludington via billa, SOLR-6) + 3. Upgrade to Lucene 2.0 nightly build 2006-07-15, lucene SVN revision 422302, + 4. Included unique key field name/value (if available) in log message of add (billa, SOLR-18) + 5. Updated to Lucene 2.0 nightly build 2006-09-07, SVN revision 462111 + 6. Added javascript to catch empty query in admin query forms (Tomislav Nakic-Alfirevic via billa, SOLR-48 + 7. blackslash escape * in ssh command used in snappuller for zsh compatibility, SOLR-63 + 8. check solr return code in admin scripts, SOLR-62 + 9. Updated to Lucene 2.0 nightly build 2006-11-15, SVN revision 475069 +10. Removed src/apps containing the legacy "SolrTest" app (hossman, SOLR-3) +11. Simplified index.jsp and form.jsp, primarily by removing/hiding XML + specific params, and adding an option to pick the output type. (hossman) +12. Added new numeric build property "specversion" to allow clean + MANIFEST.MF files (hossman) +13. Added Solr/Lucene versions to "Info" page (hossman) +14. Explicitly set mime-type of .xsl files in web.xml to + application/xslt+xml (hossman) +15. 
Config parsing should now work useing DOM Level 2 parsers -- Solr + previously relied on getTextContent which is a DOM Level 3 addition + (Alexander Saar via hossman, SOLR-78) + +2006/01/17 Solr open sourced, moves to Apache Incubator diff --git a/solr/KEYS b/solr/KEYS new file mode 100644 index 00000000000..320a423543a --- /dev/null +++ b/solr/KEYS @@ -0,0 +1,453 @@ +This file contains the PGP keys of various developers. +Please don't use them for email unless you have to. Their main +purpose is code signing. + +Examples of importing this file in your keystore: + gpg --import KEYS.txt + (need pgp and other examples here) + +Examples of adding your key to this file: + pgp -kxa and append it to this file. + (pgpk -ll && pgpk -xa ) >> this file. + (gpg --list-sigs + && gpg --armor --export ) >> this file. + +----------------------------------------------------------------------------------- +pub 1024D/015AFC8A 2004-06-18 +uid Bertrand Delacretaz +sig 3 015AFC8A 2004-06-18 Bertrand Delacretaz +sig X CA57AD7C 2005-07-01 PGP Global Directory Verification Key +sig 3 E41EDC7E 2004-10-30 Carsten Ziegeler +sig 3 E2D774DF 2004-10-28 Sylvain Wallez +sig 3 7C200941 2004-10-18 Torsten Curdt +sig 3 23CB7A2A 2004-08-19 David Crossley +sig X CA57AD7C 2005-07-14 PGP Global Directory Verification Key +sig 5793498F 2005-07-20 Tim Ellison +sig 8103A37E 2005-07-20 Andre Malo +sig C4C57B42 2005-07-21 Marcus Crafter +sig E4136392 2005-07-21 Noel J. Bergman +sig 5C1C3AD7 2005-07-24 David Reid +sig 1CD4861F 2005-07-25 Eran Chinthaka (Web mail) +sig 333E4E84 2005-07-25 Chathura Kamalanath Herath (Apachecon Europe 2005) +sig EA1BA38D 2005-07-25 Ajith Harshana Ranabahu (Made at Apachecon 2005) +sig 152924AF 2005-07-29 Sander Temme +sig 2 FC243F3C 2005-07-20 Henk P. 
Penning +sig 3 EC140B81 2005-07-20 Dirk-Willem van Gulik (http://www.anywi.com/ - Senior partner) +sig 3 EE65E321 2005-07-20 Martin Kraemer +sig 3 A99F75DD 2005-07-21 Rodent of Unusual Size +sig 3 21D0A71B 2005-07-20 Dirk-Willem van Gulik +sig 3 3642CB4B 2005-07-20 Martin Kraemer +sig 3 2261D073 2005-07-20 Astrid Kessler (Kess) +sig 3 2C312D2F 2005-07-21 Rodent of Unusual Size +sig 3 302DA568 2005-07-21 Rodent of Unusual Size (DSA) +sig 3 E04F9A89 2005-07-22 Roy T. Fielding +sig 3 5F6B8B72 2005-07-22 Stefan Bodewig +sig 3 87315C31 2005-07-23 Rapha�l Luta +sig 3 F39B3750 2005-07-24 Colm MacCarthaigh +sig 3 40581837 2005-07-24 Nick Kew +sig 3 9C85222B 2005-07-24 Henning Schmiedehausen +sig 3 9978AF86 2005-07-25 Christoph Probst +sig 3 2A623F72 2005-07-25 Christoph Probst +sig 3 F8EA2967 2005-07-26 Brian McCallister +sig 3 C152431A 2005-07-27 Steve Loughran +sig 3 CC78C893 2005-08-01 [User ID not found] +sig 3 75A67692 2006-02-22 Erik Abele +sig C8628501 2006-03-31 [User ID not found] +sig X CA57AD7C 2005-07-31 PGP Global Directory Verification Key +sig X CA57AD7C 2005-08-01 PGP Global Directory Verification Key +sig X CA57AD7C 2006-05-23 PGP Global Directory Verification Key +sig X CA57AD7C 2006-06-05 PGP Global Directory Verification Key +sig X CA57AD7C 2006-06-18 PGP Global Directory Verification Key +uid Bertrand Delacretaz +sig 3 015AFC8A 2004-06-18 Bertrand Delacretaz +sig X CA57AD7C 2005-07-01 PGP Global Directory Verification Key +sig 3 E41EDC7E 2004-10-30 Carsten Ziegeler +sig 3 E2D774DF 2004-10-28 Sylvain Wallez +sig 3 7C200941 2004-10-18 Torsten Curdt +sig 3 23CB7A2A 2004-08-19 David Crossley +sig X CA57AD7C 2005-07-14 PGP Global Directory Verification Key +sig 5793498F 2005-07-20 Tim Ellison +sig 8103A37E 2005-07-20 Andre Malo +sig C4C57B42 2005-07-21 Marcus Crafter +sig 5C1C3AD7 2005-07-24 David Reid +sig 1CD4861F 2005-07-25 Eran Chinthaka (Web mail) +sig 333E4E84 2005-07-25 Chathura Kamalanath Herath (Apachecon Europe 2005) +sig EA1BA38D 2005-07-25 Ajith 
Harshana Ranabahu (Made at Apachecon 2005) +sig 152924AF 2005-07-29 Sander Temme +sig 3 EC140B81 2005-07-20 Dirk-Willem van Gulik (http://www.anywi.com/ - Senior partner) +sig 3 EE65E321 2005-07-20 Martin Kraemer +sig 3 A99F75DD 2005-07-21 Rodent of Unusual Size +sig 3 21D0A71B 2005-07-20 Dirk-Willem van Gulik +sig 3 3642CB4B 2005-07-20 Martin Kraemer +sig 3 2261D073 2005-07-20 Astrid Kessler (Kess) +sig 3 2C312D2F 2005-07-21 Rodent of Unusual Size +sig 3 302DA568 2005-07-21 Rodent of Unusual Size (DSA) +sig 3 E04F9A89 2005-07-22 Roy T. Fielding +sig 3 5F6B8B72 2005-07-22 Stefan Bodewig +sig 3 87315C31 2005-07-23 Rapha�l Luta +sig 3 F39B3750 2005-07-24 Colm MacCarthaigh +sig 3 40581837 2005-07-24 Nick Kew +sig 3 9C85222B 2005-07-24 Henning Schmiedehausen +sig 3 9978AF86 2005-07-25 Christoph Probst +sig 3 2A623F72 2005-07-25 Christoph Probst +sig 3 F8EA2967 2005-07-26 Brian McCallister +sig 3 C152431A 2005-07-27 Steve Loughran +sig 3 CC78C893 2005-08-01 [User ID not found] +sig 3 75A67692 2006-02-22 Erik Abele +sig C8628501 2006-03-31 [User ID not found] +sig X CA57AD7C 2005-07-31 PGP Global Directory Verification Key +sig X CA57AD7C 2005-08-01 PGP Global Directory Verification Key +sig X CA57AD7C 2006-05-23 PGP Global Directory Verification Key +sig X CA57AD7C 2006-06-05 PGP Global Directory Verification Key +sig X CA57AD7C 2006-06-18 PGP Global Directory Verification Key +sub 2048g/AC136A02 2004-06-18 +sig 015AFC8A 2004-06-18 Bertrand Delacretaz + +----------------------------------------------------------------------------------- +-----BEGIN PGP PUBLIC KEY BLOCK----- +Version: GnuPG v1.4.2.2 (Darwin) + +mQGiBEDSjpsRBAC6qu/5vv5ETRaTEwrZI1gbepp2tNNBqIVfYEZlF8jnWhUk1sdm ++bvqNMPf1B+ZQqjmctLoa+OKVd4vQEgdxclUYCkjuk2iWRX/iRJ61Zm1myuaBfhN +v0mRBduoSG+2+cIBq3ODQld/BPFotfL+giLdHF6SMjMl0BYcCOqZZGhhGwCg88wL +wNpY/ZPBtiUXzozD0wCDs6MD/ApczG73dG9H3BCPmp7y41ZbYZHpxiS9Mz9mLJNk +HMcx4iOzZnptX71UGG58nnBc+VDPooJdzzZywYhHhyz4uSrvMqS5DYquH5HEzMLE 
+jKNf0EGay9SHbpbT808YJIKogLgUhuUXe/MrzOdhEEfj+c8Q0iLEmOnNYjFRvakV +ei8gA/wKwnnQbQvgHyanZsus4WQRRRhbIZFM/qsmM4+McnI9TveDRYjoNwI5tbwY +aKF+FS9XAbMTBfXZggdjlyYy7J8LZyfm/yyb7BBq6++Owpj3Y/6DPgg1jqJ9Lenc +04uJATQiXNFS4k38eUtxWQ8lutc4W+3gy4M86mTFwfYVikRr7rQwQmVydHJhbmQg +RGVsYWNyZXRheiA8YmRlbGFjcmV0YXpAY29kZWNvbnN1bHQuY2g+iF4EExECAB4F +AkDSjpsCGwMGCwkIBwMCAxUCAwMWAgECHgECF4AACgkQN/aP9QFa/Ip0DwCgyqeU +b6ClVLmPOnsHeikXUeyuHOYAn2WW/Gq9kk5vOfJ2bv/lS5sH+lRgiQEiBBABAgAM +BQJCxTKzBQMAEnUAAAoJEJcQuJvKV618ckwH/j9H7+w45cbGAkqkbwWLQMAzHng+ +9wgJdoITY9KR2j0c3Vr8M70rwB9HINxSR2j9DdNceS5qSzLZTH/gJqQ+EKb8B925 +mGI7s1K8BpGAR2ipZukqxIJdyNcT4UQ85YxDSwyMfCnQhdaK2Ic+Eziu8Hd32pMW +bNhckxMux1eJst6ix0jka/yKXu2JJ/MsmJ/0S78DKw0Mf4vIL4YBH14n+hHoXaAk +YCH4wdKOVvd+Nu5PGgQxVsrIOQMGc5w+/vBDgq3Q4rUs5g4v3zPDm9wDaGLiMwyn +GY/pOqDx2nnWso35G74ALDRJ2mVdC2sk0CPfrhhQjLgDUSPAGI5cQyeNgG+IRgQT +EQIABgUCQYNhrgAKCRATLknU5B7cfqtLAJ9P3jDLqH6q3er+ClAsNRaDNOnhsQCf +Y05hAt5Y7oBb0RHW82wajc6GZGuIRgQTEQIABgUCQYEoJgAKCRDJtabs4td03+bs +AJsHWq/6YVYrp64UsJgAN/oohtP1iACfXgL6CHe5RvWW3UuGtUsZCevkhoGIRgQT +EQIABgUCQXObygAKCRAEYzpXfCAJQRSxAJ98Mk2gIBEcxdCt6XL1qhg2BHvJ9gCd +Gii992d6EggkXgF096dxT1yCfaCIRgQTEQIABgUCQSQRSQAKCRDBOfFTI8t6Kl4A +AJ4slc4BUEF636i0TWS9YKxrAA4mNQCfd3Mk8LSqOtC6y7HoF6AFvkoKA5qJASIE +EAECAAwFAkLWuS4FAwASdQAACgkQlxC4m8pXrXxktwf+J7v3DG0t0zetgH8Ifm7c +JD1O6aLKgHoqSLol+/XU39DHfkouFl5SifUhszzpVa5ek8kvfmY4KAjxYHYDrWXJ +eWiDhmcrLNWWK5VqIuJqsdYugP3WQuS3GVeufqv/da9a7/1BZQP4l3Mi3HGuqApI +It1ZoIqBX3mupESg/HTOFMhSvkHHpFwjUb4rLlHqBHCXoMoYe5I9JTF55mcJyDpj +KasMO+2kTeVPEwYvbdpdnnElRucQRT+gbxOhiDLxvvUnV0pD4hgzUpiTHueOAOVE +MAdloQbYYG2QaZJo67h7AMeZgnE275QHqDYZRJPhbyKLp3f9tlkZ6Dhx8E0YUgZp +JohGBBARAgAGBQJC3ta4AAoJEEHqCaBXk0mPOgAAoLzhTcsB2EB77taLCBFl8Nhh +cOrKAKCywO74xPLFEpe5b8osxreCHhLCdYhGBBARAgAGBQJC3tr9AAoJEMppOXSB +A6N+gF0AoKBcgIUWC+2i6LezOPp6JJN7Wy6WAKCXTxLqNduBwpeUh1OIYf3mEAmJ +fohGBBARAgAGBQJC333lAAoJEDlNL+PExXtCzBwAnjK5DG5LuANHdISaIRKXBuZG +zI4pAJ9AOuX3bR5mHvqpKkapFrikOULDG4hGBBARAgAGBQJC44WjAAoJEMl8UJZc 
+HDrXln0AoMJuj6pqvkjchV1iqCsCi3b4CGRqAKCoss+9L68xBcyR+dj6oXvQU5NJ +D4hGBBARAgAGBQJC5HKyAAoJEIzjdrgc1IYfvegAnRD7Xr2DzpZmKbDrgW+ROPAE +OoSbAJ9DJlWq9J6vSKprccCu9eZeVZxENIhGBBARAgAGBQJC5H1nAAoJEGEZai4z +Pk6E4vAAoKNEr9XS4K0y6qTc7FHfhwNoG1l1AJ97ZJZFp1djyfwJGq5OQuPLzSoP +B4hGBBARAgAGBQJC5JNXAAoJEASRNELqG6ON5UEAn2UgTiqDJbFcCW4tweKmBE37 +42TdAJ9+ur+rt7eCSeOVinE0IgPma7/0cIhGBBARAgAGBQJC6mQzAAoJELK+vEAV +KSSv88QAnA114d+qT3DSiK5030OjwBSLfOzuAKD1ZpFkDjmfkgDnWPAU4tSJAAI1 +P4icBBMBAgAGBQJC3qyqAAoJEDGmPZbsFAuB7DED/AqX7JLVRmj2GnWEsBi1i7+a +ZglkgxQhqCzraNzOivIEkZ6dtOTiHT466r57P7ezUrCMpJxvhlHVCUK5q/PioE55 +HLmS9JsVONfozOMArAhkpp1QdgWMTv6rvz7ssE2jX1gOllr4mt4WK3sow8TiRTKZ +K7v1BVlTyg9+XELCwoKkiJwEEwECAAYFAkLeyAQACgkQN+P0X+5l4yGHTQQA6q55 +2r/LhhuFWJGgyw19UKs51q/Rm+fVXT8M3fuJBE4dE39Zkgo8J58134/fm+VNJ7dk +J1xqF5jvTS6rCAM3L9MNRvt6YhKYxoH6YDcMkNJqs1zTzHM/EDKI3GRukG03vX/l +L7UxtuvtV7RnlRS/dlhvG40dODQqok3fO4/C6QeInAQTAQIABgUCQt9pHwAKCRCa +zTzAqZ913YDfA/wJapKu9EC6SnFdpuaLhOL3mogkGjTURib96y0Tf1yYObmJhcs5 +/lJjMT8cePR3uGeeiVrpdaMlsjBGYO30KwG7UCJYOw1oZ+ueqKN3Tk7/fLOiV/5j +2RcGCFo0bISjJAMTG4h7RQVydIgYVg/5x8LCIvDYjgXg4dPKblCJyChUm4hGBBMR +AgAGBQJC3rP7AAoJEP1viMYh0KcbRzQAoLXGj1B1Df/COcpjhVbkB/aWMUZCAJ9U +4uCV91iNY79gUs3WQEkS8j7+5IhGBBMRAgAGBQJC3rs4AAoJEDLJ5M42QstLdNAA +nAvT7s31lr6Md8T3nsrLPh6fPH+RAJwMZgmm/JRnoZIaUON6SNNlv1kboohGBBMR +AgAGBQJC3tfbAAoJEO7R6jkiYdBzuIAAnj9z0TACmzHiWSbyqiIewJLW/wv9AJ47 +jRln1u9mvi9192cwqfF01jcLzohGBBMRAgAGBQJC32oRAAoJEN26ZLosMS0v9RAA +n2h23ZqXNLsOgZUCWfgm5MTy3HcyAKCIjCEVisaSHtA2sO6cGJSV9mANEohGBBMR +AgAGBQJC32qpAAoJEFCOrsUwLaVojyYAn0XkNbiULXmEaa/wZsefVW7ghEOxAJ9d +Hdk8IKfbSusfh2KF8MozvXqyqYhGBBMRAgAGBQJC4O5HAAoJEFuWgBDgT5qJhz0A +mgNmDpUgiOOxbN7yevi8gFZ0HhDsAJ0VTF10Ci/DoLGa5dddrwIejazAbIhGBBMR +AgAGBQJC4TZ5AAoJEKIRWuFfa4tyHXoAoItZqmv8WICsMwqtF0QOku2qDgmsAKDI +qM50egV4ibXB0ds5xhIvVHHAh4hGBBMRAgAGBQJC4lyJAAoJEG2YjReHMVwxqXkA +nRQuML7MHsp5MtEgsR9Vahi4JBukAKDtrfB6Ev9GoSpqlj98Sjfr/dasTYhFBBMR +AgAGBQJC42T0AAoJEHXKoqPzmzdQE/IAl3tHNQ4ginBdWovyH/WNxjSD7NgAnRbR 
+KTISG6pO5eNOX6DU98/qQyzWiEYEExECAAYFAkLjtIEACgkQbQvHOkBYGDcnZQCf +Wg7cSWMBOlu+FLW2Q20lqdZCdAkAn3e0tTiyn0gokU7/wyfHWGju4upqiEYEExEC +AAYFAkLj7aEACgkQMoZOQZyFIivS2gCcCIah919Iy+ASvjk5EWks8jOksykAmgP5 +drcHNsRsPUfYnrSUzD1zVmCViEYEExECAAYFAkLlEMIACgkQUnkvr5l4r4aXkACe +KCpZspIEttLdnatEIypxfhX0YIMAn2EI3RrbgHmXHkhaIUFyVhjz7eEyiEYEExEC +AAYFAkLlEbAACgkQa3OhBipiP3IZTgCg6xsI8+ftGOokb8y+3mCLc9lMSPQAn0A6 +N49Kci+/FiLY0pOVFysbuRu4iEYEExECAAYFAkLmmXYACgkQaOuMdvjqKWfXGACe +Ihm4NjT3xPTyyH7aDPmBObVozt0AnAgRnpnVmyCuUnGXjtcytZ0eDX/siEYEExEC +AAYFAkLnYXYACgkQbpR1lMFSQxoQKQCeLYJR57+5bDPECNN4oLKLUZ8WeYoAoJCu +P4o/4roo7fTc9X7j+sVX1BQdiEYEExECAAYFAkLul5sACgkQXP03+sx4yJN7NgCf +XJDAjCStOu3mJ9q50+GtkhytGXUAoPzkoZFGC8QjBxOB5t+WmTXZuW/xiEYEExEC +AAYFAkP7wYEACgkQFT+gzXWmdpL51gCgnV3MIcrJWJt75OBt4r1EVRJ6x18An2CX +RV58NNli++76V900Epsr6B0FiQEcBBABAgAGBQJELYhHAAoJECm4ktDIYoUB9YIH +/1winK8pIM1WmXaNNK3eMFrG2zm1DbAvdhR4CD/D97CPHftk7gPi22j+92H0DrTm +dgUaTem8cv4t6CvQOdeovq97CsUARnPuFP6mp6C7O5uEHe27VJM33zdFcLuRFeG3 +4Ua9JD7yGA+CbdYOkoo593enoikOm4OKPAz5BLpC6zTc9oc7LUqBiDWjCkxzHKA9 +OisdzwGAsDTP3D1Cq7btviKMMUhSG6LZX1uIEhWSaPF49Tp2ox4AM97GC/GsfyUt +xpH0ECTZGwHvHEBvkvmjIfmVIjx80Wgw8pyfh7hM0AEO21AR0WTPH9Q3OgAxaeKs +hlT2oEhRU7T0CVOWn2X/ztmJASIEEAECAAwFAkLsxgUFAwASdQAACgkQlxC4m8pX +rXwwpwf+MneDvBkEsCKVCO/3jNuNfayJp1nMdabQUZ1sKQHmzKNbDMh3jsP2k+OF +2e+3+/qnr01D+0E0L2hHvtN44FhkTVrnbSzEUjew2a3Szi/7FGbAyYSdtYbFVe1y +7nDq/3DYug7+rAfzU72SVnKPhy9f95KylsWUNnu6GEoYR8Izx9SRdn94KZ/K3ueL +IcUMReYhOA89fSKmfDpvQQlqrWx2DunFMuk4PcXsJabs+XYqmBZf4IMi9/FIs2yV +f5VR/57LEWiniNOTqLoGwNAHhugIEaw5jkPel/RA5muP4Afi4PWmBPF/guoum5b/ +lj0Gg7lV29UJE0x+vVMVj5yu9je8ZokBIgQQAQIADAUCQu1sRQUDABJ1AAAKCRCX +ELibyletfJWDCACxx1ZUAuBUo0N3n3ky/Mmcoa6og1QN7InmfXvgD/r2lLOI8dwa +HaUpvSmay0uQHM8ioCm7HvmH0w7wxl0OZHPoZnHRMCE3Kr+K4ry93ZyzhiJD1LrK +qnKdC66x+jM0i+Fh12Gp9ETJ+zKjZE5JUKrKhakkUfLVh7U8xU9McX9vKFDchCAS +p8qv905iuHDO7vaeaT9/lG9ovu0r7/0F3r57yRlbM+SkvFS6XOWSz2/vb8RoNAVr +mmTJ1ox/HIADJRjIStdZyANHv/ed5gQWgA1NcR+uOnfJE5aNwxUVaJFuYtlfaYks 
+u+iJwytPLSKUm3t6b754bU2gAxQAgWGDlYERiQEiBBABAgAMBQJEctcdBQMAEnUA +AAoJEJcQuJvKV618/34H/iHd3utKfX9Pl68rQ2oGkCP5k1a6ccKPNUreksxAmQp4 +z2noZbeToA7pqWz6NumDdHwOpGZRbMdBaCdlaae02Um7BvPeHtnv+u+7B1xryGys +BVeU+r3y7suwvzNY/ihHhpXwiy+qh75ZgTzIRrv5MvLz3hc2ZPR8MUpJTurF4Dlq +fMIgPTKDYzyshmUGUXZF1R2kmIiy2djxyL3jNvdtYlBQeWdQIER9uVEikldZ2pUE +/WGJqLOUJxnAmYWL3CgBgiioIbTsFUkm4Y5xW4Xrq753Ux1OpKCiwAsOzzF+ehKz +4LPG/P3LUr2y79HwVDHVvCrwlbDBd/N5vXKyHCGHm5CJASIEEAECAAwFAkSEmwcF +AwASdQAACgkQlxC4m8pXrXwz1Af/WXhiSKBLQOtN8HrB7ndYlFq99XeLQpvWSuMb +uCnNHYYnsCkjo3tVGPnRgKkrV66TC7SXgnf3A/7x1iEBLDkVBUU4XwJrzz2zUkXj +Q+UzZUt+8JVj2SCzVAF4wzhYonaX5tx/JQYH/1jB5iy91hMf+LcnwTHk+/+OCBEh +FqZLTDVtXZagEfYyM3Aoq7tiOC/feadOMTMDtw4xJLKhKvynWJpE5v2td/FVm9sZ +U7MkeqCD7uHgzDPSOMdmS+3kTi7O8RNF8pEuol3CZErNJ6Ei5dlG9cJg8tWVhkXD +XBEvFQ6UpcQ4Yj9/encCcGwsVT6qG3IfPUobTNr88fM0d1rxs4kBIgQQAQIADAUC +RJXCxAUDABJ1AAAKCRCXELibyletfBBiCACr/yMglAt7DNzzW/9EyJP9OBsXNk0c +TZ02BRgEsZBuxHU0qzPXSCAxraPqOsvAP9OZ9TSmqArNI/HmB6CzJIHMlxH4XRJJ +eJ4Hwfgf1POfwv3xPTG8OoVabA9j5uPPtFiTYXPLzuGwiBVKM2iNEfLLuCbZfhoa +qcMKxjq4ixKlG/HGav73TeJP8uoNB488mgX0GV6m/JGfyvLP/R4ZyA3jQLk7aM8X +GmGVG63Mgy9cZkLbeGLUhrBXf32enMSc+7R1tO2uHFNJMmh01MdXPw58+Qmx4r2H +M8W/7NQweYCbUdDujpI6f7Lg+qMemHDs7fi0x9TmYRdE/DpIWKiL1ThGtCxCZXJ0 +cmFuZCBEZWxhY3JldGF6IDxiZGVsYWNyZXRhekBhcGFjaGUub3JnPoheBBMRAgAe +BQJA0o/yAhsDBgsJCAcDAgMVAgMDFgIBAh4BAheAAAoJEDf2j/UBWvyKoVIAoLZI +jDpTDUQb99MWGAIhd5hKbwlGAJ9QG+DT2qEF1vtgOHucWg1ljhPpUYkBIgQQAQIA +DAUCQsUyswUDABJ1AAAKCRCXELibyletfDA1CACcdUjrLC24oOo9mCPcgp5z85mQ +FsNdeYGiQDpvss8viILtaBSi5T/eyrnFQCLe267s2+f/Blinz5zYZCKvO0KCs/s2 +SqXytCqTV4KSc/0Efo7th7PXaB/VTxUP31hG4jU9i+6NYXosavieQiWnn4ghsl9u +gwXTzu1LbGdKbAXmbCJex9uKNdXdcUni89je4oOiUcyh+9BaMl70PLjl1dAyE6yC +CYhXrHW+n6Yye2Aw+sTzMMjLIphnJLAdsDKB9iIyUWzJCllwyWOJZk2paKPPgCEK +IaBVsPwoDy3Dbs8X88Kn993B35VwFf0qGXLWh6kRNatLsEtGcE9PkUFjdBcqiEYE +ExECAAYFAkGDYawACgkQEy5J1OQe3H4voQCePL2c6muDRVZ98rTaVLGEiwqVgnYA +n2cPs1VLsS/oVqOtCwCUgydKHeVviEYEExECAAYFAkGBKCMACgkQybWm7OLXdN9c 
+ugCgy89zOXdMOhVncxKRbt1YqeiXikMAoKk5BioFVyFxv6zGpF5+KYVl9ZkGiEYE +ExECAAYFAkFzm7wACgkQBGM6V3wgCUH68gCgmAEnlUseKreWdrEoA1INqjoc0U0A +njEXpxW1cEW/Hord0MZjY09GUQ1niEYEExECAAYFAkEkEZoACgkQwTnxUyPLeipG +2gCdHKKBCHqONngx3Bpycj/XZ8JRH4IAniMz1bPFDFLrPTtGQcB7GJSeeUqliQEi +BBABAgAMBQJC1rkuBQMAEnUAAAoJEJcQuJvKV618CbAH/3RmNilwoXS+Q6su8Wj2 +X9u26HhqzSecHhiY5rRGaQewe69I4VoCCHVLuWUMAF2IIawm03ka59YbU+rpIXS4 +f3vmAZVfGIo2vbDfufyFjPbQUHfekR/GnJL1xEHAfAwWp/jpnniWjsgRr6eAwlZN +UNCzPyXPikuktjZBZ2JONAae/vuXwP0zkdEtc4WCP1BzMmU8athZ4hZxzp+VtC1g +57jluoS4iw0vEIgXRgrcm3fLE8OArzhPac5o9XK1dTx40wHL+U3fhyvYuJ2FrOWT ++BO0UP7e1wUI6+ITxoCTZHxW8FE+RUtmPigtMKNu3cowEftVgqfSQnZv+YwhRsgj +3HeIRgQQEQIABgUCQt7WrwAKCRBB6gmgV5NJj0w1AKCKKt5D8mxvkCobaj+ZyPgN +/YcecwCeIQeJ6tSp5G9MBgUO7upvG1BzMaWIRgQQEQIABgUCQt7a/QAKCRDKaTl0 +gQOjfraJAJ4sJyjcjMnMr9n4+ZJY64a2j+tdBwCggw0NTw3PN01CRM6hXQoaLGZb +mmmIRgQQEQIABgUCQt994gAKCRA5TS/jxMV7QknsAJ9ylXueE5pHqVP5UPGEWIVk +P74IBgCeJcsKsjkfZq6uVP/gozUCgjf2B5eIRgQQEQIABgUCQt+SugAKCRABBWa8 +5BNjko0BAKCQUONymYbUGYWDLxId5b3NXB5QrgCaAuW0RWV6oauWNtRrvoucRy9Z +/syIRgQQEQIABgUCQuOFngAKCRDJfFCWXBw614hNAJ4qp8uDW7ESMQRPfqSruu4d +3I5tcQCgxfQWli4MAf0G0ZEjNiEkeL2AQgWIRgQQEQIABgUCQuRypgAKCRCM43a4 +HNSGH7jxAJ45jrVG9EiDuryEdkW/1X16vsY61wCgopPFmYKmCm4k2lZy5hyAojIL +uemIRgQQEQIABgUCQuR9WAAKCRBhGWouMz5OhDjEAKCbyBQG0FGJjMTUqCeEaXGs +rlkMoACfVLnLYLICywWclOPGb9xCTTDQnkuIRgQQEQIABgUCQuSTVAAKCRAEkTRC +6hujjWwLAKCClSiaCm/rsCoFSMG9HPnElkWNqwCgpUqniJ+Q587c/ee2MJMfP9Rj +2ZeIRgQQEQIABgUCQupkLwAKCRCyvrxAFSkkr2r+AJ4pBWxHy+8mERcd2w3WpHD9 +aV2oRACg0MhVy92t2ElFxPrLeTs0anzkNv2IRgQSEQIABgUCQt6xAAAKCRCLlilD +/CQ/PImUAKChHoZ3OostKM+vXf4HzC3EobFkyQCgjw+gHlrMtOZ7/jyNEuBKt6ao +XQiInAQTAQIABgUCQt6spwAKCRAxpj2W7BQLgTCaA/99LRrKrSFFlRFJaPYms9Li +Jn+FDYLan3Lj6gA04cCXC3nr0yUEYVP0nex08yge5NEkbV4xOV3sDCrMjjSFYTRk +CjvhUm9KiqrHl9MSFbflaoSj4hLDFvL3GrlycCnk4p9MnHkdE8BfRxuSqts9aUSx +uKHIkNwI4jjGo58Ctc07BYicBBMBAgAGBQJC3sf+AAoJEDfj9F/uZeMhIGQEAOh7 +dNsTwWuMPGzK+KyAD97Ah3C1eQwgnifuI7LTt46t1iDHC//EmWY6Fl6wmrPDjUXl 
+n5JozJEOMzTVdc1bHaaGJh1PlmNGROIpGWf6AykGiIM6AiCN/6Oforfh1oGf1hro +blYZX1ykJmcnJA5eX9n80dx1/M75PgKvlH28wV5fiJwEEwECAAYFAkLfaRcACgkQ +ms08wKmfdd1djgP/UQ29LZrKtFY51K0Kb4s4x9V2JU3eolLbVaeW4yvjENBy0pqs +9WAe4fTCzPRcA82JzXdahbofQ6G+cuV1IARtaL6qQA/NLriv0u2s2W0KF8kSvu5W +DJcYs0sClls3jC0S+GGvgM+DOiDWV3o8uNsMewTxSKziJwEG9haQ5wjt6LWIRgQT +EQIABgUCQt6z+gAKCRD9b4jGIdCnG1wcAJ9mLJiUi9BKxWwEpImpYEttV44kpQCg ++VeKf+9Mmb03YV5iwqmxLPi+EyuIRgQTEQIABgUCQt67MgAKCRAyyeTONkLLS0Q1 +AJ4lH6xITtr//6aWwIAaShLB6ere8QCgziP6JRCDHIv37sqFuCXl24+roRSIRgQT +EQIABgUCQt7X2AAKCRDu0eo5ImHQcy7YAJ9Y4wqukDN6g3o3trqgxk/LkD/EeACf +aCEsB7OwTw4nTbYiYlyj7Q6xcX+IRgQTEQIABgUCQt9qCgAKCRDdumS6LDEtL/fk +AJ9i3H2oakGBDrEWQTaYuQcYK7nddwCeKLYEhCx+TrXKFKjLDwUxJMrVYkGIRgQT +EQIABgUCQt9qowAKCRBQjq7FMC2laB//AKCCrmQysq3yGjonfumvnQQb8cS/RwCf +eGsAFVbvGJkyNwcst3o9yF0sVjqIRgQTEQIABgUCQuDuQwAKCRBbloAQ4E+aiWn8 +AJsEK2PGchbc0YD6orPtNYC/34kUgACeKFFFAHOFRXStc5KZLmJBSbT8OACIRgQT +EQIABgUCQuE2cwAKCRCiEVrhX2uLco1lAKC72f8hMpjsNUE85l0vlSP+ROD32wCd +G66V3/14h7LMoqTgkYCh7L+PjmCIRgQTEQIABgUCQuJciQAKCRBtmI0XhzFcMU9N +AKCoHx9H4WPuUkaG3nBWOX5sfqD49gCfcqT97X3EUVi/6wB/ypFEDbgSN7iIRgQT +EQIABgUCQuNk8wAKCRB1yqKj85s3UFUiAJ41Jh3QzVaoB7kZLbfguRdu0LWolACe +O7vrJoYM2RRCnuvCYcnvoO8H3Z6IRgQTEQIABgUCQuO0fQAKCRBtC8c6QFgYNzPv +AJ0XgXiLYp9yQgnH+CzBfEhESMoD/QCbB+sSYX4sAlHqsP4ZDlo09zxSjn2IRgQT +EQIABgUCQuPtnQAKCRAyhk5BnIUiK7KXAJsHwl80HSVOBabaxUJIiOueq0APCwCb +BjqwtPDB7LsJmSMvI+VZD05pqjuIRgQTEQIABgUCQuUQwgAKCRBSeS+vmXivhgSo +AKDWgoqYC+uVRHriBrOyoogiolgwXQCg5RNcb9RBU6K2nz3fVTXH+jqbvO6IRgQT +EQIABgUCQuURsAAKCRBrc6EGKmI/co9aAKC3ye4ZnztAud5KP0HjLUqePJtlEgCf +WSjOL3180YiroT0w18UYpVIvzFKIRgQTEQIABgUCQuaZdgAKCRBo64x2+OopZ+q4 +AJ9JYfPM/UjftfzMUgGtPvExe+yPGACePJbwo67cI8FG9dzQaK2VgWAo3WaIRgQT +EQIABgUCQudhcwAKCRBulHWUwVJDGtgWAKCzyBiSi8UPv0qarURcDvbrv3/g1ACe +M+nakste8unr4BsbOnB8wPsEGUOIRgQTEQIABgUCQu6XmAAKCRBc/Tf6zHjIk/fp +AKC0jvC+7cXMhTomdHPRbDITuCfS1gCgj7zDVMw9Y6g2woQ2NM+scLkt6iOIRgQT +EQIABgUCQ/vBfwAKCRAVP6DNdaZ2krwXAJ42ykru/p5qfrfm0YllMUn9MJTjwQCe 
+M6Wcf4/Xv72gppz3kG+muOXCKWWJARwEEAECAAYFAkQtiEMACgkQKbiS0MhihQFK +dAf+NjAJ21o1Mc+HtjJEByLOXVkiZYupqr7aHmnbi8X97LnZGPxQ5f3QBtqQ8CQ5 +wirf2T8mkpR5xyVNsQuZ4/Ow6xsJRzQbeRQ5OHkJYDBdGJzyrGDaFLnk6wKCJ/9d +QtUwKNVLqgFyzDRYhZiskZ7w/IxDY9pONyyBwwOoifl0mPzkDDrqdhey27KfmuL5 +fcj40Va+xTLaM9sR+bR5+KlbCDubmhr4AmfDgTE5iwmybf9Hd2e6u54GPQ6X7r2S +kuOxPgYeQGNelEo7TsnbGeSOvct0y4pzh/oyMXfkZGqM6DkNlN0/GnlJb1Tem4CH +ZnOQvzdfI9gmV90Q5sl+2K/RjokBIgQQAQIADAUCQuzGBQUDABJ1AAAKCRCXELib +yletfMlFCACSskAhwSbX3ksnNpOFDcVd4sv1zurACUyJEd8vrWvvoVWQ4SOfjTwv +6kpm2z/3MqsKyo3RdiNwlFrOb/cWxnqJh4ZwVp4bqZBbnhRzIta3ZMiwF4yz6SLk +Js0s30+0Z/Ig4SwoB8dchbee6DicCS2FckKL72Cxs7INQrB6qov2C4V65ftK/+In +QN0oXZTfPnw/yleWI+BUvY+7ZNNfuQiCZTef/xNO8wou3Pkdkxuk9hEY+uEFRSpo +jdQ5FICOmaFZS80UyhNpl/jbh88HQ5KM3OmS4iUF7vAcxoailOwVcvp4EE5Otpmb +4A2aeo7KT+w/7UqoTXP0V2Y+BG0bMVVBiQEiBBABAgAMBQJC7WxFBQMAEnUAAAoJ +EJcQuJvKV61878UIAKO3591kybrdTUA4OAZPy/ZAbHDcKbB8Vj+r1Tk8RIbjbRWo +Vdz9uuakqYtuPgD4CC5ArxApqJ1Wi4cNsA2oT7TcsXQnAl/xFvtRjFgcsvfEkcLm +plaZM15wCqI1NFYilsgk768GzsppkbBM72LXTWFXoDWHq4elXCm/Snnj+zXXEabT +Ng406EZGWMTmsl79VTuh3nz8q7tqCNGXdowGkfHR/AYYsPf/yLedan5aXTMS7MeM +/uu8vd2iMdLua6MQ/qEeOSuqKaqgSIrpPV+1sHm6wZViFTVdqXP5SmY1MExLzDME +7z4pKedhYzsmpZYUJ9QWQlvX4ZS6Y+w4/aMfCAyJASIEEAECAAwFAkRy1x0FAwAS +dQAACgkQlxC4m8pXrXyJLAgAsrVnbAJRLkqORSpB8XwM00Gla1N+xKgzldZhkHqC +fasrA+T2hwfZl87CZ0DXOm4gBK3mVaDYy1wgqPD+mXRtmgjBM0UF09AwcY/1lb/v +5l8FEUJBOpBJ7pUyYWoakVijSN0QNU+lWLZflCoRBL917IpTS9yngvPjYeVNn0Mq +2zhVsXnxhKNzvWab/TN2BDFmNa4BcnUxFkvgeSwNAegFBL9DfghfXBu3HWXWuAY/ +1FC5YHcRYf/ZaJ34DiVwyO/k3IvkM/E0Wl6tfrmnYXUizFuXQYv3TjgqbsJRrXaQ +lh3rcmb10mAJQMPMdyNulDN6o1APS4lr5guQGO9GcxzvC4kBIgQQAQIADAUCRISb +BwUDABJ1AAAKCRCXELibyletfAHjCAC8/anlsu7nGDOdZvaNs7kSu+GJILIsxaIi +a4qICzmIpqc7f3MoAj0BMDEFWk8t5Q946LJDF7V0ep0IsU9IOzy9mbbJGi1phWyJ +W3h2q+cZYA5a66GmoaM5WdZF2fWy+EVdM+I0En4nqnjcVDZNar+lD7yhyF2XRi82 +AsX+QCLW3uH1ofdtbx1ia8sSwSOM18rk9wsBF0GqKRNdZhKwbqIyzPtBruskgErd +HyAb8jODtYRrsAL/Zs0DPXfpw6j/rjrKQ1gJRazwAgGlkCxhiA99LwlEN6/DPUcc 
+P+JTxfHtfVMW55dHldhNv56L5gqBYClxkVPvx6ZdZYNY3NvyUkBeiQEiBBABAgAM +BQJElcLEBQMAEnUAAAoJEJcQuJvKV618u7gIAKtxn/yXgTEMSdpRzmhtoy7T5r8H +4KmIbh7kAzP2Z4hzcNQ7UIWP1qKnHhZRw1ZGEHW9S0n+5t6wvGmAvcVdXjFJHz9a +e5AieHRV08rn6PwymDeSxB6AwDf1fXVTSkfIPaRaBJgi6Zf+Kck5ODX2NGl4TOet +tSH46RbDMzCZYXgJ8SC3zsqkYO/ZpiBaOAahvT7Ejq/ChlRSUShytlDW5qaNf/JB +/MiNb0/3CvFmvSYoO+AvrAK2po+13CWZkXk0ioC39hey6GBk8dfXIW7oFQjXd+2f +jRT6ekUNKzXXcYGaooqilcSamhVDkP/+w29dUSvDxJE63xTFGMsFtAKYMcW5Ag0E +QNKOqxAIAPD9YvaFIx80FX53jYFgN0E7lsG35nejioD0exR5Jj6WQTgpjW8uuZQf +UrdWkMbafYwZtk48jvCQ2ycy9Stk2F0SOohNZYdm7aAp4O7+o6/JV8iZGLVQeCGX +vZeZp7JUkWte2rHfvGiUqmOL8/r1T/XJ4Nkl6cVgGhI68Lo8rj2KsvWlLWPy/XkD +bgPpynLxZYsAhyYX1KjGGineyFKM+Xz+Fz5IuZLi729FU7wXKu2qpq/UjMY4ZFCN +Io2hhpO9rSnYBymq7AHJefwWYqEbcYgJTPFS8/rcnMXMW9dnvki29wRPhq4Oikqi +tz7oR57H++6xcBBbrr7WWmyCGKu5emMAAwYH/R+Ozt7WlG/VRZolew+o+2I3MZGt +qm7buzSHRl8BpWy7bOs6As6DsBWj28PSHqN1LHMOY55m/sJGmV5M+VKwozm7B2lt +Ff65tl1b43YIp7vCAwpZsCtavIn2GOKHOu9T65vwdZrNUappv3blRc0kp3FCuWLI +gtkCdvX/nCVOT626vi2h4qR6FT4OfX5t4cTM2tZrW3hciCMiEgT2sBAA+LbtEgEb +AHLzRCpALq+gq/wWdQtjD5HOdZCVQX9BBzvu26aCZg3hvWWd5Z3yqNXIk512zrwD +u8/wDtIuBuLST8Ra6NOuYpxk9RjMMRFsmPn2B4lZXO0QgdlgRmt+OxSnuT6ISQQY +EQIACQUCQNKOqwIbDAAKCRA39o/1AVr8ikVrAKDMUsQGX1u8ZLsuUs6d8wsFdFwq +uACcDJAVc3KuTVrTWNn0Bdptwn2QKvI= +=9kB2 +-----END PGP PUBLIC KEY BLOCK----- +----------------------------------------------------------------------------------- + +pub 1024D/0AFCEE7C 2006-12-04 +uid Yonik Seeley +sig 3 0AFCEE7C 2006-12-04 Yonik Seeley +sub 4096g/1D87573B 2006-12-04 +sig 0AFCEE7C 2006-12-04 Yonik Seeley + +-----BEGIN PGP PUBLIC KEY BLOCK----- +Version: GnuPG v1.4.5 (Cygwin) + +mQGiBEV0pbkRBAC0MHFXllHMcpVqdIv1fiDTfmdiQtBn5bl0DY4vuR7xlltAo2UI +ZPom3MRcaT/NeOEa+abwEmELZWrrvI7H+8ngSfv+3bEcu+lkDcSo/ssLaDOzGVv4 +I6vVgAsCTvbH+4+vA6DF6HVHCVv1EkZ8hxhcELX4INY/SQ8GqiugJAhtnwCg/LX0 +wSQvzCJBeMB+J83rQGeiwl8D/3HMPuYLwVGvGNynG2i31NMJq8XjizromMuX4sxW +6kC+FW9jo73KPeIa+uRb8tIUhc9qt/J/RlHTpwIs5KfM7lvW82n7vTL53hMglL4l 
+3Uk9g/AsPZ8u3pPD9BOFGAqnX8IYkAViP2bABnCgMqXMeF96GU2vnzhEdBMbL0Ax +faeUA/9Jior6GepY18kpromH9+BcY9RLhJi66h4MVJ7yo8KeN+KnBsIr2LXPhrY0 +sUmYyX5Cgz9hZfac2eSTeLqdDCRvRAlQQdwwDQo23hX4fPc4h+vQGdKTNfTRzk46 +ZubipAmkquppKQ88/SYY41SeVSQXy+44e63AlXDWjhwRqfMD6LQfWW9uaWsgU2Vl +bGV5IDx5b25pa0BhcGFjaGUub3JnPohgBBMRAgAgBQJFdKW5AhsDBgsJCAcDAgQV +AggDBBYCAwECHgECF4AACgkQuD6oKgr87ny+KACgyzUyo1osmiXvTYLHdoSJgQ9l ++rEAoMXnbrDyY5exkD+Ey215T6rmxi/+uQQNBEV0px8QEACbaRr79hDDmA5MBNi0 +dLudcpKwQrqHowK1LNH5LvQy6FsuXdKlKNbqX8mFOnZuR9yCDtGz6WibO7P16Iwn +Ui0XjDrMknelinLhZ6uhJ0AVVMa8RDyXcZ7Rg/RYBGnqRTEyb41TutQagY7tC3Xq +3DTiy/O9gSWHsQUwxxirbvvduRPJov799YvE6CF+HGbyqjTrM2DU87Ov3PtVkpyF +xToUOwoGxhoyYmH6K6fgv9OhOcMExjCy97S74iNT5467hOGA/seCifQ3qAdQoDBk +qmvtjviipUkLSN7mcEF7mhF1ZJdlZIv3glThx3CNYHPXXRbYAaJCwESFjyiOpFd4 +7JrkbekazN9rrdbpusVmWMWkUxLjP7GAdGiCoE5xunuQ0iIZ4iutHSFb39wUViBo +rN0b4/VAmVJfjcvnw6K6NkxJyoaO8P2owSYxDS+fDznzyRACaouNX1gUJaQuH1tZ +u3AUzfwFnOzisCTUF2P9JTMpVaCJucNq9ydUQqbjrkfAR8l5U+4cn6sIoWzQAXFj +HyDlOHEKgZ2oZksPao4KLlAhgNKJ8ojx1XmvCA9Omw1NVPoKKyMqjlr97FXqP5S4 +yPjdCHKFMtBf/aRlIRFpdIMUXUViQm5w9arl3YGRb5730n8PJPSb3A4R9eRvDF5P +XVyQz6+6ezfuNEPOAr+bqbgM/wAEDRAAjVP16SqhI54mHmiyxgRZVar+ABIdczVr +Cl90RifuOHYEi2DZE3uhPoJ1sAG4XCzO7qGNngdOZYP8vaUtfvMihJY19yM146j+ +lLaDM/dm5W09Z6x8SU2RqgBYEzK+PlQhUhGVXUhMJQWNH9wSkL/sG3HXhSp4UTio +RK5jhaJ/xIKvQbrcnXGbC/zol/GhEkf2DdC4KuTLb3dXdET9w7KMnQ6fpjUIFbRW +BuypN/EFxDJWGYd+q8jnNw0oHC9Ud85Ox0rui93r4cGsAniIQsPnB+f5XYOEhgI0 +HQUq3qk9pBjUg2S9vtSUCI9ElReF+BQ973uYArm27eXYksQlbu4kF06/JYGa3HXm +/qxFMoCveEkp6xdNSpSq6VVLqtGUVnTRbCTBCxIR7eFlDjzEQEmzmIRNTgQKRqo6 +tyOv6/eUnJSFQy6fYyjNfYShjr2jJaTKqlk3qJRMO+ipSWns7ecEXyplUSZD35lu +3ynylBLxzkqw6anAMFjqO6FZvna8RT7vm4Ack8N71H4rwrFAf9Pab610LPKYq4XU +2PIPRitveDCbyh4zC5sV/JFeMblbgIJ5vde5BosFp1l7VaG14chMGxLK1eXTXv3e +7UDlFXLWhb7kGMZfFFaWT2B1RjRHDlRUtrcPsaR+KrYKTL4e+fIgVQbHexO8gLah +stBndV1asSOISQQYEQIACQUCRXSnHwIbDAAKCRC4PqgqCvzufBeBAKC76wRazrF0 +VU+hcYmr9vesir/wOgCfVthpK20jcfrs0WsjTyoYRtHygi8= +=EYGl +-----END PGP PUBLIC KEY 
BLOCK----- + +pub 4096R/FE045966 2009-10-13 + Key fingerprint = A46D 8682 A850 E44E 4FEC 20EB 8A8A 771F FE04 5966 +uid Grant Ingersoll (CODE SIGNING KEY) +sub 4096R/72F9E0C0 2009-10-13 + +-----BEGIN PGP PUBLIC KEY BLOCK----- +Version: GnuPG v1.4.7 (Darwin) + +mQINBErU6JEBEACsovhRB+Z8VrdTU76Qxg8u+0WiSaoilsksGgOaphWvWt0b6rA3 +PJSGuDuMJfL+lGqk+aARehiZNbNl0cGYtP4Av/fElTdSr1UlmDeFjG+7Qi7FB6KK +vAjv4mw+XM05QRTADjpNkDfAEXGPR1GNE7lOfPvNqvAl9YMLHJOBGlVqq5ZZAPHZ +/R6Cg7+5qHbVJKtPqSxAoPJQwg6ADwDZv9nWZfbp2VwVwBkuVxBCRBPFN+WTFmW/ +k1LSxUIeHqOG9RXo7S/DYddthE0iBzP3yKA5fs3k9zaQZNAjC92Dj/M4oDiIimqG +DJAO7ixpQY2ug9FB4LtWkyeNRnOM1LKd3TbZNqzZt4TuhCI3C5LAfVoXRPxe4T3n +4hvWkL/2THSKfC4u0CLGjw41rXhD86YYiIWdvxVezfESzpqZPhBrAZWfx7kB69pq +8DxWFXCaA31S/L2I6B1ZUmpOhtxg0cDoevipne7jaqRjA7TknOC45+CrpuEkOvQO +8rwHbtshT/JDFLPfq0ruDH21eV4QYP/JLffDGyEtoRRRr4M2DZCFkOCWIPE0l142 +5mIi0nqMSj1HK5kuwMQoNAf6vF6P6MYyGWJ8nR13CDtFOnjpOpuxZiTQhlb0cqXj +X4yQBjFim8ztGOnHrlSh25OgeKuiCWiCIuyFGykjX21RtJ/AwiOeMr4zkwARAQAB +tDhHcmFudCBJbmdlcnNvbGwgKENPREUgU0lHTklORyBLRVkpIDxnc2luZ2Vyc0Bh +cGFjaGUub3JnPokCNwQTAQoAIQUCStTokQIbAwULCQgHAwUVCgkICwUWAgMBAAIe +AQIXgAAKCRCKincf/gRZZqcfD/4+zhoLTTpTGRNutTyjPnR85aTuMUVtqYNLjEcF +PSV7p1OPhsGd3g5iaQtwCMsbWDPRSL+Xvy4/E4D32YjUR026mzAUnICq4Z35TecT +StIeMadgSwJ0fNvuzBB8jJfUYW6a91D9TZirEC4fRVRL1bnJvmjm0HnGLQa5uGCl +dUMbR04YXU+5V8S6KbRtLwhiVDD/do6XKeS9PGY941sw9182mLZbIbEcQrNWf8s/ +eOnobosxg5a0WxKfSZgQfNqkkuNlsRbKwI2gSjzAl030r6pWzduvftqFdnoaOBN/ +yNM1BghAhXmb/hxjuQa0x+xan15/lY5FwDX1bdnZcEI0KHJ/FIPFgk59XJVnZYH/ +tRI6jqmxQvdliA9q6rt/ctZAYaOhmXI28eeLCmdnZKUZjiG1ORYC0tIYdOYc/nXP +NqryDaa2OD2rMy8BM5tfQ/Om/6kavDqn/m8x0jLLuOne5Umeste3yTZ3pbJWc5GF +izOCX0FualpLXNBWt3jCooSaj5Gx92pFgoanbtI91ouVNsC24eKOJZYibKLP5fuH +B1sNvtPcWE3e99qOzVnolHjbDX4KzXCW+yFad714kK1vdAlDvqIt2OuEuQFggZHS +5G5FbGjgqFUG5D0uckBmu/8lZ82YW2yhuQosa5EOMwChG1sqtsYuddbifFF78AM4 +vYnmKohGBBARCgAGBQJK1OocAAoJEMsDFRmoZ+ixVg4An0MfyRmOv0tA8/UibzyK +KPrzo1aeAKCIV+M3L+gPT9yJ9843HxyBWL+j6bkCDQRK1OiRARAApG7lRX08hPq5 
+7KRRUsK6GChneFeZZNNI35VpFQHPe8y/4ej7Ydnr37otEjIvd+14p0M+PF6igCIm +IGp2dg57PFfoOVW+apoudAtBpWkdBSjMJQ4pCoLwyv/HSXKW6QxMZeO5OBdT4iAg +AT36M2m/lpv5wC7g7SUJDusyFPuYtMtxAkj6TUPTFJBS4+FzhrNBoCXxILDKh0AE +N9Sslm37tC7Le84PkiI/k0C//KqNZFQ11Cazyf0CuQKj4gLtkfBTaDenlsufAKNI +M2pkIxtLNpx93Gcay2lVKD9Dv2i4EmQID7Vt6fZ2CP+60K7CnepLhapkfWa9Rk71 +7fqLIlXCFYdWEmuT614dnDuuuRfm12ZqT3GAx9F0elZ2yv4DrXnW1F60ASJuFnDf +RYcbTmw2VVoDiAo2al4uoE7a2yjyv7PExB65k0Uj0n1V4PF413np3r/WLSWBxxNu +9K8oV0KZI/UxvhMULGI23ryNTZAsoi3E44lZ0EUrJTWMRvLuewQdNpNLmlo30HNL +VTyoIlWbzhsu4ejKVqLBs/Q9M92c/Um6FJM5owkiGBEvnRtGGWhf89RonCncwg2g +i/rk91TTKnhGpYv3tenLjZ6qmlgMgT+KUrElqrLv02kD3xZ7+2zwhaLYWFlZN6wr +xXlA/FDOEz3tChqG+41Vf8W26+QnC98AEQEAAYkCHwQYAQoACQUCStTokQIbDAAK +CRCKincf/gRZZgzbEACfLTy+6afsT4wAgKYdlc+6w3bBqFnDzoG0JRIrUsVhEnjB +xhl+RZA9XMkPvw5iAeNOWSU+SoPz8hGrv3tkGJXqfeThOAB5IVDDW8FDmm57/sl4 +2m09B+QHZ7Buw56OD90GoCSm1otkbaIUjoMTbuQxTRb1qykVHO4AgLReaeMb9jqu +hqwxyzGzWMqVR01olgvCkSDrooYjA1ltQ84JrJhic5+zdQq1XYIv0dTPP3CcrFcy +b6pVx+Y31hK9f0EXoNZv6Ekg6B5L7LUleB3XdCL+jI1eWlQ3DTE7+OkVcehpyygc +JFgPVm/0KMPkHTa3Fw55YWbcrwAKGv5fWSj852pbaW/GNgDAiay0MPExEYey2cu5 +Pi8dUOmJoqcznBt9qQrrmRNWPRa1Gu9vowM9m90+jtU+Tlxo104tj8gKWVngnPhn +v1VPKPblEwJfuqC3DQh3XWzs3AwjKLXXfwznF7slqBRT48BwdLsietnovoTsZXYg +7ks2s/QxklWisZUxrhpZTNeA/WQKxyXwiN2sKxulwjd1PnAz5DeFQWKDNZHyHP+T +1cqtTc96tSwb2XW3iA2uZlD4aTkrOmm3FKbauC/rFmCjkpvwpvqcIdpib4M2DgNx +zAZ2cJnxw3f57qc9Yh5qvhDUephwOAlAy8ekc1AmX14F+mwYE3GjcqeGdEbLNw== +=GLHu +-----END PGP PUBLIC KEY BLOCK----- \ No newline at end of file diff --git a/solr/LICENSE.txt b/solr/LICENSE.txt new file mode 100644 index 00000000000..06520adb501 --- /dev/null +++ b/solr/LICENSE.txt @@ -0,0 +1,1086 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +========================================================================== +Portions of Jetty 6 are bundled in the Solr example server. +Jetty 6 includes a binary javax.servlet package licensed under the +Common Development and Distribution License. +-------------------------------------------------------------------------- +COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) Version 1.0 + +1. Definitions. + +1.1. Contributor means each individual or entity that creates or contributes to +the creation of Modifications. + +1.2. Contributor Version means the combination of the Original Software, prior +Modifications used by a Contributor (if any), and the Modifications made by +that particular Contributor. + +1.3. 
Covered Software means (a) the Original Software, or (b) Modifications, or +(c) the combination of files containing Original Software with files containing +Modifications, in each case including portions thereof. + +1.4. Executable means the Covered Software in any form other than Source Code. + +1.5. Initial Developer means the individual or entity that first makes Original +Software available under this License. + +1.6. Larger Work means a work which combines Covered Software or portions +thereof with code not governed by the terms of this License. + +1.7. License means this document. + +1.8. Licensable means having the right to grant, to the maximum extent +possible, whether at the time of the initial grant or subsequently acquired, +any and all of the rights conveyed herein. + +1.9. Modifications means the Source Code and Executable form of any of the +following: + +A. Any file that results from an addition to, deletion from or modification of +the contents of a file containing Original Software or previous Modifications; + +B. Any new file that contains any part of the Original Software or previous +Modification; or + +C. Any new file that is contributed or otherwise made available under the terms +of this License. + +1.10. Original Software means the Source Code and Executable form of computer +software code that is originally released under this License. + +1.11. Patent Claims means any patent claim(s), now owned or hereafter acquired, +including without limitation, method, process, and apparatus claims, in any +patent Licensable by grantor. + +1.12. Source Code means (a) the common form of computer software code in which +modifications are made and (b) associated documentation included in or with +such code. + +1.13. You (or Your) means an individual or a legal entity exercising rights +under, and complying with all of the terms of, this License. 
For legal
+entities, You includes any entity which controls, is controlled by, or is under
+common control with You. For purposes of this definition, control means (a) the
+power, direct or indirect, to cause the direction or management of such entity,
+whether by contract or otherwise, or (b) ownership of more than fifty percent
+(50%) of the outstanding shares or beneficial ownership of such entity.
+
+2. License Grants.
+
+2.1. The Initial Developer Grant. Conditioned upon Your compliance with
+Section 3.1 below and subject to third party intellectual property claims, the
+Initial Developer hereby grants You a world-wide, royalty-free, non-exclusive
+license: (a) under intellectual property rights (other than patent or
+trademark) Licensable by Initial Developer, to use, reproduce, modify, display,
+perform, sublicense and distribute the Original Software (or portions thereof),
+with or without Modifications, and/or as part of a Larger Work; and (b) under
+Patent Claims infringed by the making, using or selling of Original Software,
+to make, have made, use, practice, sell, and offer for sale, and/or otherwise
+dispose of the Original Software (or portions thereof). (c) The licenses
+granted in Sections 2.1(a) and (b) are effective on the date Initial Developer
+first distributes or otherwise makes the Original Software available to a third
+party under the terms of this License. (d) Notwithstanding Section 2.1(b)
+above, no patent license is granted: (1) for code that You delete from the
+Original Software, or (2) for infringements caused by: (i) the modification of
+the Original Software, or (ii) the combination of the Original Software with
+other software or devices.
+
+2.2. Contributor Grant. 
Conditioned upon Your compliance with Section 3.1
+below and subject to third party intellectual property claims, each Contributor
+hereby grants You a world-wide, royalty-free, non-exclusive license: (a) under
+intellectual property rights (other than patent or trademark) Licensable by
+Contributor to use, reproduce, modify, display, perform, sublicense and
+distribute the Modifications created by such Contributor (or portions thereof),
+either on an unmodified basis, with other Modifications, as Covered Software
+and/or as part of a Larger Work; and (b) under Patent Claims infringed by the
+making, using, or selling of Modifications made by that Contributor either
+alone and/or in combination with its Contributor Version (or portions of such
+combination), to make, use, sell, offer for sale, have made, and/or otherwise
+dispose of: (1) Modifications made by that Contributor (or portions thereof);
+and (2) the combination of Modifications made by that Contributor with its
+Contributor Version (or portions of such combination). (c) The licenses
+granted in Sections 2.2(a) and 2.2(b) are effective on the date Contributor
+first distributes or otherwise makes the Modifications available to a third
+party. (d) Notwithstanding Section 2.2(b) above, no patent license is granted:
+(1) for any code that Contributor has deleted from the Contributor Version;
+(2) for infringements caused by: (i) third party modifications of Contributor
+Version, or (ii) the combination of Modifications made by that Contributor with
+other software (except as part of the Contributor Version) or other devices; or
+(3) under Patent Claims infringed by Covered Software in the absence of
+Modifications made by that Contributor.
+
+3. Distribution Obligations.
+
+3.1. Availability of Source Code. 
+ +Any Covered Software that You distribute or otherwise make available in +Executable form must also be made available in Source Code form and that Source +Code form must be distributed only under the terms of this License. You must +include a copy of this License with every copy of the Source Code form of the +Covered Software You distribute or otherwise make available. You must inform +recipients of any such Covered Software in Executable form as to how they can +obtain such Covered Software in Source Code form in a reasonable manner on or +through a medium customarily used for software exchange. + +3.2. Modifications. + +The Modifications that You create or to which You contribute are governed by +the terms of this License. You represent that You believe Your Modifications +are Your original creation(s) and/or You have sufficient rights to grant the +rights conveyed by this License. + +3.3. Required Notices. You must include a notice in each of Your Modifications +that identifies You as the Contributor of the Modification. You may not remove +or alter any copyright, patent or trademark notices contained within the +Covered Software, or any notices of licensing or any descriptive text giving +attribution to any Contributor or the Initial Developer. + +3.4. Application of Additional Terms. You may not offer or impose any terms on +any Covered Software in Source Code form that alters or restricts the +applicable version of this License or the recipients rights hereunder. You may +choose to offer, and to charge a fee for, warranty, support, indemnity or +liability obligations to one or more recipients of Covered Software. However, +you may do so only on Your own behalf, and not on behalf of the Initial +Developer or any Contributor. 
You must make it absolutely clear that any such +warranty, support, indemnity or liability obligation is offered by You alone, +and You hereby agree to indemnify the Initial Developer and every Contributor +for any liability incurred by the Initial Developer or such Contributor as a +result of warranty, support, indemnity or liability terms You offer. + +3.5. Distribution of Executable Versions. You may distribute the Executable +form of the Covered Software under the terms of this License or under the terms +of a license of Your choice, which may contain terms different from this +License, provided that You are in compliance with the terms of this License and +that the license for the Executable form does not attempt to limit or alter the +recipients rights in the Source Code form from the rights set forth in this +License. If You distribute the Covered Software in Executable form under a +different license, You must make it absolutely clear that any terms which +differ from this License are offered by You alone, not by the Initial Developer +or Contributor. You hereby agree to indemnify the Initial Developer and every +Contributor for any liability incurred by the Initial Developer or such +Contributor as a result of any such terms You offer. + +3.6. Larger Works. You may create a Larger Work by combining Covered Software +with other code not governed by the terms of this License and distribute the +Larger Work as a single product. In such a case, You must make sure the +requirements of this License are fulfilled for the Covered Software. + +4. Versions of the License. + +4.1. New Versions. Sun Microsystems, Inc. is the initial license steward and +may publish revised and/or new versions of this License from time to time. Each +version will be given a distinguishing version number. Except as provided in +Section 4.3, no one other than the license steward has the right to modify this +License. + +4.2. Effect of New Versions. 
+
+You may always continue to use, distribute or otherwise make the Covered
+Software available under the terms of the version of the License under which
+You originally received the Covered Software. If the Initial Developer includes
+a notice in the Original Software prohibiting it from being distributed or
+otherwise made available under any subsequent version of the License, You must
+distribute and make the Covered Software available under the terms of the
+version of the License under which You originally received the Covered
+Software. Otherwise, You may also choose to use, distribute or otherwise make
+the Covered Software available under the terms of any subsequent version of the
+License published by the license steward. 4.3. Modified Versions.
+
+When You are an Initial Developer and You want to create a new license for Your
+Original Software, You may create and use a modified version of this License if
+You: (a) rename the license and remove any references to the name of the
+license steward (except to note that the license differs from this License);
+and (b) otherwise make it clear that the license contains terms which differ
+from this License.
+
+5. DISCLAIMER OF WARRANTY.
+
+COVERED SOFTWARE IS PROVIDED UNDER THIS LICENSE ON AN AS IS BASIS, WITHOUT
+WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, WITHOUT
+LIMITATION, WARRANTIES THAT THE COVERED SOFTWARE IS FREE OF DEFECTS,
+MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR NON-INFRINGING. THE ENTIRE RISK
+AS TO THE QUALITY AND PERFORMANCE OF THE COVERED SOFTWARE IS WITH YOU. SHOULD
+ANY COVERED SOFTWARE PROVE DEFECTIVE IN ANY RESPECT, YOU (NOT THE INITIAL
+DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME THE COST OF ANY NECESSARY SERVICING,
+REPAIR OR CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN ESSENTIAL PART
+OF THIS LICENSE. NO USE OF ANY COVERED SOFTWARE IS AUTHORIZED HEREUNDER EXCEPT
+UNDER THIS DISCLAIMER.
+
+6. TERMINATION.
+
+6.1. 
This License and the rights granted hereunder will terminate automatically
+if You fail to comply with terms herein and fail to cure such breach within 30
+days of becoming aware of the breach. Provisions which, by their nature, must
+remain in effect beyond the termination of this License shall survive.
+
+6.2. If You assert a patent infringement claim (excluding declaratory judgment
+actions) against Initial Developer or a Contributor (the Initial Developer or
+Contributor against whom You assert such claim is referred to as Participant)
+alleging that the Participant Software (meaning the Contributor Version where
+the Participant is a Contributor or the Original Software where the Participant
+is the Initial Developer) directly or indirectly infringes any patent, then any
+and all rights granted directly or indirectly to You by such Participant, the
+Initial Developer (if the Initial Developer is not the Participant) and all
+Contributors under Sections 2.1 and/or 2.2 of this License shall, upon 60 days
+notice from Participant terminate prospectively and automatically at the
+expiration of such 60 day notice period, unless if within such 60 day period
+You withdraw Your claim with respect to the Participant Software against such
+Participant either unilaterally or pursuant to a written agreement with
+Participant.
+
+6.3. In the event of termination under Sections 6.1 or 6.2 above, all end user
+licenses that have been validly granted by You or any distributor hereunder
+prior to termination (excluding licenses granted to You by any distributor)
+shall survive termination.
+
+7. LIMITATION OF LIABILITY. 
+
+UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT (INCLUDING
+NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL DEVELOPER, ANY
+OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED SOFTWARE, OR ANY SUPPLIER OF
+ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR ANY INDIRECT, SPECIAL,
+INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY CHARACTER INCLUDING, WITHOUT
+LIMITATION, DAMAGES FOR LOST PROFITS, LOSS OF GOODWILL, WORK STOPPAGE, COMPUTER
+FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER COMMERCIAL DAMAGES OR LOSSES, EVEN
+IF SUCH PARTY SHALL HAVE BEEN INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS
+LIMITATION OF LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL
+INJURY RESULTING FROM SUCH PARTYS NEGLIGENCE TO THE EXTENT APPLICABLE LAW
+PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OR
+LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO THIS EXCLUSION AND
+LIMITATION MAY NOT APPLY TO YOU.
+
+8. U.S. GOVERNMENT END USERS.
+
+The Covered Software is a commercial item, as that term is defined in
+48 C.F.R. 2.101 (Oct. 1995), consisting of commercial computer software (as
+that term is defined at 48 C.F.R. § 252.227-7014(a)(1)) and commercial computer
+software documentation as such terms are used in 48 C.F.R. 12.212 (Sept. 1995).
+Consistent with 48 C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4
+(June 1995), all U.S. Government End Users acquire Covered Software with only
+those rights set forth herein. This U.S. Government Rights clause is in lieu
+of, and supersedes, any other FAR, DFAR, or other clause or provision that
+addresses Government rights in computer software under this License.
+
+9. MISCELLANEOUS.
+
+This License represents the complete agreement concerning subject matter
+hereof. If any provision of this License is held to be unenforceable, such
+provision shall be reformed only to the extent necessary to make it
+enforceable. 
This License shall be governed by the law of the jurisdiction +specified in a notice contained within the Original Software (except to the +extent applicable law, if any, provides otherwise), excluding such +jurisdictions conflict-of-law provisions. Any litigation relating to this +License shall be subject to the jurisdiction of the courts located in the +jurisdiction and venue specified in a notice contained within the Original +Software, with the losing party responsible for costs, including, without +limitation, court costs and reasonable attorneys fees and expenses. The +application of the United Nations Convention on Contracts for the International +Sale of Goods is expressly excluded. Any law or regulation which provides that +the language of a contract shall be construed against the drafter shall not +apply to this License. You agree that You alone are responsible for compliance +with the United States export administration regulations (and the export +control laws and regulation of any other countries) when You use, distribute or +otherwise make available any Covered Software. + +10. RESPONSIBILITY FOR CLAIMS. + +As between Initial Developer and the Contributors, each party is responsible +for claims and damages arising, directly or indirectly, out of its utilization +of rights under this License and You agree to work with Initial Developer and +Contributors to distribute such responsibility on an equitable basis. Nothing +herein is intended or shall be deemed to constitute any admission of liability. + +NOTICE PURSUANT TO SECTION 9 OF THE COMMON DEVELOPMENT AND DISTRIBUTION LICENSE +(CDDL) The GlassFish code released under the CDDL shall be governed by the laws +of the State of California (excluding conflict-of-law provisions). 
Any +litigation relating to this License shall be subject to the jurisdiction of the +Federal Courts of the Northern District of California and the state courts of +the State of California, with venue lying in Santa Clara County, California. + + +========================================================================== +The following license applies to parts of the lucene-snowball jar +that are generated from the snowball sources at http://snowball.tartarus.org/ +-------------------------------------------------------------------------- +The BSD License + +Copyright (c) 2001, Dr Martin Porter, Copyright (c) 2002, Richard Boulton +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + * Neither the name of the nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +========================================================================== +The following license applies to easymock.jar +-------------------------------------------------------------------------- +EasyMock 2 License (MIT License) +Copyright (c) 2001-2007 OFFIS, Tammo Freese. + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ +========================================================================== +The following license applies to the JQuery JavaScript library +-------------------------------------------------------------------------- +Copyright (c) 2008 John Resig, http://jquery.com/ + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +========================================================================== +The following license applies to stax-utils.jar +-------------------------------------------------------------------------- +Copyright (c) 2004, Christian Niles, unit12.net +Copyright (c) 2004, Sun Microsystems, Inc. +Copyright (c) 2006, John Kristian +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. 
+ + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of the listed copyright holders nor the names + of its contributors may be used to endorse or promote products + derived from this software without specific prior written + permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +========================================================================== +The following license applies to JUnit +-------------------------------------------------------------------------- +Common Public License - v 1.0 + +THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS COMMON PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. + +1. 
DEFINITIONS + +"Contribution" means: + + a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and + b) in the case of each subsequent Contributor: + + i) changes to the Program, and + + ii) additions to the Program; + + where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program. + +"Contributor" means any person or entity that distributes the Program. + +"Licensed Patents " mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program. + +"Program" means the Contributions distributed in accordance with this Agreement. + +"Recipient" means anyone who receives the Program under this Agreement, including all Contributors. + +2. GRANT OF RIGHTS + + a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form. + + b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. 
This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder. + + c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program. + + d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement. + +3. 
REQUIREMENTS + +A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that: + + a) it complies with the terms and conditions of this Agreement; and + + b) its license agreement: + + i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose; + + ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits; + + iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and + + iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange. + +When the Program is made available in source code form: + + a) it must be made available under this Agreement; and + + b) a copy of this Agreement must be included with each copy of the Program. + +Contributors may not remove or alter any copyright notices contained within the Program. + +Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution. + +4. COMMERCIAL DISTRIBUTION + +Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. 
Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense. + +For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages. + +5. 
NO WARRANTY + +EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement, including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations. + +6. DISCLAIMER OF LIABILITY + +EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. + +7. GENERAL + +If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable. + +If Recipient institutes patent litigation against a Contributor with respect to a patent applicable to software (including a cross-claim or counterclaim in a lawsuit), then any patent licenses granted by that Contributor to such Recipient under this Agreement shall terminate as of the date such litigation is filed. 
In addition, if Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed. + +All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive. + +Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. IBM is the initial Agreement Steward. IBM may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. 
Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. + +This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation. + + + +========================================================================== +The following license applies to slf4j +-------------------------------------------------------------------------- + +Copyright (c) 2004-2008 QOS.ch +All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +========================================================================== +contrib/clustering +========================================================================== +Carrot2 Project + +Copyright (C) 2002-2008, Dawid Weiss, Stanislaw Osinski. +Portions (C) Contributors listed in "carrot2.CONTRIBUTORS" file. +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +- Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +- Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or + other materials provided with the distribution. + +- Neither the name of the Poznan University of Technology, Poznan, Poland nor + the names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +- We request that you include in the end-user documentation provided with the + redistribution and/or in the software itself an acknowledgement equivalent to + the following: "This product includes software developed by the Carrot2 + Project." + +- No algorithms or technical solutions in the project may be patented or claimed + proprietary. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +========================================================================== +EHCache +/** + * Copyright 2003-2008 Luck Consulting Pty Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +========================================================================== +Google Collections +/** + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +========================================================================== +Jackson + +/** + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + + +=========================================================================== +Apache Tika Licenses - contrib/extraction +--------------------------------------------------------------------------- +Apache Tika is licensed under the ASL 2.0. See above for the text of the license + +APACHE TIKA SUBCOMPONENTS + +Apache Tika includes a number of subcomponents with separate copyright notices +and license terms. Your use of these subcomponents is subject to the terms and +conditions of the following licenses. + +Bouncy Castle libraries (bcmail and bcprov) + + Copyright (c) 2000-2006 The Legion Of The Bouncy Castle + (http://www.bouncycastle.org) + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files + (the "Software"), to deal in the Software without restriction, + including without limitation the rights to use, copy, modify, merge, + publish, distribute, sublicense, and/or sell copies of the Software, + and to permit persons to whom the Software is furnished to do so, + subject to the following conditions: + + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR + OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. + +PDFBox library (pdfbox) + + Copyright (c) 2003-2005, www.pdfbox.org + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + 3. Neither the name of pdfbox; nor the names of its + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. 
IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS + OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY + OF SUCH DAMAGE. + +FontBox and JempBox libraries (fontbox, jempbox) + + Copyright (c) 2003-2005, www.fontbox.org + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + 3. Neither the name of fontbox; nor the names of its + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. 
IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS + OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY + OF SUCH DAMAGE. + +ICU4J library (icu4j) + + Copyright (c) 1995-2005 International Business Machines Corporation + and others + + All rights reserved. + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, and/or sell copies of the Software, and to permit persons + to whom the Software is furnished to do so, provided that the above + copyright notice(s) and this permission notice appear in all copies + of the Software and that both the above copyright notice(s) and this + permission notice appear in supporting documentation. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. + IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE + BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, + OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, + WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, + ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS + SOFTWARE. 
+ + Except as contained in this notice, the name of a copyright holder shall + not be used in advertising or otherwise to promote the sale, use or other + dealings in this Software without prior written authorization of the + copyright holder. + +ASM library (asm) + + Copyright (c) 2000-2005 INRIA, France Telecom + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of the copyright holders nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF + THE POSSIBILITY OF SUCH DAMAGE. 
+ +================================================================================================= +The following license applies to JavaMail API 1.4.1 and JavaBeans Activation Framework (JAF) 1.1 +------------------------------------------------------------------------------------------------- +COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) Version 1.0 1. + +Definitions. + +1.1. Contributor means each individual or entity that creates or contributes to the creation of Modifications. + +1.2. Contributor Version means the combination of the Original Software, prior Modifications used by a Contributor (if any), and the Modifications made by that particular Contributor. + +1.3. Covered Software means (a) the Original Software, or (b) Modifications, or (c) the combination of files containing Original Software with files containing Modifications, in each case including portions thereof. + +1.4. Executable means the Covered Software in any form other than Source Code. + +1.5. Initial Developer means the individual or entity that first makes Original Software available under this License. + +1.6. Larger Work means a work which combines Covered Software or portions thereof with code not governed by the terms of this License. + +1.7. License means this document. + +1.8. Licensable means having the right to grant, to the maximum extent possible, whether at the time of the initial grant or subsequently acquired, any and all of the rights conveyed herein. + +1.9. Modifications means the Source Code and Executable form of any of the following: A. Any file that results from an addition to, deletion from or modification of the contents of a file containing Original Software or previous Modifications; B. Any new file that contains any part of the Original Software or previous Modification; or C. Any new file that is contributed or otherwise made available under the terms of this License. + +1.10. 
Original Software means the Source Code and Executable form of computer software code that is originally released under this License. + +1.11. Patent Claims means any patent claim(s), now owned or hereafter acquired, including without limitation, method, process, and apparatus claims, in any patent Licensable by grantor. + +1.12. Source Code means (a) the common form of computer software code in which modifications are made and (b) associated documentation included in or with such code. + +1.13. You (or Your) means an individual or a legal entity exercising rights under, and complying with all of the terms of, this License. For legal entities, You includes any entity which controls, is controlled by, or is under common control with You. For purposes of this definition, control means (a) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (b) ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity. + +2. License Grants. + + 2.1. The Initial Developer Grant. 
Conditioned upon Your compliance with Section 3.1 below and subject to third party intellectual property claims, the Initial Developer hereby grants You a world-wide, royalty-free, non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) Licensable by Initial Developer, to use, reproduce, modify, display, perform, sublicense and distribute the Original Software (or portions thereof), with or without Modifications, and/or as part of a Larger Work; and + +(b) under Patent Claims infringed by the making, using or selling of Original Software, to make, have made, use, practice, sell, and offer for sale, and/or otherwise dispose of the Original Software (or portions thereof); + + (c) The licenses granted in Sections 2.1(a) and (b) are effective on the date Initial Developer first distributes or otherwise makes the Original Software available to a third party under the terms of this License; + + (d) Notwithstanding Section 2.1(b) above, no patent license is granted: (1) for code that You delete from the Original Software, or (2) for infringements caused by: (i) the modification of the Original Software, or (ii) the combination of the Original Software with other software or devices. + +2.2. Contributor Grant. 
Conditioned upon Your compliance with Section 3.1 below and subject to third party intellectual property claims, each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) Licensable by Contributor to use, reproduce, modify, display, perform, sublicense and distribute the Modifications created by such Contributor (or portions thereof), either on an unmodified basis, with other Modifications, as Covered Software and/or as part of a Larger Work; and + +(b) under Patent Claims infringed by the making, using, or selling of Modifications made by that Contributor either alone and/or in combination with its Contributor Version (or portions of such combination), to make, use, sell, offer for sale, have made, and/or otherwise dispose of: (1) Modifications made by that Contributor (or portions thereof); and (2) the combination of Modifications made by that Contributor with its Contributor Version (or portions of such combination). + +(c) The licenses granted in Sections 2.2(a) and 2.2(b) are effective on the date Contributor first distributes or otherwise makes the Modifications available to a third party. + +(d) Notwithstanding Section 2.2(b) above, no patent license is granted: (1) for any code that Contributor has deleted from the Contributor Version; (2) for infringements caused by: (i) third party modifications of Contributor Version, or (ii) the combination of Modifications made by that Contributor with other software (except as part of the Contributor Version) or other devices; or (3) under Patent Claims infringed by Covered Software in the absence of Modifications made by that Contributor. + +3. Distribution Obligations. + +3.1. Availability of Source Code. 
Any Covered Software that You distribute or otherwise make available in Executable form must also be made available in Source Code form and that Source Code form must be distributed only under the terms of this License. You must include a copy of this License with every copy of the Source Code form of the Covered Software You distribute or otherwise make available. You must inform recipients of any such Covered Software in Executable form as to how they can obtain such Covered Software in Source Code form in a reasonable manner on or through a medium customarily used for software exchange. + +3.2. Modifications. The Modifications that You create or to which You contribute are governed by the terms of this License. You represent that You believe Your Modifications are Your original creation(s) and/or You have sufficient rights to grant the rights conveyed by this License. + +3.3. Required Notices. You must include a notice in each of Your Modifications that identifies You as the Contributor of the Modification. You may not remove or alter any copyright, patent or trademark notices contained within the Covered Software, or any notices of licensing or any descriptive text giving attribution to any Contributor or the Initial Developer. + +3.4. Application of Additional Terms. You may not offer or impose any terms on any Covered Software in Source Code form that alters or restricts the applicable version of this License or the recipients rights hereunder. You may choose to offer, and to charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Covered Software. However, you may do so only on Your own behalf, and not on behalf of the Initial Developer or any Contributor. 
You must make it absolutely clear that any such warranty, support, indemnity or liability obligation is offered by You alone, and You hereby agree to indemnify the Initial Developer and every Contributor for any liability incurred by the Initial Developer or such Contributor as a result of warranty, support, indemnity or liability terms You offer. + +3.5. Distribution of Executable Versions. You may distribute the Executable form of the Covered Software under the terms of this License or under the terms of a license of Your choice, which may contain terms different from this License, provided that You are in compliance with the terms of this License and that the license for the Executable form does not attempt to limit or alter the recipients rights in the Source Code form from the rights set forth in this License. If You distribute the Covered Software in Executable form under a different license, You must make it absolutely clear that any terms which differ from this License are offered by You alone, not by the Initial Developer or Contributor. You hereby agree to indemnify the Initial Developer and every Contributor for any liability incurred by the Initial Developer or such Contributor as a result of any such terms You offer. + +3.6. Larger Works. You may create a Larger Work by combining Covered Software with other code not governed by the terms of this License and distribute the Larger Work as a single product. In such a case, You must make sure the requirements of this License are fulfilled for the Covered Software. + +4. Versions of the License. + +4.1. New Versions. Sun Microsystems, Inc. is the initial license steward and may publish revised and/or new versions of this License from time to time. Each version will be given a distinguishing version number. Except as provided in Section 4.3, no one other than the license steward has the right to modify this License. + +4.2. Effect of New Versions. 
You may always continue to use, distribute or otherwise make the Covered Software available under the terms of the version of the License under which You originally received the Covered Software. If the Initial Developer includes a notice in the Original Software prohibiting it from being distributed or otherwise made available under any subsequent version of the License, You must distribute and make the Covered Software available under the terms of the version of the License under which You originally received the Covered Software. Otherwise, You may also choose to use, distribute or otherwise make the Covered Software available under the terms of any subsequent version of the License published by the license steward. + +4.3. Modified Versions. When You are an Initial Developer and You want to create a new license for Your Original Software, You may create and use a modified version of this License if You: (a) rename the license and remove any references to the name of the license steward (except to note that the license differs from this License); and (b) otherwise make it clear that the license contains terms which differ from this License. + +5. DISCLAIMER OF WARRANTY. COVERED SOFTWARE IS PROVIDED UNDER THIS LICENSE ON AN AS IS BASIS, WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES THAT THE COVERED SOFTWARE IS FREE OF DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR NON-INFRINGING. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE COVERED SOFTWARE IS WITH YOU. SHOULD ANY COVERED SOFTWARE PROVE DEFECTIVE IN ANY RESPECT, YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME THE COST OF ANY NECESSARY SERVICING, REPAIR OR CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. NO USE OF ANY COVERED SOFTWARE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER. + +6. TERMINATION. + +6.1. 
This License and the rights granted hereunder will terminate automatically if You fail to comply with terms herein and fail to cure such breach within 30 days of becoming aware of the breach. Provisions which, by their nature, must remain in effect beyond the termination of this License shall survive. + +6.2. If You assert a patent infringement claim (excluding declaratory judgment actions) against Initial Developer or a Contributor (the Initial Developer or Contributor against whom You assert such claim is referred to as Participant) alleging that the Participant Software (meaning the Contributor Version where the Participant is a Contributor or the Original Software where the Participant is the Initial Developer) directly or indirectly infringes any patent, then any and all rights granted directly or indirectly to You by such Participant, the Initial Developer (if the Initial Developer is not the Participant) and all Contributors under Sections 2.1 and/or 2.2 of this License shall, upon 60 days notice from Participant terminate prospectively and automatically at the expiration of such 60 day notice period, unless if within such 60 day period You withdraw Your claim with respect to the Participant Software against such Participant either unilaterally or pursuant to a written agreement with Participant. + +6.3. In the event of termination under Sections 6.1 or 6.2 above, all end user licenses that have been validly granted by You or any distributor hereunder prior to termination (excluding licenses granted to You by any distributor) shall survive termination. + +7. LIMITATION OF LIABILITY. 
UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED SOFTWARE, OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR ANY INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY CHARACTER INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOST PROFITS, LOSS OF GOODWILL, WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY RESULTING FROM SUCH PARTYS NEGLIGENCE TO THE EXTENT APPLICABLE LAW PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OR LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO THIS EXCLUSION AND LIMITATION MAY NOT APPLY TO YOU. + +8. U.S. GOVERNMENT END USERS. The Covered Software is a commercial item, as that term is defined in 48 C.F.R. 2.101 (Oct. 1995), consisting of commercial computer software (as that term is defined at 48 C.F.R. 252.227-7014(a)(1)) and commercial computer software documentation as such terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent with 48 C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 (June 1995), all U.S. Government End Users acquire Covered Software with only those rights set forth herein. This U.S. Government Rights clause is in lieu of, and supersedes, any other FAR, DFAR, or other clause or provision that addresses Government rights in computer software under this License. + +9. MISCELLANEOUS. This License represents the complete agreement concerning subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. 
This License shall be governed by the law of the jurisdiction specified in a notice contained within the Original Software (except to the extent applicable law, if any, provides otherwise), excluding such jurisdictions conflict-of-law provisions. Any litigation relating to this License shall be subject to the jurisdiction of the courts located in the jurisdiction and venue specified in a notice contained within the Original Software, with the losing party responsible for costs, including, without limitation, court costs and reasonable attorneys fees and expenses. The application of the United Nations Convention on Contracts for the International Sale of Goods is expressly excluded. Any law or regulation which provides that the language of a contract shall be construed against the drafter shall not apply to this License. You agree that You alone are responsible for compliance with the United States export administration regulations (and the export control laws and regulation of any other countries) when You use, distribute or otherwise make available any Covered Software. + +10. RESPONSIBILITY FOR CLAIMS. As between Initial Developer and the Contributors, each party is responsible for claims and damages arising, directly or indirectly, out of its utilization of rights under this License and You agree to work with Initial Developer and Contributors to distribute such responsibility on an equitable basis. Nothing herein is intended or shall be deemed to constitute any admission of liability. + +NOTICE PURSUANT TO SECTION 9 OF THE COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) The code released under the CDDL shall be governed by the laws of the State of California (excluding conflict-of-law provisions). Any litigation relating to this License shall be subject to the jurisdiction of the Federal Courts of the Northern District of California and the state courts of the State of California, with venue lying in Santa Clara County, California. 
diff --git a/solr/NOTICE.txt b/solr/NOTICE.txt new file mode 100644 index 00000000000..d3dc2334100 --- /dev/null +++ b/solr/NOTICE.txt @@ -0,0 +1,254 @@ +============================================================== + Apache Solr + Copyright 2006-2008 The Apache Software Foundation +============================================================== + +This product includes software developed by +The Apache Software Foundation (http://www.apache.org/). + +Includes software from other Apache Software Foundation projects, +including, but not limited to: + - Apache Lucene Java + - Apache Tomcat (lib/servlet-api-2.4.jar) + - Apache Commons + - Apache Geronimo (stax API jar) + - Apache Log4j (contrib/clustering) + +This product includes tests written with EasyMock Copyright 2001-2007 +Tammo Freese (http://www.easymock.org/) + +This product includes the JQuery JavaScript library created by John Resig. +Copyright (c) 2008 John Resig, http://jquery.com/ + +This product includes the stax-utils jar: https://stax-utils.dev.java.net/ +Copyright (c) 2004, Christian Niles, unit12.net +Copyright (c) 2004, Sun Microsystems, Inc. 
+Copyright (c) 2006, John Kristian +License: The BSD License (http://www.opensource.org/licenses/bsd-license.php) + +This product includes a JUnit jar: http://junit.sourceforge.net/ +License: Common Public License - v 1.0 (http://junit.sourceforge.net/cpl-v10.html) + +This product includes the JavaMail API 1.4.1 jar: https://glassfish.dev.java.net/javaee5/mail/ +License: Common Development and Distribution License (CDDL) v1.0 (https://glassfish.dev.java.net/public/CDDLv1.0.html) + +This product includes the JavaBeans Activation Framework (JAF) 1.1 jar: http://java.sun.com/products/javabeans/jaf/index.jsp +License: Common Development and Distribution License (CDDL) v1.0 (https://glassfish.dev.java.net/public/CDDLv1.0.html) + +This product includes the HSQL Database (HSQLDB) 1.8.0.10 jar: http://hsqldb.org/ +License: http://hsqldb.org/web/hsqlLicense.html + + +========================================================================= +== Apache Lucene Notice == +========================================================================= +The snowball stemmers in + contrib/snowball/src/java/net/sf/snowball +were developed by Martin Porter and Richard Boulton. +The full snowball package is available from + http://snowball.tartarus.org/ +--- + +This product includes/uses software, Woodstox (http://woodstox.codehaus.org), +developed by Codehaus (http://www.codehaus.org/) +License: The Apache Software License, Version 2.0 (http://www.apache.org/licenses/LICENSE-2.0.txt) +========================================================================= +== Woodstox Notice == +========================================================================= +This product currently only contains code developed by authors +of specific components, as identified by the source code files. + +Since product implements StAX API, it has dependencies to StAX API +classes. + +For additional credits (generally to people who reported problems) +see CREDITS file. 
+--- + +This product includes software developed by Mort Bay Consulting +(specifically, Jetty 6.1.3, the bundled servlet container in example) +The jboss integration module is not included. +========================================================================= +== Jetty Notice == +========================================================================= +============================================================== + Jetty Web Container + Copyright 1995-2006 Mort Bay Consulting Pty Ltd +============================================================== + +This product includes some software developed at The Apache Software +Foundation (http://www.apache.org/). + +The javax.servlet package used by Jetty is copyright +Sun Microsystems, Inc and Apache Software Foundation. It is +distributed under the Common Development and Distribution License. +You can obtain a copy of the license at +https://glassfish.dev.java.net/public/CDDLv1.0.html. + +The UnixCrypt.java code ~Implements the one way cryptography used by +Unix systems for simple password protection. Copyright 1996 Aki Yoshida, +modified April 2001 by Iris Van den Broeke, Daniel Deville. + +The default JSP implementation is provided by the Glassfish JSP engine +from project Glassfish http://glassfish.dev.java.net. Copyright 2005 +Sun Microsystems, Inc. and portions Copyright Apache Software Foundation. + +Some portions of the code are Copyright: + 2006 Tim Vernum + 1999 Jason Gilbert. + +The jboss integration module contains some LGPL code. + +========================================================================= +== SLF4J Notice -- http://www.slf4j.org/license.html == +========================================================================= + +Copyright (c) 2004-2008 QOS.ch +All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +========================================================================= +== Apache Tika Notices == +========================================================================= + +The following notices apply to the Apache Tika libraries in contrib/extraction/lib: + +This product includes software developed by the following copyright owners: + +Copyright (c) 2000-2006 The Legion Of The Bouncy Castle +(http://www.bouncycastle.org) + +Copyright (c) 2003-2005, www.pdfbox.org + +Copyright (c) 2003-2005, www.fontbox.org + +Copyright (c) 1995-2005 International Business Machines Corporation and others + +Copyright (c) 2000-2005 INRIA, France Telecom + + +========================================================================= +== Carrot2 Notice == +========================================================================= +Copyright (C) 2002-2008, Dawid Weiss, Stanislaw Osinski. +Portions (C) Contributors listed in "carrot2.CONTRIBUTORS" file. 
+All rights reserved. + +This product includes software developed by the Carrot2 Project. + +See http://project.carrot2.org/ + +========================================================================= +== EHCache Notice == +========================================================================= +Copyright 2003-2008 Luck Consulting Pty Ltd + +This product includes software developed by the EHCache Project + +See ???? + +========================================================================= +== Google Collections Notice == +========================================================================= + +Copyright ???? Google, Inc. + +This product includes software developed by the Google Collections project. + +See ???? + +========================================================================= +== Jackson Notice == +========================================================================= +Copyright ???? + +This product includes software developed by the Jackson project. + +See ???? + +========================================================================= +== HSQLDB Notice == +========================================================================= + +For content, code, and products originally developed by Thomas Mueller and the Hypersonic SQL Group: + +Copyright (c) 1995-2000 by the Hypersonic SQL Group. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation +and/or other materials provided with the distribution. 
+ +Neither the name of the Hypersonic SQL Group nor the names of its +contributors may be used to endorse or promote products derived from this +software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE HYPERSONIC SQL GROUP, +OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +This software consists of voluntary contributions made by many individuals on behalf of the +Hypersonic SQL Group. + +For work added by the HSQL Development Group (a.k.a. hsqldb_lic.txt): + +Copyright (c) 2001-2005, The HSQL Development Group +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation +and/or other materials provided with the distribution. + +Neither the name of the HSQL Development Group nor the names of its +contributors may be used to endorse or promote products derived from this +software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG, +OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/solr/README.txt b/solr/README.txt new file mode 100644 index 00000000000..75721e211e0 --- /dev/null +++ b/solr/README.txt @@ -0,0 +1,118 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +Welcome to the Apache Solr project! +----------------------------------- + +Apache Solr is a search server based on the Apache Lucene search +library. 
+ +For a complete description of the Solr project, team composition, source +code repositories, and other details, please see the Solr web site at +http://lucene.apache.org/solr + + +Getting Started +--------------- + +See the "example" directory for an example Solr setup. A tutorial +using the example setup can be found in "docs/tutorial.html" + + + +Files Included In Apache Solr Distributions +------------------------------------------- + +dist/apache-solr-XX.war + The Apache Solr Application. Deploy this WAR file to any servlet + container to run Apache Solr. + +dist/apache-solr-XX.jar + The Apache Solr Libraries. This JAR file is needed to compile + Apache Solr Plugins (see http://wiki.apache.org/solr/SolrPlugins for + more information). + +example/ + A self-contained example Solr instance, complete with a sample + configuration, documents to index, and the Jetty Servlet container. + Please see example/README.txt for information about running this + example. + +docs/index.html + The contents of the Apache Solr website. + +docs/api/index.html + The Apache Solr Javadoc API documentation. + +src/ + The Apache Solr source code. + + + +Instructions for Building Apache Solr from Source +------------------------------------------------- + +1. Download the J2SE 5.0 JDK (Java Development Kit) or later from http://java.sun.com. + You will need the JDK installed, and the %JAVA_HOME%\bin directory included + on your command path. To test this, issue a "java -version" command from your + shell and verify that the Java version is 5.0 or later. + +2. Download the Apache Ant binary distribution from http://ant.apache.org. + You will need Ant installed and the %ANT_HOME%\bin directory included on your + command path. To test this, issue a "ant -version" command from your + shell and verify that Ant is available. + +3. Download the Apache Solr distribution, linked from the above + web site. Expand the distribution to a folder of your choice, e.g. c:\solr. 
+ Alternately, you can obtain a copy of the latest Apache Solr source code + directly from the Subversion repository: + + http://lucene.apache.org/solr/version_control.html + +4. Navigate to that folder and issue an "ant" command to see the available options + for building, testing, and packaging Solr. + + NOTE: + To see Solr in action, you may want to use the "ant example" command to build + and package Solr into the example/webapps directory. See also example/README.txt. + + +Export control +------------------------------------------------- +This distribution includes cryptographic software. The country in +which you currently reside may have restrictions on the import, +possession, use, and/or re-export to another country, of +encryption software. BEFORE using any encryption software, please +check your country's laws, regulations and policies concerning the +import, possession, or use, and re-export of encryption software, to +see if this is permitted. See <http://www.apache.org/licenses/exports/> for more +information. + +The U.S. Government Department of Commerce, Bureau of Industry and +Security (BIS), has classified this software as Export Commodity +Control Number (ECCN) 5D002.C.1, which includes information security +software using or performing cryptographic functions with asymmetric +algorithms. The form and manner of this Apache Software Foundation +distribution makes it eligible for export under the License Exception +ENC Technology Software Unrestricted (TSU) exception (see the BIS +Export Administration Regulations, Section 740.13) for both object +code and source code. + +The following provides more details on the included cryptographic +software: + Apache Solr uses the Apache Tika which uses the Bouncy Castle generic encryption libraries for + extracting text content and metadata from encrypted PDF files. + See http://www.bouncycastle.org/ for more details on Bouncy Castle. 
diff --git a/solr/build.xml b/solr/build.xml new file mode 100644 index 00000000000..c81ade1a4c2 --- /dev/null +++ b/solr/build.xml @@ -0,0 +1,923 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + lib/lucene-core-${lucene_version}.jar Missing + + lib/lucene-core-${lucene_version}.jar does not exist. + This will cause problems with m2-deploy later, so fail fast now. + + Probably cause: lucene jars were upgraded w/o modifying the + 'lucene_version' property in common-build.xml + + + + + + + + + + <!ENTITY solr.specversion "${specversion}"> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ... + + This task requires that the property 'stub.src.path' be set. + + It must contain a "path" listing directories containing source + files that this task should use when looking for classes that + need factories created, the format is platform specific -- + typically it is colon seperated in Unix, semi-colon seperated + on windows, ie: + + ant stub-factories -Dstub.src.path="./src:../lucene/contrib:../lucene/src/java" + + FYI: The file ${stub.list} contains a list of classes + that seem to need stub factories. (if java files can be found to + use as guides for creating them). + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Tests failed! + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Clover not found. Code coverage reports disabled. + + + + + + ################################################################## + Clover not found. 
+ Please make sure clover.jar is in ANT_HOME/lib, or made available + to Ant using other mechanisms like -lib or CLASSPATH. + ################################################################## + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + See ${example}/README.txt for how to run the Solr example configuration. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/solr/client/javascript/README.txt b/solr/client/javascript/README.txt new file mode 100644 index 00000000000..34188b01fa0 --- /dev/null +++ b/solr/client/javascript/README.txt @@ -0,0 +1,5 @@ +For a Solr JavaScript Client, see: +http://evolvingweb.github.com/ajax-solr/ + +For information on (now deprecated) SorlJS, see: +http://wiki.apache.org/solr/SolrJS \ No newline at end of file diff --git a/solr/client/python/README.txt b/solr/client/python/README.txt new file mode 100644 index 00000000000..7f6af360c8f --- /dev/null +++ b/solr/client/python/README.txt @@ -0,0 +1,9 @@ +Note: As of version 1.3, Solr no longer comes bundled with a Python client. The existing client +was not sufficiently maintained or tested as development of Solr progressed, and committers +felt that the code was not up to our usual high standards of release. 
+ +The client bundled with previous versions of Solr will continue to be available indefinitely at: +http://svn.apache.org/viewvc/lucene/solr/tags/release-1.2.0/client/python/ + +Please see http://wiki.apache.org/solr/SolPython for information on third-party Solr python +clients. diff --git a/solr/client/ruby/flare/README b/solr/client/ruby/flare/README new file mode 100644 index 00000000000..9c1858c8816 --- /dev/null +++ b/solr/client/ruby/flare/README @@ -0,0 +1,29 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +== Welcome to Solr Flare + +Flare promises to expose the power of Solr through a Rails-based user interface. Integral to Flare will be general purpose faceted browsing, auto-suggest, folksonomy tagging/annotating, and much more. + + Visit the Solr Flare wiki for more information: http://wiki.apache.org/solr/Flare + +== Getting started + +Launch Solr: + + cd solr + java -jar start.jar + +Launch Rails: + script/server + diff --git a/solr/client/ruby/flare/Rakefile b/solr/client/ruby/flare/Rakefile new file mode 100644 index 00000000000..98a185e02a9 --- /dev/null +++ b/solr/client/ruby/flare/Rakefile @@ -0,0 +1,24 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Add your own tasks in files placed in lib/tasks ending in .rake, +# for example lib/tasks/capistrano.rake, and they will automatically be available to Rake. + +require(File.join(File.dirname(__FILE__), 'config', 'boot')) + +require 'rake' +require 'rake/testtask' +require 'rake/rdoctask' + +require 'tasks/rails' + +#require 'solr/solrtasks' \ No newline at end of file diff --git a/solr/client/ruby/flare/app/controllers/application.rb b/solr/client/ruby/flare/app/controllers/application.rb new file mode 100644 index 00000000000..eafa2eaa39e --- /dev/null +++ b/solr/client/ruby/flare/app/controllers/application.rb @@ -0,0 +1,19 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Filters added to this controller apply to all controllers in the application. +# Likewise, all the methods added will be available for all controllers. 
+ +class ApplicationController < ActionController::Base + # Pick a unique cookie name to distinguish our session data from others' + session :session_key => '_flare_session_id' +end diff --git a/solr/client/ruby/flare/app/controllers/i18n_controller.rb b/solr/client/ruby/flare/app/controllers/i18n_controller.rb new file mode 100644 index 00000000000..3ff6b209273 --- /dev/null +++ b/solr/client/ruby/flare/app/controllers/i18n_controller.rb @@ -0,0 +1,19 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This controller demonstrates the returning of accented characters work from the Solr example data +# and render properly in the browser +class I18nController < ApplicationController + def index + @results = SOLR.query("acute").hits + end +end \ No newline at end of file diff --git a/solr/client/ruby/flare/app/helpers/application_helper.rb b/solr/client/ruby/flare/app/helpers/application_helper.rb new file mode 100644 index 00000000000..b72ef869ca0 --- /dev/null +++ b/solr/client/ruby/flare/app/helpers/application_helper.rb @@ -0,0 +1,15 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Methods added to this helper will be available to all templates in the application. +module ApplicationHelper +end diff --git a/solr/client/ruby/flare/app/helpers/browse_helper.rb b/solr/client/ruby/flare/app/helpers/browse_helper.rb new file mode 100644 index 00000000000..7d520e70c3f --- /dev/null +++ b/solr/client/ruby/flare/app/helpers/browse_helper.rb @@ -0,0 +1,14 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +module BrowseHelper +end diff --git a/solr/client/ruby/flare/app/helpers/simile_helper.rb b/solr/client/ruby/flare/app/helpers/simile_helper.rb new file mode 100755 index 00000000000..716d34979f6 --- /dev/null +++ b/solr/client/ruby/flare/app/helpers/simile_helper.rb @@ -0,0 +1,14 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +module SimileHelper +end \ No newline at end of file diff --git a/solr/client/ruby/flare/app/views/browse/_suggest.rhtml b/solr/client/ruby/flare/app/views/browse/_suggest.rhtml new file mode 100755 index 00000000000..6cb4b05e6fd --- /dev/null +++ b/solr/client/ruby/flare/app/views/browse/_suggest.rhtml @@ -0,0 +1,24 @@ +<% +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +%> +
    +<% +@values.each do |value| + if value[1] > 0 +-%> + +<% + end +end +-%> +
\ No newline at end of file diff --git a/solr/client/ruby/flare/app/views/browse/facet.rhtml b/solr/client/ruby/flare/app/views/browse/facet.rhtml new file mode 100755 index 00000000000..bf658cb5e39 --- /dev/null +++ b/solr/client/ruby/flare/app/views/browse/facet.rhtml @@ -0,0 +1,55 @@ +<% +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +%> +
+
+ <%=link_to '[clear constraints]', :action => 'clear'%> +
+ +
+Queries: +
+<% @flare.queries.each_with_index do |q,i| %> +<%=link_to q[:negative] ? "-" : '+', :action => :invert_query, :index => i%> +<%=q[:query]%> +<%= in_place_editor "query_#{i}", :url=> url_for(:action=>"update_query", :index=>i) %> +<%=link_to image_tag("x-close.gif"), :action => :remove_query, :index => i %>
+<% end %> +
+
+ +
+Filters: +
+<% @flare.filters.each_with_index do |filter, i| %> + <%=link_to filter[:negative] ? "-" : "+", :action => :invert_filter, :index => i%> + <%=filter[:field]%>:<%=filter[:value]%> + <%=link_to image_tag("x-close.gif"), :action => :remove_filter, :index => i %> +<% end %> +
+
+ +
+ <%=link_to '[clear constraints]', :action => 'clear'%> +
+ +
+ +

<%=params[:field]%>

+<% @facets.each do |f| %> + <% if f.name %> + <%= link_to f.name, :action => 'add_filter', :field => params[:field], :value => f.name %> (<%=f.value%>) + <% else %> + <%= link_to '---- NO VALUE ----', :action => 'add_filter', :field => params[:field], :value => "[* TO *]", :negative => true %> (<%=f.value%>) + <% end %> +<% end%> \ No newline at end of file diff --git a/solr/client/ruby/flare/app/views/document/_document_delicious.rhtml b/solr/client/ruby/flare/app/views/document/_document_delicious.rhtml new file mode 100755 index 00000000000..dca714bfb77 --- /dev/null +++ b/solr/client/ruby/flare/app/views/document/_document_delicious.rhtml @@ -0,0 +1,30 @@ +<% +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +%> + + + <%=image_tag "http://images.amazon.com/images/P/#{doc['id']}.01.MZZZZZZZ" %> + + + + + + + <% doc.each do |k,v|; highlighting = response.highlighted(doc['id'], k) %> + + <% end %> +
<%=link_to doc['title_text'], "http://www.amazon.com/exec/obidos/ASIN/#{doc['id']}"%>
<%=k%>:<%= highlighting ? "...#{highlighting}..." : (v.respond_to?('join') ? v.join(',') : v.to_s)%>
+ + + + diff --git a/solr/client/ruby/flare/app/views/document/_document_tang.rhtml b/solr/client/ruby/flare/app/views/document/_document_tang.rhtml new file mode 100755 index 00000000000..5be65f2c5bd --- /dev/null +++ b/solr/client/ruby/flare/app/views/document/_document_tang.rhtml @@ -0,0 +1,43 @@ +<% +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +%> +<% + body_zh_highlighted = response.highlighted(doc['id'], 'body_zh_text') + body_en_highlighted = response.highlighted(doc['id'], 'body_en_text') +-%> + + + + + + + + + + + + + + + + +
<%=doc['title_zh_text']%> (<%=doc['title_en_text']%>)
author:<%=doc['author_zh_facet']%> (<%=doc['author_en_facet']%>)
type:<%=doc['type_zh_facet']%> (<%=doc['type_en_facet']%>)
body: +
+ <%= body_zh_highlighted ? "...#{body_zh_highlighted}..." : doc['body_zh_text'] %> +
+
+ <%= body_en_highlighted ? "...#{body_en_highlighted}..." : doc['body_en_text'] %> +
+
+ + diff --git a/solr/client/ruby/flare/app/views/document/_document_uva.rhtml b/solr/client/ruby/flare/app/views/document/_document_uva.rhtml new file mode 100755 index 00000000000..40696a980dc --- /dev/null +++ b/solr/client/ruby/flare/app/views/document/_document_uva.rhtml @@ -0,0 +1,36 @@ +<% +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +%> +<% + virgo_link = "http://virgo.lib.virginia.edu/uhtbin/cgisirsi/uva/0/0/5?searchdata1=#{doc['id'][1..-1]}%7bCKEY%7d" + # url = URI.parse(virgo_link) + # res = Net::HTTP.start(url.host, url.port) {|http| + # http.get("/uhtbin/cgisirsi/uva/0/0/5?searchdata1=#{doc['id'][1..-1]}{CKEY}") + # } + # availability = Regexp.new("Copy\ info\:(.*)td\>", Regexp::MULTILINE).match(res.body)[1] +%> + + + + + + + <% doc.each do |k,v|; highlighting = response.highlighted(doc['id'], k) %> + + <% end %> + + + +
<%= link_to doc['title_text'], virgo_link, {:target => "_blank"}%>
<%=k%>:<%= highlighting ? "...#{highlighting}..." : (v.respond_to?('join') ? v.join(',') : v.to_s)%>
+ + diff --git a/solr/client/ruby/flare/app/views/i18n/index.rhtml b/solr/client/ruby/flare/app/views/i18n/index.rhtml new file mode 100644 index 00000000000..98161b0c66e --- /dev/null +++ b/solr/client/ruby/flare/app/views/i18n/index.rhtml @@ -0,0 +1,16 @@ +<% +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +%> +<% @results[0]['features'].each do |f| %> + <%= h f%> +<% end %> \ No newline at end of file diff --git a/solr/client/ruby/flare/app/views/layouts/browse.rhtml b/solr/client/ruby/flare/app/views/layouts/browse.rhtml new file mode 100755 index 00000000000..18a15f51977 --- /dev/null +++ b/solr/client/ruby/flare/app/views/layouts/browse.rhtml @@ -0,0 +1,28 @@ +<% +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +%> + + + Solr flare: <%=controller.action_name%> + <%= javascript_include_tag :defaults %> + <%= stylesheet_link_tag 'flare'%> + + +
+ + <%= yield %> +
+ + diff --git a/solr/client/ruby/flare/config/boot.rb b/solr/client/ruby/flare/config/boot.rb new file mode 100644 index 00000000000..ac6267f65e4 --- /dev/null +++ b/solr/client/ruby/flare/config/boot.rb @@ -0,0 +1,57 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Don't change this file. Configuration is done in config/environment.rb and config/environments/*.rb + +unless defined?(RAILS_ROOT) + root_path = File.join(File.dirname(__FILE__), '..') + + unless RUBY_PLATFORM =~ /(:?mswin|mingw)/ + require 'pathname' + root_path = Pathname.new(root_path).cleanpath(true).to_s + end + + RAILS_ROOT = root_path +end + +unless defined?(Rails::Initializer) + if File.directory?("#{RAILS_ROOT}/vendor/rails") + require "#{RAILS_ROOT}/vendor/rails/railties/lib/initializer" + else + require 'rubygems' + + environment_without_comments = IO.readlines(File.dirname(__FILE__) + '/environment.rb').reject { |l| l =~ /^#/ }.join + environment_without_comments =~ /[^#]RAILS_GEM_VERSION = '([\d.]+)'/ + rails_gem_version = $1 + + if version = defined?(RAILS_GEM_VERSION) ? 
RAILS_GEM_VERSION : rails_gem_version + # Asking for 1.1.6 will give you 1.1.6.5206, if available -- makes it easier to use beta gems + rails_gem = Gem.cache.search('rails', "~>#{version}.0").sort_by { |g| g.version.version }.last + + if rails_gem + gem "rails", "=#{rails_gem.version.version}" + require rails_gem.full_gem_path + '/lib/initializer' + else + STDERR.puts %(Cannot find gem for Rails ~>#{version}.0: + Install the missing gem with 'gem install -v=#{version} rails', or + change environment.rb to define RAILS_GEM_VERSION with your desired version. + ) + exit 1 + end + else + gem "rails" + require 'initializer' + end + end + + Rails::Initializer.run(:set_load_path) +end diff --git a/solr/client/ruby/flare/config/database.yml b/solr/client/ruby/flare/config/database.yml new file mode 100644 index 00000000000..29c5c538835 --- /dev/null +++ b/solr/client/ruby/flare/config/database.yml @@ -0,0 +1,51 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# NOTE: Flare currently does not use a database, but there are plans to eventually leverage a relational database +# in conjunction with Solr. When a database is added, lib/tasks/clear_database_prerequisites.rake should be removed. + +# MySQL (default setup). Versions 4.1 and 5.0 are recommended. +# +# Install the MySQL driver: +# gem install mysql +# On MacOS X: +# gem install mysql -- --include=/usr/local/lib +# On Windows: +# gem install mysql +# Choose the win32 build. 
+# Install MySQL and put its /bin directory on your path. +# +# And be sure to use new-style password hashing: +# http://dev.mysql.com/doc/refman/5.0/en/old-client.html +development: + adapter: mysql + database: flare_development + username: root + password: + host: localhost + +# Warning: The database defined as 'test' will be erased and +# re-generated from your development database when you run 'rake'. +# Do not set this db to the same as development or production. +test: + adapter: mysql + database: flare_test + username: root + password: + host: localhost + +production: + adapter: mysql + database: flare_production + username: root + password: + host: localhost diff --git a/solr/client/ruby/flare/config/environment.rb b/solr/client/ruby/flare/config/environment.rb new file mode 100644 index 00000000000..b545733c4b7 --- /dev/null +++ b/solr/client/ruby/flare/config/environment.rb @@ -0,0 +1,105 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Be sure to restart your web server when you modify this file. + +# Uncomment below to force Rails into production mode when +# you don't control web/app server and can't set it the proper way +# ENV['RAILS_ENV'] ||= 'production' + +# Specifies gem version of Rails to use when vendor/rails is not present +RAILS_GEM_VERSION = '1.2.3' unless defined? 
RAILS_GEM_VERSION + +# Bootstrap the Rails environment, frameworks, and default configuration +require File.join(File.dirname(__FILE__), 'boot') + +Rails::Initializer.run do |config| + # Settings in config/environments/* take precedence over those specified here + + # Skip frameworks you're not going to use (only works if using vendor/rails) + # config.frameworks -= [ :action_web_service, :action_mailer ] + + # Only load the plugins named here, by default all plugins in vendor/plugins are loaded + # config.plugins = %W( exception_notification ssl_requirement ) + + # Add additional load paths for your own custom dirs + # config.load_paths += %W( #{RAILS_ROOT}/extras ) + + # Force all environments to use the same logger level + # (by default production uses :info, the others :debug) + # config.log_level = :debug + + # Use the database for sessions instead of the file system + # (create the session table with 'rake db:sessions:create') + # config.action_controller.session_store = :active_record_store + + # Use SQL instead of Active Record's schema dumper when creating the test database. 
+ # This is necessary if your schema can't be completely dumped by the schema dumper, + # like if you have constraints or database-specific column types + # config.active_record.schema_format = :sql + + # Activate observers that should always be running + # config.active_record.observers = :cacher, :garbage_collector + + # Make Active Record use UTC-base instead of local time + # config.active_record.default_timezone = :utc + + # See Rails::Configuration for more options +end + +# Add new inflection rules using the following format +# (all these examples are active by default): +# Inflector.inflections do |inflect| +# inflect.plural /^(ox)$/i, '\1en' +# inflect.singular /^(ox)en/i, '\1' +# inflect.irregular 'person', 'people' +# inflect.uncountable %w( fish sheep ) +# end + +# Include your application configuration below +# $KCODE = 'UTF8' # Rails 1.2 supposedly sets this automatically + +require 'solr' + +solr_environments = { + # facets: default, all *_facet fields are considered facet fields + # title: default, :title_text is title field + # timeline: default, no timeline support without knowing the field(s) to use + + :development => { + :solr_query_type => :standard, + }, + + :delicious => { + :timeline_dates => :published_year_facet, + :image_proc => Proc.new {|doc| "http://images.amazon.com/images/P/#{doc['id']}.01.MZZZZZZZ"}, + }, + + :tang => { + :solr_query_type => :standard, + }, + + :marc => { + :timeline_dates => :year_facet, + }, + + # TODO: :uva could inherit :marc settings, only overriding the template for VIRGO links + :uva => { + :timeline_dates => :year_facet, + :facets_exclude => [:filename_facet] + }, +} +SOLR_ENV = ENV["SOLR_ENV"] || "development" +SOLR_CONFIG = solr_environments[SOLR_ENV.to_sym] +puts "#{SOLR_ENV}: SOLR_CONFIG = #{SOLR_CONFIG.inspect}" +SOLR_CONFIG[:solr_url] ||= "http://localhost:8983/solr" +#SOLR = Solr::Connection.new(SOLR_CONFIG[:solr_url]) diff --git a/solr/client/ruby/flare/config/environments/development.rb 
b/solr/client/ruby/flare/config/environments/development.rb new file mode 100644 index 00000000000..63570f610af --- /dev/null +++ b/solr/client/ruby/flare/config/environments/development.rb @@ -0,0 +1,33 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Settings specified here will take precedence over those in config/environment.rb + +# In the development environment your application's code is reloaded on +# every request. This slows down response time but is perfect for development +# since you don't have to restart the webserver when you make code changes. +config.cache_classes = false + +# Log error messages when you accidentally call methods on nil. 
+config.whiny_nils = true + +# Enable the breakpoint server that script/breakpointer connects to +config.breakpoint_server = true + +# Show full error reports and disable caching +config.action_controller.consider_all_requests_local = true +config.action_controller.perform_caching = false +config.action_view.cache_template_extensions = false +config.action_view.debug_rjs = true + +# Don't care if the mailer can't send +config.action_mailer.raise_delivery_errors = false diff --git a/solr/client/ruby/flare/config/environments/production.rb b/solr/client/ruby/flare/config/environments/production.rb new file mode 100644 index 00000000000..5f8a29a497a --- /dev/null +++ b/solr/client/ruby/flare/config/environments/production.rb @@ -0,0 +1,30 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Settings specified here will take precedence over those in config/environment.rb + +# The production environment is meant for finished, "live" apps. 
+# Code is not reloaded between requests +config.cache_classes = true + +# Use a different logger for distributed setups +# config.logger = SyslogLogger.new + +# Full error reports are disabled and caching is turned on +config.action_controller.consider_all_requests_local = false +config.action_controller.perform_caching = true + +# Enable serving of images, stylesheets, and javascripts from an asset server +# config.action_controller.asset_host = "http://assets.example.com" + +# Disable delivery errors, bad email addresses will be ignored +# config.action_mailer.raise_delivery_errors = false diff --git a/solr/client/ruby/flare/config/environments/test.rb b/solr/client/ruby/flare/config/environments/test.rb new file mode 100644 index 00000000000..1c4e13d697a --- /dev/null +++ b/solr/client/ruby/flare/config/environments/test.rb @@ -0,0 +1,31 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Settings specified here will take precedence over those in config/environment.rb + +# The test environment is used exclusively to run your application's +# test suite. You never need to work with it otherwise. Remember that +# your test database is "scratch space" for the test suite and is wiped +# and recreated between test runs. Don't rely on the data there! +config.cache_classes = true + +# Log error messages when you accidentally call methods on nil. 
+config.whiny_nils = true + +# Show full error reports and disable caching +config.action_controller.consider_all_requests_local = true +config.action_controller.perform_caching = false + +# Tell ActionMailer not to deliver emails to the real world. +# The :test delivery method accumulates sent emails in the +# ActionMailer::Base.deliveries array. +config.action_mailer.delivery_method = :test \ No newline at end of file diff --git a/solr/client/ruby/flare/config/routes.rb b/solr/client/ruby/flare/config/routes.rb new file mode 100644 index 00000000000..c4a212512e2 --- /dev/null +++ b/solr/client/ruby/flare/config/routes.rb @@ -0,0 +1,40 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +ActionController::Routing::Routes.draw do |map| + # The priority is based upon order of creation: first created -> highest priority. + + # Sample of regular route: + # map.connect 'products/:id', :controller => 'catalog', :action => 'view' + # Keep in mind you can assign values other than :controller and :action + + # Sample of named route: + # map.purchase 'products/:id/purchase', :controller => 'catalog', :action => 'purchase' + # This route can be invoked with purchase_url(:id => product.id) + + # You can have the root of your site routed by hooking up '' + # -- just remember to delete public/index.html. 
+ map.connect '', :controller => "browse" + + map.connect 'browse/facet', :controller => "browse", :action => "facet" + + map.connect 'saved/:name', :controller => 'browse', :action => 'show_saved' + + # Allow downloading Web Service WSDL as a file with an extension + # instead of a file named 'wsdl' + # map.connect ':controller/service.wsdl', :action => 'wsdl' + + # Install the default route as the lowest priority. + # map.connect ':controller/:action/:id.:format' + map.connect ':controller/:action.:format' + map.connect ':controller/:action' +end diff --git a/solr/client/ruby/flare/db/schema.rb b/solr/client/ruby/flare/db/schema.rb new file mode 100644 index 00000000000..873a19b6017 --- /dev/null +++ b/solr/client/ruby/flare/db/schema.rb @@ -0,0 +1,11 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/solr/client/ruby/flare/lib/tasks/clear_database_prerequisites.rake b/solr/client/ruby/flare/lib/tasks/clear_database_prerequisites.rake new file mode 100755 index 00000000000..831d7513021 --- /dev/null +++ b/solr/client/ruby/flare/lib/tasks/clear_database_prerequisites.rake @@ -0,0 +1,17 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Added this .rake file to keep Rake happy without a database. +# Remove once a database is in the picture. +["test:units", "test:functionals", "recent", "test:integration"].each do |name| + Rake::Task[name].prerequisites.clear +end diff --git a/solr/client/ruby/flare/lib/tasks/routes.rake b/solr/client/ruby/flare/lib/tasks/routes.rake new file mode 100755 index 00000000000..3fecf372f25 --- /dev/null +++ b/solr/client/ruby/flare/lib/tasks/routes.rake @@ -0,0 +1,22 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Borrowed from , by Josh Susser +desc "Print out all the currently defined routes, with names." 
+task :routes => :environment do + name_col_width = ActionController::Routing::Routes.named_routes.routes.keys.sort {|a,b| a.to_s.size <=> b.to_s.size}.last.to_s.size + ActionController::Routing::Routes.routes.each do |route| + name = ActionController::Routing::Routes.named_routes.routes.index(route).to_s + name = name.ljust(name_col_width + 1) + puts "#{name}#{route}" + end +end \ No newline at end of file diff --git a/solr/client/ruby/flare/public/.htaccess b/solr/client/ruby/flare/public/.htaccess new file mode 100644 index 00000000000..8a74972cf74 --- /dev/null +++ b/solr/client/ruby/flare/public/.htaccess @@ -0,0 +1,52 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# General Apache options +AddHandler fastcgi-script .fcgi +AddHandler cgi-script .cgi +Options +FollowSymLinks +ExecCGI + +# If you don't want Rails to look in certain directories, +# use the following rewrite rules so that Apache won't rewrite certain requests +# +# Example: +# RewriteCond %{REQUEST_URI} ^/notrails.* +# RewriteRule .* - [L] + +# Redirect all requests not available on the filesystem to Rails +# By default the cgi dispatcher is used which is very slow +# +# For better performance replace the dispatcher with the fastcgi one +# +# Example: +# RewriteRule ^(.*)$ dispatch.fcgi [QSA,L] +RewriteEngine On + +# If your Rails application is accessed via an Alias directive, +# then you MUST also set the RewriteBase in this htaccess file. 
+# +# Example: +# Alias /myrailsapp /path/to/myrailsapp/public +# RewriteBase /myrailsapp + +RewriteRule ^$ index.html [QSA] +RewriteRule ^([^.]+)$ $1.html [QSA] +RewriteCond %{REQUEST_FILENAME} !-f +RewriteRule ^(.*)$ dispatch.cgi [QSA,L] + +# In case Rails experiences terminal errors +# Instead of displaying this message you can supply a file here which will be rendered instead +# +# Example: +# ErrorDocument 500 /500.html + +ErrorDocument 500 "

Application error

Rails application failed to start properly" \ No newline at end of file diff --git a/solr/client/ruby/flare/public/404.html b/solr/client/ruby/flare/public/404.html new file mode 100644 index 00000000000..dd0fca84cd5 --- /dev/null +++ b/solr/client/ruby/flare/public/404.html @@ -0,0 +1,43 @@ + + + + + + + + The page you were looking for doesn't exist (404) + + + + + +
+

The page you were looking for doesn't exist.

+

You may have mistyped the address or the page may have moved.

+
+ + \ No newline at end of file diff --git a/solr/client/ruby/flare/public/500.html b/solr/client/ruby/flare/public/500.html new file mode 100644 index 00000000000..d20477dada4 --- /dev/null +++ b/solr/client/ruby/flare/public/500.html @@ -0,0 +1,43 @@ + + + + + + + + We're sorry, but something went wrong + + + + + +
+

We're sorry, but something went wrong.

+

We've been notified about this issue and we'll take a look at it shortly.

+
+ + \ No newline at end of file diff --git a/solr/client/ruby/flare/public/dispatch.cgi b/solr/client/ruby/flare/public/dispatch.cgi new file mode 100755 index 00000000000..cb8f7d3959b --- /dev/null +++ b/solr/client/ruby/flare/public/dispatch.cgi @@ -0,0 +1,22 @@ +#!/opt/local/bin/ruby + +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require File.dirname(__FILE__) + "/../config/environment" unless defined?(RAILS_ROOT) + +# If you're using RubyGems and mod_ruby, this require should be changed to an absolute path one, like: +# "/usr/local/lib/ruby/gems/1.8/gems/rails-0.8.0/lib/dispatcher" -- otherwise performance is severely impaired +require "dispatcher" + +ADDITIONAL_LOAD_PATHS.reverse.each { |dir| $:.unshift(dir) if File.directory?(dir) } if defined?(Apache::RubyRun) +Dispatcher.dispatch \ No newline at end of file diff --git a/solr/client/ruby/flare/public/dispatch.fcgi b/solr/client/ruby/flare/public/dispatch.fcgi new file mode 100755 index 00000000000..1a5278db81d --- /dev/null +++ b/solr/client/ruby/flare/public/dispatch.fcgi @@ -0,0 +1,37 @@ +#!/opt/local/bin/ruby + +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# +# You may specify the path to the FastCGI crash log (a log of unhandled +# exceptions which forced the FastCGI instance to exit, great for debugging) +# and the number of requests to process before running garbage collection. +# +# By default, the FastCGI crash log is RAILS_ROOT/log/fastcgi.crash.log +# and the GC period is nil (turned off). A reasonable number of requests +# could range from 10-100 depending on the memory footprint of your app. +# +# Example: +# # Default log path, normal GC behavior. +# RailsFCGIHandler.process! +# +# # Default log path, 50 requests between GC. +# RailsFCGIHandler.process! nil, 50 +# +# # Custom log path, normal GC behavior. +# RailsFCGIHandler.process! '/var/log/myapp_fcgi_crash.log' +# +require File.dirname(__FILE__) + "/../config/environment" +require 'fcgi_handler' + +RailsFCGIHandler.process! diff --git a/solr/client/ruby/flare/public/dispatch.rb b/solr/client/ruby/flare/public/dispatch.rb new file mode 100755 index 00000000000..cb8f7d3959b --- /dev/null +++ b/solr/client/ruby/flare/public/dispatch.rb @@ -0,0 +1,22 @@ +#!/opt/local/bin/ruby + +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +require File.dirname(__FILE__) + "/../config/environment" unless defined?(RAILS_ROOT) + +# If you're using RubyGems and mod_ruby, this require should be changed to an absolute path one, like: +# "/usr/local/lib/ruby/gems/1.8/gems/rails-0.8.0/lib/dispatcher" -- otherwise performance is severely impaired +require "dispatcher" + +ADDITIONAL_LOAD_PATHS.reverse.each { |dir| $:.unshift(dir) if File.directory?(dir) } if defined?(Apache::RubyRun) +Dispatcher.dispatch \ No newline at end of file diff --git a/solr/client/ruby/flare/public/favicon.ico b/solr/client/ruby/flare/public/favicon.ico new file mode 100644 index 00000000000..e69de29bb2d diff --git a/solr/client/ruby/flare/public/images/flare.jpg b/solr/client/ruby/flare/public/images/flare.jpg new file mode 100644 index 0000000000000000000000000000000000000000..5db42029267f44d3ecb51c978bd53927d30c4f98 GIT binary patch literal 31282 zcmeFZbyOWq_chpXaSaZ^-3bs}gL5IcTL|v%9w2CN4W0z|;O_1Y39iB2eQq9m-=EBU zGT%S5)->zh-hI02)al;4sjgE6&p)2mum>gFEldGGP7YuI0003%gu(?w_dsj_&!#^D6K{ zMiMLuKtVx4tdQuRp8ymQdwXjWIRi&4q~`_TBLEKz3kM4e4+jT_01uCVgn^8Nh=_!Z zj*fzXk3&F!hl7Xrl9-z8B@rbN9v(RpIVCL}0|NsADGM7jJsUMW1057R0s;~u5*9Kt z79AlTA>IG-_S_Bi4RR-7Awm9*^REpG8UzCi2akY=gbW3M{yg{3>3=)<0)U1BfuLbP zu&^+Y+XUqUIS0UCz+#fKiNIkg8NgFGV6*$jenp@Zt?0l}o;aa;ZRi+)h=hxWPw9WNcz;W^Q5WbLBi+`RmP!lKHm>YCcR`i92NuI`@RzW#y1$*Jj?*|{G-=hxOZHn)Cl z@9gfKo}FJ@UR~eZ-eEvOhK7Lw!NC6x84B7B@&RGMz>>4UVTvfh8#rK5u=^umi^hJf z=s=`=t$c!G=s1CdOU1E9efm4{wL(;b^rqb@BzMnpeO)n3@8u= z)bl)m0)m1ZfG_}IASXJA-_fV2+YuW!bRjcQo1@DHK5)L|3y7Bhl*h=y?Ksk0!k!O< zCeJ@zu-Uf!z);C(e7NVao(EQyDe7kWh>6!gQjIY21{#_j-Ft$wy)G<|Eb)zw{{W~| zHEvzHoY^6Z#c-k?^uZxf_-k>zWF=*>V@W>c{5OfkI*=W5V1hwAX|C`gCb7tSGMlrv zA?d^mA4qeRCiQ}X3aD1a;S>am%%xIF8b)2`%Xlx1#&%V&aeFI|l+F#RdJ*^KYn#BS ztFSy_SH>jBMzm+UZ#_OJjyy1EX64t2Ws^t0{m?4H_>1jGD{A9mht^13M%DT5k$$)% zw9o0H<>hs!M}mWQM8orKWoVez+cQ$3~)b^Qc`cfeLY3X%PB9| 
zW}P*l_e1Gd(2macyUFmDpUMrkh;e$A^eCjUQR_J|iJ}O{{enV?DJFDL$mZE>DVjL^ zI!7Djq`6Y4y``MfVOu&wrj_gjiT(Cc_rmY!b8WQ1t@)`Tn*JX60GuqzjDxg6M6k-( z(0qZ`qJ$e(z(@9nvJ4sS7O-Wh5xdR{ZCv=lm@C2YXFvvX$57WllfRtiEw)1|6N>X5 zdKupnhvkb(@wy_keb+^wxtox!hr;vj^UlgQ4g$3Fk5c0i;hDVOSP_?I3_;sWzQ=~B zY>b}@SspomiDWT*Z!#{HQxt%Zu!jGSb5 z-S0fzkH08Rb&a>l|NesBObq~&Gu;qD#V^d~&6zBQyq$KkfDxTN3@=bew(ZJqGAtFl zN}Sd{KESFr#lYpxbE?mjJoao19UvQ3xGb(c#$L<@{WuhQ6wbwMLf%oHKE~gjqUd6h zqeD!NIE|0kG`44Ws8j4QGzk+Qzyv}g*bgT(_T*t@RN;0%@I#5uV(12?Y#_69iHCJT z^EoZ2WOT{Zc*!y+qm}Ywp4AI#-g%(`SI}P@677!Ls*KiD;TmFyu*D{POCPG00$FpPxjSj6;U#O8QKXO!tiFS8wAY#NOlem*WNh@rv3P-HkM z$_9|c5K+fKvsF{t#=&3$fu1YpU$wJHO#s|JZMG zU;GTr?Q1|E7^=SC#utPY46dVVO~Xt4we;@oUW%z{vU&SuFGEXhu?_xD12Z=VeBN{B zwZ|W0J|iFeP-Y?}R0m8@`pZF!rfRk#opBA9+wdo)G*QAa#opPmbrI9uc?N=;y@pKH z4M9_Jd0H`4mOip(R9PmkP^v)FhU` z}u~JKIuG!gly2ZeWm}q@bga!v-4HWpVV{3|XSPP@q>e6M$fgE}Q6MKUAzAf}M6QA*z?Yv{D@Z`g&#JsrQ>_4tO z`s~SCQ3`bK85@T!42Odz(66KKrONnyEF+s1tgg8#Ma4R7l1+QjHriEgF{QqjeZWM6 zLkb+>cX_Xg^oG1DiP%9I=6yfA{hNH4#hhB`X8;u@lPeC5mn6Z3RAeXL4TxXhNHsFZ zU#PZDNg#6B@hepzQDr$6t4(PnJwSGA|bba1MdMndij1|MmOKdspg>y@_ z&AZ*A1|?d^Adg-ui#;*)ngO$y9fV?!xak`l943*cTif|@JInH`k9DoE89gRvK>|lb z9fiP&H0e^urrL!yN?jS0pp!EGHJdtc8t}Sf3_ow2OM%D%wWI<@4gW1#;J7+hu7v|_ zdW6=tNcRX+rdvHrzpXSsFosgrWArQ8Yhm;bbdNQ zkv-r*cp%>@xF={M2%ZRFa9xV@SD-D}*W7n2dS`bJz0hGz= zYN7qRBVtrcOGk?VN`0~Fq9m>L+ibhN{QcXho`t@6&XOX;wPX}w8QVe!m^-;{`@yIq zE}JodbU$sT!>^H-OzwsgHqJL_?WOFuBfab+12lEn>2<>zB&{`BD3Ywu-2FUvNHvK4 zgl^Dq^ifZy;RE$H9fHrmqv|T&=-V&-I|{e!#ofAn9}p;K^Fd5h0_jFJjHtJ9qNMz~ zN4(Sux@ap=^tI$UEz>m2{=Z7zJOhn^cNO`BL1EqlJQxviQg0aALTr_fmbT9QQ?}#4 z>x})(0!Z`exF50GgXvZ4AR{Yo(ObE14?E|0is zZ`S8AsRg6>V;88#lLGCO<=~a$&KQGIDbbVJ6f=5NS;t5&E7`Ul*U{Yj_I0PZDi^^H zoCzZqtQj9Cs&HXW2UPkfVK90beF+HZ*wRsmkeC;94zy9mrj3ar!fiEx7|F$qzHo^& zTRaQms!U79N^um=>MRz-r&G|~c(P*|bK296up9qCFS;Q7mhv`Azceo|cDq0&PX{ra zd8D6vA3XeA4b+Z%Ps;&jZT}Ia!y-Hm^;r*fb?%1Ld{YQ9!Zz;%L&Qi<&QUU>)i0>Q z{^F*~qIj{j)gjib!5?f-Q3p_WLHm-AD0dmgQmBce!oejy)tEmi1tpAj`95m6hMo>m 
z{XAnMT_)Y;EUSd;qoqmH_IIFSYQMlQ1QYth(A`ZVVAvy$!sF*D2pyhif43NeZ45nM z%u?Ewb~}=CKK9}8^tSo9R;E+=g%|nKZYy=V{$p^Im%3V`NDz~dI_Z+om!}L%LS*qu zacQs#QB@n$foGhLwL=I4ymqld$kafnrH1hOPomOXeptQ79s(9y2|y z;V70;Q>ekUb;_*-SNQR$-ZG11up3fhze7SS;jv^_0mUcXwB*1S4FEu=CD7Al@0*aF z&lm)1n~nX}M|B9Chr1&0@vX(-lQ=e|XpsBtGpbF|>L@a|H;S7iQuxYzCws`R?{&Xk zFSBGKHu!jC689$GPXAp?(dq~G55_)Sl!O(}zI4r5H!6E@Q5g%xZEeo}ZaOP=?VO}qtTsIbuzLF`;xtpxyaN0)=_&<-_aRcF z(ewp+7L{~*ru#C(Alr`?zGnbknF0C2$cj7!QyP!1eou<>5<8B*>6=4?ZjyCYF5PNe zgRsimy0l^GU+;xnP{ZB$qlq#dM8KJhoZSxI=afQg)ix0gq`J-pHZJ?z%M~3?iKOm2 zwdlLDh(DeIuO0fj#;c6H$hzcM(T%unS16Xv6e2$X5 zr2QJ=@$btt(w*k(^G{z)Ps9yLH#ZZs$K)ra90qfa@7BF;cF?gTR6bQ?TbLACjNYR! z>-?OsI0kN@dbqeXPs?rmpP}izz2%A@w(=(sbg9q+gTN8Bi%!9k2(clEV zrVH8n8U!6?3mH#}cm{SXJKq+kM>nm^ex?*T_x~Wekv@GI+LiL=af1$J=;2*T5i-|W zT_Gj>a=sKYK6zCy+gmxj=E*_6RkV-mIif_U4@4W~+WCaRqTQ>PCA;PCEaY0g*-<_^ z9koYz`Y60rS=5uQt#6I<*i7Pr@{Wnkgy*F_ZGR|oYk3+*sMBI~)`?#j7rvP`Uicp6 zWs{BY#f%K+qBwShDE3P`FHr#RgGDK#S5kH&lD)gN5iu$+;kP>Li1i|I49!*VbOR}g zLP{QBq08rfPO!IR@f;~QEhD{}&h!MM{ z1e>AEJ&y03 zf`zt%(IgSUz(&5VN`$Mam=hE0vB*=!w7pP%jXEY#(5T3M4@-ZZWUx3m!9|0Bh$#Gn zQEwsBD3*MkRp}D7Fq4e%vY>Ck=SID=CBdis64+oNqbR7Y95ehy@ThNA2AK11UcBV~ z^EO6z;4i12Qnx(}+CyIVOW?#4F)7rF4B+O%#yn3!oT|}(J zl0K-GIUf@hUAO024D|H-a~8D3>$r_kib>;Gz3Oh%KIu3aSiiKIHT=*VO~V!ajvq?m zxL&`arO9<7#$h`Aa~)7@kLq;wGGjQ&!il*cl8{Nrn@#sbT9BQ$*Yd~An`&-2MCgNa z_TlUZv)Q8d$O=}a2&X-Mwdfjz_~Cvc1Fixz3(@*9|F>x&US zy6)8UFdzD?Ep*H}g!bUx5Sr9()?kTD*<7uYB*=O(I=&x1{pr057KcsF z3td^zS@eB>{i>gbpOD|n>WW)yQ`lHNUR{SQ?SkRls7(Tuv&;=aOUYOQ5FFJu87CWu zUStT&s?oRUVoG|l2(07O(Qtj%xK+T@Ydt5*rjLj8mD6leS8W{!_gDRkF}tzWBP-Y(FFMt z#E!m(JVxA6cfqDfuN`5PeoXJ8OleSR1_78yP|Ef)%`PcYUV+n;o7$15zvM{PX9VFh z3wHWOdaBNBrj80uHj{NXoa+)*Rp4NsC9A^4O5@FlCSvm;Z&9L$Kb9T zly1O;%S9=N+Gm(pF%(e$5uc)e6KmpHfTbS+XWJG$hBpi=6Dqld|Qo8&7 zdFJcoJTN+F>N5Wq#XzEnF6*k##1EC!RU2D}EZ;;a-p`;w`IM;j*XKmVI7 z+&>8}C}>D>2pRWu$n1CB9{>P{X=`j@3(2Y1Os$N_KuLcFMj)E{(WB{d&s5-xfdKW5I+n5vN=O05vuM1 zfWr4*HtHY%z>@`lg5iJJ&XxbTS5UZPPNuKf|8NI|A_@So%g@i}G!UP00Pq<5{CxlI 
z`S~#$06>cX&~9zy;^_KESxA6TBLMI}I{x#T|Ed8=y#Ai1fQEs-hJl47@R8tQ5s(oP zkWi5kP>@hxprN9pzd*xyf%yUp6B`o;8y61`A0L%>E+!!%DJd-t2FrkD-@bh-FQ=fOsHmi@tfH!_2FagmXlQC`YwPIf>gv6F_wN0B z0|P@tBV%J@6B9Etb8`y|OA9M2Ya0kJ_V)G;j*gB_j?T_5&aSTR?jG)*o?c$w-abCQ zzJ7imKKT1XlGuSkAA^HKLPA5s!otHpeTs;PjEstkj*f|mjg5Tnw+bewl>bt}f7QTm1?c~!fxi`C3emv7E8ytZ_8$fO&A;nk3iwa_ zLj^#afZo6m!JNW2!G*!=BCsLCBhDawMRr7yK*dJgK&wIj_(BPT1mg&^0V@Ps6NeV( z9Jd881Ye2ZCBf>;B0?7;VPaU~L6T@vH8Ntd4e}xiJ4#l{YpMonPZ|N5C)zH$kMt4@ zC=A1lkxX*T2+RX4p|2!ZVOV?Eg4o4gwV`Vb9~hyh9Y=w;_E@5Aj&<%i;T z{9)R^IUp<0_oI1`bTC5*ddOMmR9JO*(kG7y-AMi@;;6^y#hBLEuW=#qb_oiJuaYp6 zZjzTjx29yIen>M*SIXeaB*_A0U3|ItdiU)<`zq%m_ayH)f2UxzaHD9cc)Db`w7aae zy#9MtMR{dORZ(?eO<`?uU0Ho^!*JtN(|q%GD^wd!J8K8H)4D6MyQAm0kF;NLAarnY zm~zB*ba2reQUqm$dPFkBaKtMlW26~mDP(`-W)yamVpJN`Z)oIb z254W=sn82wyn0cOA%-!AiHoU+d4%-=8wa}>M+_$hX9?E>4-c;vUlYFw|AFA^OX-)V zgz1FmL|~#D;sO#KQgl*F(jGD&a&Gc7itm)@ly+3CROi%nG`_Tww8eBU=oaY<89W$e zn24B8nRS`_SiZdSU{zy#&5p@_{(6Q(ilc$^3s(rY1CK5*n2(2_PT-{=nh;3HPv~Cw z;?0rBp6HI)j`*&`f#j*wwe+(LqAd1XayeG{Hwwy%rb=GQaVo{CJ!)&}&zdA!!rBHp zLAu3ylke`|QyVB7`Wdg9Vwy>rds|dn9$M4d7~6icTXUdrG;u0)K6d4DAMnKSGV-qU z1^KCcDD;00RR35Wj2PM(PX5V1Vk1f_x+E4e&MSU1srfTwN?IC9x?h&%m$h%&*)w@# z1@eWX#Y$z`<*OAIm1nicbtw(>jg2j9ZEhWCo%!7&eO?0ugN?&FBhM2ulc&>Zvl4Ts zKeHB}mn&9{*9kVpw?%dy_8JackGPJnPa7^puMuv>?@}L3e(UEi{{MH_|99?n|6u<= za*qaK|9_tQC0xdTxQ8%LaPcSie=x837x#ZNuLR-#ALeP+X(j)M%>TtZ_pkrN{Qm*o zjmrNs^Z(Afo8P}P|2OX;CjU>&XIx}i{$@T`=}+!UrvK)?+P4<@H}}nQf3n{#(zEp6 zv%es}`1~jPTcLmR|C{}D@BhR<kPY$yG-XSz$-*nQZ_Djh1ZrGv7D7$YdjddB7FAz1p@m*%)+*Bnna$( zl*Owg5v9zetHCoehq8BaMDiR8N{R+b;mQRnOKNcHY#KV6v09zlH@aMU9`Cx|gA61M zqm2$t_)UY&HY~&~)2yCttZe%ry+)1`s&jzLjhn6ehNrIAvX8d!+6NQ=?LgCyo53!j zpszn zHgj(K=c{?wh2|xwWr-D!)!enQ_1jINEw*3c+ZsFOyAFHy`$h*Uhi{IUk5P{IPnu7o z&eYEd&lfHdFXbUGidom9*9$k!x3IS%cSv`UkoQHC`>_YUhs1}?M~=t9$EhdEr?97s zXZ`1a-_QS_rAY-yWHgw20iiI=ykMSP_cbhs& zDns^=u@dmZ)=*vnvWN6tP%llKBvk&wOr{o+UcHk7t^MDZ`0d?J z-y*=#rqa~)vz-T@x;bkx>n`g9q`g1=$^O%#2_)LTJ^k6b;orxj2XzPk)DZjUI{)?| 
z`9~aPf0zEtGc#cN$2GYAaeY?6FG#F5zhn8w6Y%HQ8*(QBI0z5v>c3Ijjn{|Y`+wh@ zP23^lasUvsv-fbcFf(^1`(vRRnV6l8y^FJnBNG|e)`*3P46?|M^LM%bxc=x&kjuH| z_2(@Td7kC&{x7kKFju@sf>vK9Yxup|s;Ib7Vz;kHb%F>4Reg?TI%FaYzYaCcJVaW} z#HA)&P8_&aSz&U-_8VwSz4zu|i?|iz-3o(QZ+SVVV^NZm5{G~68WagwGq~W><_Tw7 z<{VR^e@a?n;RiQl$tmVajJ<$kK}3qpjl(lY%KUV9?Is2^q% z^0Wy&(yQvW?t4D@N}15-92%ME>M&Hy&tY!j^plaauX=Y8P^QLrwbFiXnRX#X|Do4I z(~?hRxDJH@jh&AB7R81^G(Ahv3gOkV7ytd_FV_nVznFcAf(Gx_dy={wy%EfmvK(d) z+foz~?JZ;Gb7|HSeD9}+BdL2z;)=vacoRW%LlLj$p(Nm{8->D0wJ*BX(=RaEs*vOJ!lHu-mK2)`0+yu`_% zJC{+88eK^$-qe%v1WWA9ZE7^wOlk#)gey}sBL@qa4}NG{QgpN`nnjpeo`;Q~!rdnd z79Av!9^(&#w-vdSBJLMtw5-l{FZ9;lpK5!6HsGD|uE{=m5@VXQD9y>xrdww_3SwTN zQO1vR>QD-R7xcI)3m-9=v1SPrC81X(m{}na+1`dP98l!CU(cJ&#pAWI4ITZMiyiN& zf5Bnn^1~*k4?eUH<9e31@n@cKFcj| z9=daKn!T#48*Pm%%hh~S@x^vW87N`4B*>bXIFW;`?LFhK-E21_JR>xS%O3Q{Icz*h zVZ^@ear2EVRW1@7IR89)8hF8s&t7R<6rTtaAj$XMw1M|Hv>@kVa#?R=Pu>EZ{m;<|^c%!WFQ>W<Oj-wNSO}!%9DA(p0-z;|i2)$}u|Jf3?Yg%jMW1j?9dTr8y_=0hU ziWSvo<%V8+4`NRZt-L*1{O=V_5w|?B1XV!}Gj$|C$Kb=|mKHl0t({+Tu<7XQ3=VCR zi4G3LvDKre@oZ$howF))U!Ll8d&0N)a_)?(eI|&j_4A>vvbrf;1`mdN^s7VS=&B>z z2)}CT_T-uDl#5F>&ohE6P}&^LB!C}9*$ol<$)t}U+Jdr{*8VgFIUpJ1=nVnzPCS9 zccn!8kuf|zv2RVaUqswbT02>r>-5Di`koe+w%7YQLuoooT*I2pOlJIvRIZrx_@efG z=>akU*?AJ~$w8a(J;K%c5tm+Pav>itbOaf}0boQ%7f+OH6n~|S&TWZRv(Q@&Eg|>KqU7;Naa0X@BgWPN^Hn=NDYmsZKMl!tF5Yc& zZ1Bz({YmS>3RCrTvgp{t%=zsGOe5c=hN^{*`c5|6oawjBUTbE9UCedozE{-a3x>2s={NoY{!6im;h=*EE~) zIt5w>>sm>4T3FeNF_nmo#g(dP)w-ptJba$llJ^-YvRJ)>;Jw%FUk60d6PM+dSTYWG z>=5g;F=Y<*blSwqYd;MKA8xBgq79ITO~@u|p{+v^xxmNqy*yiaskO)FTA1f8ak8{n zVRdH&Qy-vuZqW8&QvtP;emKOO=Jf-b2(TH>dZOmQJY;6%EuUbTx>LU25+RJIV}bdg zdgD-}v-FtuZrY{>ogH6VpKHd*f5XGCv15?#7R*e;1EgMDo@PXU#N2m|d$-4krNE?a zUp?9VHGrP+@w^_L8EvCuA<9bI-NVqbh-0fnV>JbPF`3h{k=n}pLLj+orTQ7@@C!>G zTO&nsU!J$amm<_{%{b3#XJoJE|IW@vX2PF>VvGYrdKZ0v!&v@fCU4PrZrN?J zP~PI4Ce>9qEe$n!PO&9;fPzVKb@kZLS0%uG)k`B=hju}G$(Hs2=-NkW_ z{RCL~*WQ?rn#GFTpgu6T+at5$2)GnX6yQc79J|?Wa?Ig_FeTy-fTW 
z8v>604H#)V^4pw`t@4KxtnV8j3&gCgW_(|{vut#)pwxUaU(LE#pZPZ9+!LITiWlx)SGOyAU!nv(Rv{!z4!GCNP&Ez;C`LzBC zmts!2I{eyzJc3~-C!Z)CvumqkFjn`fDo-E_U+CqF+bB6ob<;4?&sUrkOi=qV==j1R z+6w4HQ8qSX`S}-pGJA=KadIT`D0Ol7mtQyy&&F!p4IC)ldnGIV3w=XxM#|p{4%e0jh zbunN8r6B=Roi8DnpVzmRL>LG`1dRJiL;}XCM>WsD2UVhMp3&6n#E0t{UwB^bK4Cbq z{CB$UR+4j^YlJwNI)W@(n(T>$@Sk6NM~xFrw_%L7jDHpNL~Hp{FsWj#Zrq!2q-TEA zW@NSme^aND-j9N^I{Ar$^-F+zGkYXK?uhTwqxVR$nwkmSA=i{o*0-O;$I$|eg|cYL z>%t+8L0?Xpt9xHnu1Fptx9e<*4vI+WeF-M~KrEFH@>)l1C3MGHKkj}8@MZHw){*m5 zkLap<5=74i3t@ARG;w2>)x}|0*2nzE)m<26IEndWrgk3EG#=$P6Xi8)zOBsVKd{Yt z^WCtmwV}DwcJlRBtuAptVIK1J(y_QS$;!IaC+&&DZDSsuBl8~_C2hky&@`1-?K}gQ zSkFM~Y{M3T6W8YPp0jeA(od_xhgQ*%`xMH^`EjXWS|>c~hn)954&lk|%e<4#D*1c% zTnSfCWEG7V@7gCBWlafb$U4|rS3SbpH(;(;ohNQu%dWvy-42;iY^{xtePwMS&XA>^ zyLHR>r_?PqN3kB}S5sn?LL{{6OHIUtv3>78=RMFjHv6IN3z5!l8x`(oY|A7JByvM@ zs$l;VsE?zFY|H3#C|ynFk4}90mHca%mY|`x8CEN{bfWO*)B(><=0a* za{6zhq1o%W`rJc#d#7VS1l!qO8cjF0+u&35zz6>IeiF9)J={VUD+Cjlvn3HahZSDs zpR3_vy%=3{0orwT4`}Rm$F=$INE}^^`1j3jbo6_&IK7E)23CaucmD2oNT(s;6n2=q z@91WoBVS9`21vEFM4DP;zt9qE^hltZh}>(E`s|%&MIo$BWxxwobLcg)fH&5k? zzY3jHz~0-ky~L*pW%{74nUjaBQWNTN?OjjqQ8JaPSc>3efwyyzyfdPk|I#U!Gk2w2 zH6^@Nc|>uEseu{k}F!~q`9&(O0#Il&5aEHTY%AF9KVLdnw`Hd&$x?^ zUpF6ZZ-%6xzG~+3hN@Pz9*cNPcNn%+-2e=o8bHCf?L?G*iWv!(O;wc9Zqo5OzV!vU zeAyX1wVT<`Ev<<*ZK}B`MGJCInm-CDu!Nfx0deNeW>6_-eO}v$Q$-*30F1rT6>n6y zN>9?(-~+vhH=<$pu3oA|M!blWQi#4+qI$0iW*PuIRGg%>J{L0tY^4!!Ru&^LuH&TX zl!cz_ion5r%~G)ppq{Zi{O?~w&fg&d$8!P&mzs^c$7Hx z4B!@@R8+TQXmv6mOWpci&H26FM)$XXj0bM~3%tEfg_7-C(I(4GC!5ht9OzywEj%u? 
z$jwFml2Rdh>nX|3PDp#4#t+l1Gmt%*=tP^YXF?_ZDfXEwBPEIZ`o}W!~)JI`_6+uw%p6Ma8lsB;A|B- z?0_ABN9_UnHLvEee_c-*flE7C+A+5DmUU2|Vmonu$xYyO4`fkfcXW92>=Vw7i=V?+ zcha|P9fdcQnm;$2Gh}URr@jey=ffVQ$V91$y%=t-JK0`6?#x2dUe?^1$K)zxL1~Q2 z8F6v4M<8$B%|Y2lV|NsdYKPJwaGYmd=5OO3Z&Y@wo((%{y1GvaapXitG0!BOyR;GS zG0XiaKDCDht}u{}M;S$b&lkh)xq)*waT9-fyAvetlVw=PWlDRi^G z+8VaLZi<3JU45SG@#{Jjw8M|nwOy6mcPE9z2a7uUD-TiO;V<%s2J0p++me6%oYZ;{ zek^4}(+QXdkZV8OD`$*WtTT-1|PYi=-hrEg~6f4@DEJCvG`|T zMIFMZb{9HAhh$N6V^#5d`K{~4Q|v>kH{DG%de}%$RQ|Udp;wce_d=yPWU=0bI@B)( z+i}CDlOh{2V$%cQqQktYi+4+eXuNqR-{}*I7R@DjknQ7A4|v)3`GW8lqwwF`%b9Tx z9h`E^wvbAZ9$kW`7bAcE}k?DKYmY({%C`1voa9wIn@qaL_44we1{pTn*~uq4 zU+vFei5i55NA7oZV z6P&*4=41Yq#MzXE?9Kf&nRQRHpdbQQ;b+1T)QRg%Dfh_mZ^Inj~!>gGBzt3bvsrTcoXKh zskL7{-{P`WKIU%5a_LCow7Cy!0vY`WuT!tk(e!tvTZArxu6GyljHDai&OM=KX_lxK9a1H zryJJkvI7?B+Ea?&Qv{ApzZScz0IdwtjF?77-1n9Ry%CmEAFtfNE-Q9TUu(eJoV2IZ zQ!R}TW2ISrLPRH{6?xwL$>7?Fi(?k*?myS2eBz?u$jx<206zqQTrVB?jLUD)r-t{TIq=>on8f@d& zp32b@mkW1vRJynH42V_S*qxI0c_uD3+>97lOxW)k@Ik!Dp38UK>BgRfkG6?~_38JHg8MRO+9^sX>`!N*LBr ze3f9N#a%kzs5hZ$S%XVrUs!A>fAtImcximx`t){+xtTk!W2=wI0<(J$L3$c9wigj5 zid~-FX)Yd&gg^#n%CWLYRbSEXK1iN?hEe9%M=U91~87e0|k@ufT0$y^$QN6RaC z7|_t7@*0G-0rfy^#8nC1Sju3kKKcWe+p%h-WS#zI1YY=LXXoe&SV9LXS|Fv><7dNC z$3-s`!De^^wtDjQRn@@tn&A_SQW>L@Y-CjC1Sz3vFzFg&Sts+Pptc?R&O+J~~<~!nzdW#;I3o`5|j*=pT?i(;Dti!yG z&L;9o%?8KRk3TsY{0^5|-F}>`9c=Nhs2@Dtmv?TzDxP2Trai@8X4E8l;G8bU^>G## zD+_VFfx94sjj*++2Jr|Bld_e6acRT7agsY(&Mv>{nOx}{G9O})y6wDV+9bHIGOg8zBRy3NMgFESl z>sG&$%a*c@d0*)LVKX8yJ#Yt2j^T5da=<5w462sR+uW*)C6W&lnthhv78zgtjMLKm zq5;;3b3~^)jp5y{al9J~_NG6Do81tfv&K`bY*eTMV@XsB7_(^5lKT(A5d}c0^r%wO zB00(~J2v_xtE)uHs24Ifxe1u{T)8e|--MmSw`X!s{F)PZMLnDiw)BV26{F4ViHVRN2Q2xOY zRgFM~<6F8*yxaY7E}E@&jAu^uVOd*LNT#>pxVK7yr4VU+yOV{VZA}BW2BqSwEdnH) zh{{fG_OH5?&<*XID07Jq1SGUGtrQQor&6EnE}DKGK6R}%5iStI*Gb(A!2LQDiOon+ zUSDfK6QK|6w;UOiDOvwI=SO3OZe7w47o2=lBj=|VU36jfF?FwNG?|^yIZ4uGvCn)I zy9~Gm6U{E(g>i9lxvn%MR5VJdBD@ng$xLB@Ocn!P;k)^Aov(GT<;>ei`*-qLG}~DGRRaGD6oDj?5qHRc-uEOh%QT5#}iCAW)FYt6=!emw=GWb6(1um 
zOI$69T1j9$oc-d($+cgTq9r@6MoTx^*Odxu_Ap!Z;&1xYL{F-cWPVtk!Sw^=;^w%q z6(CR<&}*-T=-QL<4mR;mlm>X*^*<5?M*tt=u1P8(k8Zs4B)s zxeDsW(CJ8hx6Xz&4-s;hJ(C|)v(C@5uFr%|%6v7rwvh6&w7;5=$W;p`k05I_L@S>& zr#UxJk&xQc64CEQi@dYg@Ppd-B|dc)#%aYFJ>6p(6p+aNqV}nK|7C15ia*Z!dno$* zJs7**$tJa8Wrhq-MH|V(vQx{+ceOUxUDpd+0?@L+KsEuzPDr~V6~ zs0USfIi>X6AF~rGfqND|(+aASqx;WQ+m3PE-P7N%niZ|0#PIg$Odd1krOxwm^mwUR z_eVhK^%W4Eco`Ku(yMqU7>zXKg1OBQt=#;^4@{=AeXx{j!WD%k2y-Y+jR|GBCek-E z^Fg#k8d<)hD%!6)j)VBT1GHF}FMcu~g)nXn5XDLM|7h3k$Bz;8O^i!VEwo9d^t-{* zFJdhgURIg7oZIxlLQ8WHN@i%E$h(9M<3S{#f~t$}RedEmR_gYmORzQz)akt?-I-D5 z=h_{6<`DQnM-cvP=1fVk3`T+(a&Fz_IeU<-q?6ugws=#P+*I-q5N70(fQL%l2>b5! z0F$FfuOnmml(8NQzn1zA{^D_{hqN{hCCHx4DNf2)^W}`Ni)_u@y4r?>jEn8@k1cPt zH63B%-ML2YFg5L|K}<0^$9I#zXqoNUp?$%WCK@m#tNotrsygc^4g6cOItXhvaPGl z3>yFSDUs{e&2w)lS=RZo?(4n~&JDqO8}vDOLTYD6Ylk8 zp7o{ChfWnhWey368dsFe6~AVz-ncF$h;T#1%-W!+E=t%vD-Wx=jT}s*`nKSe`I)*Z zZdvo<6U>_%F(v|ZiWfQzgdgnR2%g9(KaC{cLtcfhC+E1o$2r!{F?%8AY+>CTPdzd2 zXTr?nhOFxcQ%zmqPuJwN|Zn~k-V z?vVcP)Z0$IAQYW9pro4IA zxq~t#&<yR85J)b`Yrdo`p|0Hjb-Y&fwpi=zrUZV&`tf6)IGY=h5Rc7*)Xg^Wz}36V(qyt#wV%C zZ})QHI02WcyZ78_DZcVqY_@1zD8_NDY>L{uPmHrJK0>n#+F7#SCm{<3!g{KDFz$6R zyLW2O2;lCmaufO(MFBR+0;1!DKIDWirJRI02TvRHay=3}UgdIkZD)ev%Lo}|6smC? z!E*Q05wZc<_%M$0Yf+NuGCR(PVy;EkLT_aa-Ez3rbR%%^&6ro}fyumbLhlhXx<+L? zJ=A@|3_+5+Usj>DvJ3;xn*sQGf(&Km${1Pu2+mEfNnye+19<_`>L9wm6;@Cnwf{{Xf$>E1?N zT~Lz*4}rC!^MX8zM$)m6YkG~SLJ@`BzXD&n=J!hW{SSE4^qmL8scU7U+D0_kNmRAB z1yN)Q7C$X$RA8!I zC(5_`(bxHq#bUNm-AN&53T42OR3WmU9-{#AGDb=1S^8$HeG=X^sywqsuXTx8UujX! 
z?3}9O*!9LJm7>$;xKx9sPkkfTz5f6si1C)BAHp!lZ)LZ{^O)@|mSU(QmQoXWQGvBX zGxg3B-n%PnT{lb9Mx2(yIAFI;(W2lTxgRM~I()q`_ybx-aH)Aix8hV(CtY*LXz%NP z@*#$2O*TnwmPJYAK2FE^?MMW2yl1lg4r@F9C%^O;x&HvaWB&k(tGugurkYwbT*>UmWwI+uiQn@?W1eZFUrcqdWum&8BXqDwD` zx)q+Bd?(>HGAtKsHN4k1mjO~}j&}ukqhhGTa{SpRBR`|ow-0Tgs>5`taU9`c$ldae zxFfgC(zb0naGY<~{{TvB^c-89{hR3b@2C6|xYH-Q4DKI(q=-Dhz(S;ORGtoSdE*u6 znr+4P<69@4i!wLiPki(w^=`-9*42IaJNB`qHOo0G+~_qYdu=QGLgwl-0KjBip>fUt zW5LG?Pqkb+L>?iq)3o{Fo$l=}(@+t`aVo^q!U&3a0u>V#N0K}+BV#g=h9KsXn^8@q zm9*>XRG%*`z7>6Xori|4G;>1{>&CO*1rIL8S8Hh zw=?Rn-D=kYJ9jTL-(AgdwGmXjjC{ZXqLNrB=ZdH~)K#gi?2`Wgz>|NHIV<9RuT3_z zyZmC&>Uy*nJ}%Vlbn6{KA#FBaFIHac?@^aBTt@BWhe+ZgVI#-x z8a(%d_KD=NbOw>j4 z>?o#_i#FlY>~2~WSwOSiBFsc>a-$_l=V{=Qpm0r4@%M+Wd_&-^S4ehLj^^KdCN?|8 z!k;n|k;oz92RY*&;+&~-PnABJUu*PdR3zr)rKdOGJHq}Wy4O4(;Z0J~>oVL}K{SoD z3^9xsDhT;`-GF)aug>4vhvJur{v~LBE!Msq>vu4GJ@A$MHnVFMx8lw(5bF|81(1T? zb(0SSK5U+DM#hsUq?9JAqOCZ(vG-cn`tlVh`xwWasYc0p>h1n-0Xf}-fgUUT3f3)%%Pv3lr|0sApE!(9nXHns#2Hn4XIS6 zC`xg5NiWECwY2k>dpQhO4L0Z^V%x^zesP?IT=UbXy;;+nE@#)53}30 zK{@$e;F3AYgMq;v=AATPnC=;6wLoT_gQy7+Ani~(4C4!uNyT(1MNgJ0$eBuB#VDL(ZvIq}!5()t{3`1@MnWk68Gx`$B5B z6UN!p^xHc*10AgdRxrxnG2rE{I6VE&YWki#NvvSFH-2Q6=u&84Srmr={LPcsw-wD5 zDMFH~ue%rb#Yz7F-_FWNr52DGu?p<2c+o$0UaQI`S(!+9s2!Pq`wP?gjGhKpdQm zcE_f1>s$7hb6dw}ZrF=noX!aUxam6dQohmvKs~CA5f(LvrCvQQT_dkI*-w(Af1N=MHei(S+wI2>? 
z{v0|TjoppY#}|>XMIdOvE#~c3M_1a*mPQ0m=$Z(mx_>c?5hDD%^PRzuw_Nkk`qY+}w-HYV+n|c* z!6OE6?4{HwI1C0?9=*xMD5Yq5woLT9TeDWiXyA<|c%DBwmTxGQS5nv`<;PG5T=WOO zHP&3~i>5^#o&?t(V7O={MkX_ZjGmwo-=`zJDb$3gHkOM|klKFDIIFkQ^*$-^m%!~G z;+BaTc<;wPB%8oirhOm&5iL7ddkry`D~VONEv#`|Os+QXS!HD-D~-7o>z^5$z@H92 z7O4CTWhU-mByA2A}h>jU$v{q1K5^O^u$c(Z`-zqsBPBkj>`*+P7 zM@_j`%GbY`VcaQIojLTqy>0pQBJjV!1o(gBsC;AL9}`Qb_){i#&~&dh3mIL@7cUdb zA~_=jY*ZkiY@OI2S7}<6<;Jd(Tcnd(h{DYiGvqlNbG&kKv=TV#M|#bc+?dvjplN}iu0YrIU9KE(zJpl(qCLG0xhZnNK=S+ zbHM8BoaFJq#VT~9-!ys}-d@Ehz3zGDj<>q6h`e=nwz0jPr0{8W?k7e*To&<4NM3f5 z)4yDwly=2-UVKO;SmQ>B4=c-*0OV~XFe9lP9%z$lS4BpoU%H*Ix845$4m-oP$qP<= zvLKZg&GQ!_Nypv5!5n1t;C1G@kFe=q^YI7$`Tqdx*FQ6kh}nDbQgO3Cl6xIyeLds1 z4zetOD=UnLW84AH01wMG(qC!X1?{Y9ec}5{nQ`Thw8FBTg|JRoAD6x=-@W`veEOr# z)ir0ihw#(GJ{s`1i9RXXcxcIQZ2EC6}EHx}%hl8If1- zWDTmUM1Q{ny2q4AgFRGRg$6Z|spXq!vZFBQ~5sp|4!F6LYlmA66mbzpw;&Bt-=-XYNS zZ-hP|o8ouF{UGWy-9a3lCh+a3jU&~q#|?1`kOJMrgzZs*I)ILH6pURgHXgE+-Pc#v z&h}UNnL)aV)8^85`Eu|0{pZpC1@W$t;{7j1@jiv9D`=N+qs4O^fl;C^I(1xa>IlF& z2b_xAzSZJM3cjCC13D^h(`wCiWM7G7Ps3X1GkY?2(813!63%5m4NIQu6R z?H#)f%2j`d6{eqYW$r(;ZQ_>Q)moeHgYgj@zf4@j z*5=yY{ViPEiEZS-@+QP(H@f0ql#!8<^T)zJ0C;Q0mmjoW!}~jpTSU|&)+E0D{A{9o zeO}+rp3deuHzmTuaA%CZ4$xErIIS&KPNZYME!U!VUxG2D={l13uFIJ(SF`zDpIm$s z@JGa-2>dP4d?Dj+6+x=YrUayOKP&w|W||QyXvd43Z;c zupHw(!r*b=uQgK6OW&A(C-lAObvg!V0Q-osN8oivC;@KP*4Pg||=SdPvDoU~ACAg9p}BG9Tt_o)z$YXobAf?euP~d! z#bPkAGFXDjr5Z0&S)Uj;9IDq48D@O)+P&Y5B2Emuugql-(v^P-$t zNJ}y6<+)psLa9G2dCzM5M@gF6;#c2pC5mXmG2{r@7=TB90O#vlNz?aSBg&fFw{`f0 zYVtp4I(C!MwYHsKt&Y6}aiD-k@!7hQ6sXvZhSR^>Jaw+aMzy?}8#v&^cZjA(Ju&x! zaDMl$Pk&mgS=pB+t$v4L1UHJ1+QTG)8AZbs!8^Ti(*%##rFfT%bq^Q#4mfhFguB6|V5~I_S{D!SKX=YaN0+8cWVf^-C~WgSTNZlgO&i~ZNVp! 
zGsQCY(J$lTyM3Nw7+!(PqAa%%GamhZt>HS&3xW#MN$Wxk>+^@ScNRrljc&*{{ zA8y_W!yF8Js&F!XZhCYz(0ISZ38462Q`8&IiQ>1KRRK%k!ZB3?8Ob9!=y^PM&FGt& z^0#!f{7qE7$WEG4dt3a@Z{dRY$4KxMriCVp8|nhuN!cxBkL>yod zFUxd?%T`^Xi8$!E10!i%gV&mgLU5+J(??x8^zAo6-$P?dxU`E#Go|A*LnDSo!3bNZ zVox2$G5o1DIE$OBv`dwp{H2Tz0EM8=V(rJuRK0=jU<7aFU z&nKtURi7eH`Tm>!{K@_`gmj8b-svTFe;>XpviQLdilQC?)bt5-Mz=`>8kMc*+Id{F zJg~?C2RHyHrhC?QooV|;+uOF+AHa6*`u8Ce}aE%JqqsA$3x6F_Jrmfcm->J)X5 zo$m{=8RK+gj1ULg9vr`yS{gOwt;}Z9XwrLmVr(>s;5SZ(gPi+}){?5RH0KynQ00=g zskeRq0Q8b6I7)o6O*Y>COTWzZdmpz;dPK93o5(0sl;CmlkGt2dKN|0@HK&48b#(DT zZEg&bT&Y|I89zAV9UCN^@H){qdEIt4alBj>_c(no#y&Clho{1P0dQ=54I_vD0ECH< z8MRa+k!Fwg6PyWL4o+2i8uo7i_;%C7T5OiOC5VE;XO>58`;7C+{@T|Bk9>N+jnmZZrGqN}TKC zdJ*$uzf218EeFBAB=~FMJKq_21~Wf_b$1KmFAql_n$|b(By0BDJe)?10rPFtV}Qy5 z%}**-V^P0%X1#j+j#<;6vuR4rO6&URui$-=r#_!>wszB9pDjZMlgn3*R2e&6MhOHF z$6ma1nw|^&GRo#C;$kdOW!n3S{Oh?BXaR@=1A<3VI5np#oKti8_Z06WpF&%Uzb-gz ztnFl7Kp!{mmJ_QSWVR1H`VNA;o8qpjn$-Fi!npNAY2q&sTrI3O7V)I!%H9~{n&MWC zzkn*ngy;TSAW}&JrCOxbwklVJy^gm#?}lD4(=~fb%_ByJdwqLHi|o3cg~W~}wQ?}B z9H{eAf}OH~wZOnRQU|QVsf6;b#+(d{zcK|260ral*m61OaBzE$D|yFfO=+X~J^ujV zhf38ZqTQaU{FmhWoNtM~9{5L9Z|#_@&9Qr)Q{H! zVxbvE+g)7fs|u=S79Q)>FswecB%7K zfNX!uR2_wSjm^*4=6hSnhm=P7W7~m(LE|L<0DF_19x+O4QH&Qv*qus>a!aby1Tx4$0B4flcaA!6KMJ}1jcMx#{sxcnq^xZhQp?(Ie;;@H-mhrs1Hnk+fWNwvUYaCrn^pFj_l-~2@Vs%*SKmcAMIBcx5H$B5^a ze-MO8#zEU25m|uG-YPnDu4rOtMp9L1%2sz)YpKm!o|K_ZliHTdWj5J_1z7+HKiwt8JIU=L|nS5>HON7o=ZL8YQIu~RrrK${w{N+b!W9&wDAiAU z=+^ev@IKyMZ&J3q^Ci1@nGta+pPM+~FI(_F4?zMt?8a$jK11FMKp8SqQUR$5tstri0x!Wr&OaM}H2?LUI^&JU3(I0jkuelX}cLf_? 
zR&VJ(BhvoGVGY&vatM{o(zGRZDs|&0>UbE%eEaZ*{{X^|;>q>BPTN(BSiQ4!@YVjc zBvLizpC!cCG9(^ZV!aj05?f*>Kmb#cQ=ct8FBkWhEBh%)bhGoe`h2%Nn_Tei&xkbJ zT~or^!P5M1W+Yuc`B1nahD=kSAH0pSa)kHF?I2bk!*7ayD!$aNd?Vwp2wmwODzuO6 z(Orn-I$o*>1)9$2kD0z=Oin}LWX9l(M?@+rwB<^o<#BFNZ)TFSU#ZVl)T%nv_Lhx$ zz5ZX3@9nO^sj1Va07i-SjleH1nA&nZ@KpUO#+7=XyLk#rYvjs+d53gFDZt=!%Q58o zRH(NrT-{!Fk5+UtY7ojSVuBAYX&=f}jffE}nf@`sBxGl|IVQSi{{TU|ohG>^%)^_8 zM^-WrTXyC-A9xPf#%m=9ea+=nttF|$c+$olYr>kPg_`+Vxg(M! zjZb`mispPd<2@(GS{|RRCA&ouEU@2N!giZG=(3kEvavj*ip!n>`516TKXzQ|PRU#T z9Ei=f=Wj39^gG`U>M=umbGi0gMyjj4$B~cjmdMFD;MZ;c00|wB{Qm&I{rt86018Sv zX(OUl<0tU!e;GDV+ge{AHefAD3lhe>r?V5(ewFAKelxM~wVBjBQLfopX>zZYuX8ML z#)l^dd$3SOGq)nR=}Iq_4KBa%KOGF^2{=Z4-t@0`-pgar44X?wCv_!G8CVZt)AeaKc~M*LxHuSW z5gIw_IpBlEZC{g_RFB*DlJfrmfXpaHpDRy(ulnwF&`kS~>1BtPxiGtWE&*aO^B?Ex zE5FnASpNXg{NU#0OEij3{{Sl^smHJPdgtj_({lSt#*(WT{9QKu4x>)Fmg4I6M!QzG zi)(<}vIZHLCORH7&#iUV9x1X|r@RrgamRvyIaWJ>8*)!foaeqxZ&q3_ElPr$j^66r z>NNdy#~<0FkwwTr?%*i%TO$}e<*;#(26?W(MG#Ieqj`LSLrIW8K>b1xa$AGit_@0( zY^lZ-6zX-^F2=I!K5nJ%5iQHL>l=@G!tzS{_cZ~E6F8fC|fxolkxzpw_C~HUA%PYDTiIjyI03I=c z(ygUV9@09r(`)+f#W=;n(U;kOmWNB=zZPcGR)+d`8uAh?Zsvhx`#jRds?5;4fJ(7% zQ^+9TsTI;+T-s_{eAZr6U|dA#*DbI=mU7tn{#MDxK?em?j2s%QN~b3(YFei4=+&9a zN|fCtCbze$cDZ)Y+b)eAhnFVgw^$>ZcU77|xaFJ>T%Eg#7#TV5iqO*TpD;x>gw3b0 zaR={6+4FS=j2@o2&0KfpHH77>OWsnlUC!1WVFjhb3wvPgGbzbnz~CRd)C`;h>qOd^ zY37E~<&~L(&dj*}9jtdAdS~&hn>5>S2uCpd5N|p&zKM(cM3QlFvu(X>*kLa z>h}Kt3%qq7i!IAy79bd^`A~pxXFj z$GRLEu9}89mI?k+@+tf3$U`qY9i$V}rEp4&CY)XRb?okRR&I!GHQ@=mYdc6Ha1JmURt3&C0$HCR)+OidFX8sqFMJj7!$i69r^HKr zvOT7d+bylQQJahB{lq|S8-OlwLEF-bbSTm0s^NQG-Rj?xeGK6#b4H_&Jg&EUYPbCn z<-Z2JKl^BS+s4=b026*DE{ov-WG^hV4Iyn(@_kM>e=$Y;=*NAi!zzUJ?ov&g%h|#{`qir&IwzlXs7Th*I!E_sK*RT> zWOcywKBpDa+}~W=+*&ahf=&vDZKRCtImQQ0038l$>T9_Xr|le@m6BU`I~yze2<>9~ zV@RraWh5Pr8*v}0J!`GfbrCcREOKV#P3AcxJJhKw&tNgPp680Mb`;i^u&KkA_BHQ( zWu#j*t=6S1R`J=s@@76#m3%H4+W=tali7uMrT+lL9aG~))Sd$HE~hkp5X1ifYxt5K zhTb&X13_+BW>#e`hDFM#Iabe2sVPl5o&Nx)RaF@1w^n~HtMB`cr}lo=b 
z>v;7o6T~-G_fgowk;ih5ziWzf8Yo2El3=QUps5ORiuA7pT{nmPcj8|FUD_gecJ2@D z?+->tmAM(e(`~^glCnBD#!lBh)lqI$J@2(0eoE@ywK)`FN}83MZP|HiyZJVDo+!~S zua&h=2i(CQiFEsn1-jiFPv$xojfh4m(~>%b0FXkJ?j^ryT^8Zkv{3nw+s3~yV#E{F zWv~hU>CZ|hQ6%BZ^9VvpX>`^6_BuOjfu}=rcYN-$y8w2~TO>1MAa3l$U}K;?t5-+Y z{EJydz0hLJQ)t|-qmtx<$K5#R9F7NibmZ@1>A1$cWiIBmx@nh@I$7#YVV7)^Zc*kR zml?}@06w*MPS>=}GfTOe80K(}e32^_B!=CS-vf>S^gLpe(u-C!mo-Nzbt9j}cG|R8 zn_~v?HfYN|w7b#uR9np=?%nL~E#{0oQ$;G_;)LhbcAyzr~Bl*J$!RRnchbB^OD zt#V%5ZW6b}&v(^!IUApb_Z}zlcA={LPVwHMs(47;2sGagSOZ~U3@nNPyD>MJbmg~Y zn5uxQ$6WibgC+1c!;KG7w$U`LH$w0(oLSq*5Zl3T5`xl5PXPxREP2i|>rpAxT2 z={VuX%m^QQ*?cGP!@yn^@NfJe-WAcJ(lisc>P=SOPa^f%R4KTcF2eHtSrw?f!_kOQs=!|a$e09C?Z;JKr4fsDto5bD<(rwf29wvZ9sMC{y zZ5HO)bW{-LRb|L00l2TDbbpC@Y)znOSNiH%-#YJ#*84WBSwRGBd=~k6Bj!ExTE-OT zD0?S*le|=VU0YvW%%ukixlQvZw*K_A`CIWle%DvEF-ar$Ln1m zn{@UO&asw>SzN3|i3+D^2L#~s8SE%(Yo0Y@X-7io&YYm9%dCwFucKJr?(+6Dk06$j zlp-Gc&Unbk8T8G0M~Q!EO*=rB!@eHyCW~?69}#N0nX(s_rLFY)H^~vKmr^dqC4mJ~ zb8SfZzWMW#XU&%*HNS_;|Ot+&{PtFa;h3zX%+=Q}y>dy!Ka)Qg(F z=`^}7j^8s%(1Wkd8C`O_F3Q&?(6xEIE&EG&ku7A38xIipcFOMJ2M!p^Z6m`W7&}SX zaJl{>2eRYaJVD|3b=&^{58SQtrn{rxN2tMY(5uRjHWElzX#gaN!ROHOR8vZ%l{Zh~ zO8%8%4gov9X<>Kr*c4iR3A~g~!W+q=f*2PC+L<@l-VZ zU~bf|udbtAUk?;(_b`C&!l^wQ89no!#}&}s*D7~kulxcQlW8-dvDU9+G3v0+liG-w zv~7{JmHCWH*argxBi6R9^#LvHGR1U_Z0c|XYvv<##xf6LF@x5yzbv_y%YQ;#BIg^f zr$Y_ZoRK7ps^jN7RS**qzyLN6Bird(kYC4*Mg$RwqeAD+Kua#n7En0AB#iwzH03|t z^FHWl^2t7zF-E_B@!c|{5=;mRiLp)sf;TrA>T~X+ipn<^b`VJGC_n;BD#*YFd}n|< zk5P_?xusHWbX1{FOYRiar6tk3m{RHscWbFis zqHpadp+CQxQN#QjOU>@B_kM@jehkt4FYx2Sc3vCsZjW!Kcv=SXqqKxM? 
zu_Tj@O?XC~b#41j{6_HSif=9!UxHo~zquMrJIG%W>Zl}%t|SUdy{))sNP!r4-p7En zqNSv*uG)F_HEve+wr=-dH~#<(&dbNqXgWr%@ZZB83%8ofSl4x!>~p2+Q`<_A#NK_} zyCEPttVqh)#sdSI?LINv+jvvqf5Z)V&2b}IX*Y9Pz22#K$|c%FV`y*~d4&K35=p?W zy0z*?oFN5xtEc!8MqJZs3H9A~*YzJSf-JN@*?(W~h1Q>Md9TUf`-m>@ugg5wS07_# zX(W(?^BFQ*2R!kPE33JKSMe|G$Kc!ROLcz{X&x5SHEUe9S+4CNifHz3=W*CqqMw-Y z&TA@B<&{YEQ>Ol0&HdWyTI7_W87Iv7y)Ewl0Fm^Mh5U1O;!PvNTD62@$-1^nsJ0N~ ztf(Z$K_CnsNa#9aw;$lmc7FtXVfc;k&iFIOD?#E-G$ZAsisUSJF^=0r=uXklN zrs~cOZ_JGqy^^VYRiEXj+B0$TBm_B1!YeGk_I_F`oYbT6g{Dkyf=+tF`eY z7nUuiTC*%t%*HoimQAA=!8{BM^gg*Ysix~zkwd7w;Bl#pTMqHL4;YiBx4<~Lotk3vM zG>`fSH~s=#fAQ+iPNy{QIXN_cEf!j&msVD=?@=UpbypZ%=dV35E3C8EBc9$l*+-ol zqF^8!u-u+G;va+be~UU^gATo|-VFlI z@ZPqSZ8FCeqTsHu#-qk2Q}`n)-W2(zK5WX}S)d;cFP|v}-uxx3slD0$WBDBCdOltLfh- zoYt_E)KiM@_x}J*o6e)Ay^TFnyMH}DLx=F~)Sfl?$*cH?$bk4>*l4gm7=*aE+cm>& zBbM^Kh}FFegT5m6z9In?$qu7 z0B4)2ZM;3K_%bPE7FrIsd#72($7>`aPcjgBt$Ft*VKjgiZPNm|+&Skw50~qMSjkDY>}wS{ zwA$3SrCi;|b7wW|spYUUV8|3iz;K}UBy;QST7TQ7Wwf-mQdJ}y_tXnSZS-b4a-8naOkLB~i=k0nBz3DdMptsII`kL>pZy)V9R=~DmWI)A%CwFs^*0`fN zPNds8YbOUNMf@L+T~35u-KEvKO*fdY0F&lXmB>;+>O1wtY-p_{`h?Ilt>u*pMt_8i zllwaUU*>vky}I3BTG}bvlG>@kHk~SFP(AYy;bCl zO51a995JR3P&US#dsk5%%zE#~Zx5TRa2rhUrOnl`4##AdS5U(t12`WnV+R@J^MY#d z{`ERhdpKX_k~60dXzJH*ROR`!eJic$vDjHy%>>eU%^IUIY@FjEhI$dwykGWH7ur|A ztpd%j+IeHSzmv?A6P9b~o1r9(ZzHc^Qw*}0Mtx6kS4=Tq*hL&t z%DbE-Y6HH|Hyi{ORoiH~#|rf^u=ZS5rde z&!i%1S5;p)3`Jp@vy7aU40%<~~HMa1esBC|@So<#s{ywnJiKC!HP*l(dWasPz^1$@Z_0p`uo6}RIBG}jr~!gQ za0m`SuFG|~4%iF!g1rEURHPyW;DQUT+uOa}TVmE?E!F~bLpO8-U{Mxj(Z^4OG)RLq z0A;L^~BO%?4PnyR_Y4ySU#4I<<|jOk|kO4 z3Q*tz7hd@AksQg9+!dhQl$&z%bwHWQRHoa+T+{_kPlkT>KF0n$l2NvF&CoBg0Pmd4T;P^)THtpX%NG9>eL z&`0;8_SL@H{~Y!!{Lz@)L pLo+nash|3(|GfU*+}#hpD(|+Sf7`YewJrbv002ovPDHLkV1jD2HpKt{ literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_1.png b/solr/client/ruby/flare/public/images/pie_1.png new file mode 100644 index 0000000000000000000000000000000000000000..392792e7c261ed7525bff80314f2c1dbfcb90688 GIT binary patch literal 761 
zcmVVU1=x~;#S*x%Um_Pjk0pn(P& zPYBW}Iz^`d)?zKz8l50pU<+&kpdk%u_=I44r@hnO8J$>9{EhZTd*cZ~S9f)Hgr)6? zzv8aAE8Xv&Q+&00wR*J*@NK@$w}%hl`|In=%P$u-j9kvL9=NClE^vVZ3=HfU8%t6q zlgk0{@EHjJwJ>_O2jItDp`!uFvoGu`fbYR4Dg%6vM!YnDZ~nkPtFzVF(F?}CxEJ>TaZy|p z7auqzVnR%a34laMghWOsMl_-k4Gm0G1%sd0XJe0h9%%y}iK1eJhu r$8=1`Ix9e}TBLIXGB1VIk4rP(}w?nwT^$bS!od1QHWMDPb~T zOFWJyS31xOefyHWE+S29e^Gw;+1|YGljn2Kb9c{cVt>GYiutkyYQZ%ev5$T119>H{ zOQteh2)qEb|f0+vb3q-7Eih(H7azy=#^wbPmE zZ82S@%XAsgbWPWEfGSj>O5+vb7x_hg5wIuh33~$IC-@0|q6xvn_+k7o4(v+1(ynxh z9@W4x;uvv^0OF>&DQ=eUS9}L)jK*l}{KSq?!)jO!19X~B)9HEyQ%E5NXtE}2^7)CC zqPaAe<^qsL8tE#6q=R&j4uEE8hGs5Kkh-axx&cxk1yZOYn4QVaWM?i;tSrAj>(BbD z2pX~>8yDExvi!rwurYkHF8hkNecQfmAHV`EzyggY$mPbyhK8CKRg{GVRsBZ=ImR)r ziNPmEKdjDw0f(B^t=yzo(IgH&e7{HE1$QyAK_`y04@_0 zynjq%D0}Ks3~P@yCwiFU#D&dzW|)|$+zc4KW1}R*QhfMw0(Eq8CWfft2FB9 z9G#a6fv*S5C(NYRGe~^t0peuRnb)O56dTxaK`s)xpCg;pKb51~ws!=t1_JWRH zN3Ww75DQ{KER^rpybp>AF(D=ZIV6YV5WtV~rcE5QRv1g>llf$xF+HM3^ax|5n{<=zllXe6`#yMC Z{01r_s7dB)i3H* z)cPMs3-pS=C6C>;9tUjI^1UTlfKTi?ZNC}(i4eVO3f3}!1V)`yCkB{h>yy%&{{iU! 
z@=DUj2OJYsPTAKb-4bcsPIZ=E39M3x8g`cb<%bqMG2z%7YfI>0zxupSZjvg2nWv)y zOLri^L7asIu&ERR+5XVM{%m|w(Q#q~z&of=NdokIH**ICfRE?gF(C*a&?k1u>@|RA z$b5USfduGo?V`6eg%7r1expJ4XqE)%-PoUFJW3tVdt?C5^GSfdV^fumXzGBzJeSUH zB>@su?9SK@>VQO3j3d631i12w$g|5p9WbINhm#*k0_0Q_MIP>?4#+-Kcu-;{0kW=b zR&IF$5T9c2^Yr0^5w5r&>*4@bf#8MkH3<;eXhb`Zu)BV%F8HU!l1nVFE7xTM*bW2U zk^$#q(vzq8+W>BhdRm_uIvDqSeeQ#k0AnP%bi=)c>jA%JW?DyC77Z}od#R`7GT^&7 zC3mob2NBvBdVCF#a9!Q4X8_Eo6oOy8`1b(=fA;|zr~ps&@CDN?z^(1V<2!&iquYoL|Hc#W^nQOxa_OOS&8iK?@9K-?8EX~sF#R<|L+CzH)QX(Z%sv(%0$<5?uE>5f}zc1&@ z`DzI2k}l~N*xIW6Bl?IwQdw7h#oOcC!HlHA$pO_dLYF$*8mzNdgmfN9kK?c*^=q;b~KdF?rH_1?|Jfmr4GiANm`(=ZJK zsoqp?s`ubCAUYpnE8B6XYcal4dZLMvsZLKkuEF=rb0%KZ4i)azXNEhiMU6uGoX?PyI aEdBzfjH)~*qtFWg00001KIqEP)q=-`5(l03$3b_>0dRa&hiVB`Y zussa&4|-WW1y+|Fn;v8&Ca~M&&)f^J1z7N_)lp)Z-H7c4M${=MHVQE zMX@LXyR*BqyR(4q(|x)R*q8RDeF_ty8;_tt?FpW;*e zF=|u;chDVl2LWz_+u$}T=a0ODWR0wmwbK(Tp~RG!5(7wtL`b9_!3;9U0IHx0s&IN@ zf1`R;uj&P`gB|Qt5j32J({KVLM{*>0c7mjvbdzqtI4};3gDQggnfy$C=Iq2O@`v-` ze7K6BF6e@OhNZ2@@7MjhzdWw^ixYuFAQ1rQG@Yi?jVCA+Mn{K-n->+ug#|_V#|bi& zp-dBvk9+t+`SuuS`P!baZ#N!qy6$;?SuT$&1o#umJJqVb$C#OFD1X}O8 zFLl+?F%CDF8hm|CuOeuTTBFuz6M`k~bNv9dlf-* zpSjQ62e1~_!dgy1^d=i<3j=LiPqwDs1Ex0vUq)<|%S?lAsp}Ka-rI9!z*9vK9q2#@ z;97O9x>ip>u!PsdnPLEY_d(<-0H$YzyPYmi3oj=xetZU?3;pnwzt`-=h0a1}p%YNz zN?eJbpfJG)OajhfJ(HDz3%`2b`J~EaHQ$4r%*mV#2vH#_L;*I$hS<>Yy-Kea;fF9F z+&Wt95ZWj?nxi>N4ZC4C?38MA+MG5=X)>KmC)1Q}u57Msu27mRCX2};rJAHknnbDL kF+7H+9A7VWzX#8Y-&>rZV=i%~82|tP07*qoM6N<$f^`XN_W%F@ literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_13.png b/solr/client/ruby/flare/public/images/pie_13.png new file mode 100644 index 0000000000000000000000000000000000000000..b35afb402e9c65bbbd87d7bf1a5efc5ceff59daa GIT binary patch literal 779 zcmV+m1N8ifP)>cN=CN?>F;inAgE}!GFq`(-vrit>uV3^2h@f zxgu9&pg3KeE=~jHfH`0e06VFj)J_Vpxoj?*3lJ1R5fs2V?i_cH13cjgPXKhG3tf%V zbu?RInx<)*26RI=bOWF=m8snNiEsi-uFw^_a(rTYXtUa^HVe>6I!Py+5zHZn9H2|Oq)W#q 
z_BVP^59&bxTiC)@9YNwD9^wILffi`t*855|^PydGW;uZICP#irO)>j}!`Xmn_(eNkDSpVzd1oS==_ zn5~VE&wl-d@;Q3=_|DP5&r5R$V~e1$C@cz#ZEXFk>qmDK;HwkG56=#$gKsbIS?B%n zxcNE_;5-KL$0KX_7ncL&Ksf+til%7F(Fs>SWz;WofT!1&^FIRIBUkPY<*n-)_JelS zuG&>VN=iv732-qk#>E;%)n>1I30w&O8WC%Y?ZaB7zzVFu7;zIfaWiJj7&FEUW7*B@ zW_FXYwWYPCwI#-~m24$jVa!MwDI>)g@e@DsAH+9H)BWIS@f+kunQXGEiV6S#002ov JPDHLkV1gKuU&87La0?2nmBB zkalPfl7kNP0`Fea=K^A^UjGH~v%S3cz27JAcbEHB@B{FlQhC|}`4Eau>|h5wfT0+Q zp#ZzV-QaEz(7W_5y$i@hGLcLK5bT0oumjSpG%L*lHowhp^8*%Yp%x0j0S6rU%T*NG zVisjl76nvI)l>~&3R9R;{Ed)#GEe3KXUrLM#sD%##>iM1f_p1_D|;(IQc6lmse~To z!PV#LbM*n@s<9l%m7#Zo6H$l6#NYXfMOW@*+$Fg=-`Oi!MiSWf-X^k{n2 zL{OJ?SwF$n=G6Dg(EAHV}Vzyrk>*xw%FHmvze8%|>sIGbX;O$oNd9k@u@18Lc zw1sRTTc~uwUk|44ZqE;ZtPQ*z`Br%DT$3+&_lt?3#bfbUJOHs0E3uX@TGes$((NoD z-1i!HW=zg~ywcIy3!J&wGTuIHB8Ym_qaJWBIhUME{M<#l;cK8=;nssx}~9qE1Z+Z5ya8tDBT0jy6hKWw^{ z8_PQniZL-J#sE1ahvX1IhR6^ZD&3QaH5VL@fac^(cr6XsA7A(1Fq?{av7~s4r#Ppz zw3gO#u1#yx+BE0!&G=?~lk@e(^~Lo?&f`Yhh#Q=15iO!cIHwJ?fi@iF3#H(B@U-{| XkMW7%ewCkp00000NkvXXu0mjfPv2;F literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_15.png b/solr/client/ruby/flare/public/images/pie_15.png new file mode 100644 index 0000000000000000000000000000000000000000..be4b333d6a3dca64ad44a4909497ad587f60ae9b GIT binary patch literal 792 zcmV+z1LypSP)&1*Ekq)C zTyqcWXw13?chI-bd|j%IHpf{PweR+3=HvU#_kY8i3iea@PvzKYfQ`hJ>{!Gi7JxXswLG;v1EZNndKjQ#8m3`8fD2f_0-(Efx9;9L zu)n1@>&<#IfDAIoSO5|iaS<0lvouSy+XqNJ)I&W0StDy?%>pnN%|&z3?E@>y@6L7S zx-9@rx9K*wakNGGKGSFV3hSb;c*Z~DpYa1M!XhkEdcev`XJ>0``Jk#gGLlaJqd|^w zj4R{d6NVm#-VAQuzw6WCC&#W8)3(PU=%bTO z7J$;AG$@tP!0#r!=iUWA0uGN_dHSJ!pDTIs@{KkNK%qrw5n2Ggim&3U${^%$1zkX; z|L&R4EgLU%&;7cw36QkL59)afK-8fQb%30Z6LO;bCJ_}o_Iw2@-49;AXkY)ahRjp3 zzYD-w4B?M0w(e2K2l*f$1Qeg*Q+y~}j&DuQ6L$dad+@@|k-~K)5sAH3f!&|?%Lm$u zefIxmdTB511$0fWdP)YxTiC)Dp!!sw z>N`HMKT-XvU-bjXBagg=Aazj}bpfov3aoH)f-JxSECA4bx=;5l1odfsTAw~SF;o3f zeN-Q{5Hx&-&p5%+n(Bv)kP$jsn{M&iz}mpt0KljC6rZZT!0ztQP;YPjqKYy%m&^U7 
zg8~=0P{+e3-gwsi=6mh=4Tk?m@Z8bbBq*h&w3M#BfbHwW;lM9I=(%z$JXU?a<6+0+ z&bJnVazakXiP{AZ5e9D$KDtwVp38RAey_tqPz;JeF$fSRaS~@8hEE%HDG!`^_oUfhM?g-rUk`k<5JW3l(F(Yi+)M7I`a5ZV5FELz0u7x?-zAHqbN=(IPl*oz z+R=w=R)PnR2#Jsg;0bv`o)GFm00D?J+&$KB4z|fJUcbK!G)CKEzL(~B<-eH;Ho+zU zHKnH16u@FE#$q+-#?zTEJ%IcE)nb1;;PiSU7v?PX^J*#Z0xxh*n`tv`=3I+waV^ey zb~C$~-Q+y8m|4s$a-J<^OW6|VT2f1DNzSR4da3s)zfvmh;A!z2TX&VbUzTX{00000 LNkvXXu0mjfjGku^ literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_17.png b/solr/client/ruby/flare/public/images/pie_17.png new file mode 100644 index 0000000000000000000000000000000000000000..5cac956520f82248452ef6855addeb164c543598 GIT binary patch literal 795 zcmV+$1LXXPP)zE_!yv(~BWAz59pS_w8`+m+$9%fA`O+;0MBgD$`j5BtYd(gnyHxrs6{Pm^T$;b zI%2lWmf13(YO1Dc0F#-_C07&iBPVE3ou@p<49w2LDZLAHT`*fe~8vte|Gn1Lg(*w)N_hoz; zp8=pQ>5_hmqs__p=pNm3xXyWt*L&7`)_VXx!6*1c@d3NLgM;1O<%7!d!a^c(M1uks zxKPHA@0{}F>)Xer`zxDT?N^k;bq=7oBrb_dr3aWU$un2a1E&7^4~@@@_gf;DZ+Cn* z0JH|JL2IydKvd!yZULloxPRng@pk)*UiZDH27u;vbGx}6Al0OrRF}?KG2;Hz+yPX+ z>x^|5a3nGPuIqI_1FX}{9jzY?0O5cG4#2tMTyd_H(d2=IR|CmM^ZD;ZQ7z6X=!LdvJA~eiGnMu zRp`QKC~*u-%)$&hd8xM>i=|U1f2cjX$9;$MaqoM)S4T&}f2#WP1}G)2V#h9au?rOB zf?SY+z3IK_y=g$}(z>)RpcpI0iZOs|=9;-?K!^)*Ar6Qm;)pl`SageS(E+re1udoH z>dGB4TVM-p0Z>#$RaAhW9zZ|T5A{P6zNo_IPao6cR#0NK_wCKYZ17J6pZq>h@f?mN5YoBVt61 zR3CsV4P)GWfKLu2p0!qPZ{6(q(ErCBFaTOQEuEH5fVc5B-c~(l!}`sK_vR|tKXqx< zy$Xn5E>C&fCV*&1JK6zhNm`PYYUpHN<8x~k;QjsY2Lq+IcSK$cPTd0FgcD9Pz$5W~ z-p~61hu7hCcuNPI*qQ5hUi-o)*B#$NUq3$vntnKg7rz_p<^N`aEXaa@5?A6%9AIG< zX5mty-LBh-)ERX~ogqr? 
zq;^s}L>r46iyMnXsY0rdDiEnLHKxXh4xVb0`79A_(noq%eq7ldJTLtMf$E$O5DLjt P00000NkvXXu0mjfz)fGd literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_19.png b/solr/client/ruby/flare/public/images/pie_19.png new file mode 100644 index 0000000000000000000000000000000000000000..5a61a37cd5898f082572d307932eceba5f2500fb GIT binary patch literal 783 zcmV+q1MvKbP)lzwN9-Q*p2PRc4GkN=G>ec5axtAVGeMOyT)DPfJ1la4jq689(c;f z*{p6cP0$2Q0E()piV9GPN>r+RMM#{)NgVK|y=iY6AY){VjMX4mTrMsbmw`25O;{7E z*rPJ|`h0!9K7dd0DL!SKKlTjLBu&!f$%*|XJDZ)&&H^+-BQ#<~a2@Md2NY2e74hW6 zj-mvVfD!<(femb!2@9BH`o{3;#sxVcUIz2H%{zze@ zFk&L8iJGXL;?^4SWlh%PgR$W+UhQ4&UF`+fB%5TDl?UwX3=Vd8*Dfka^Yhv4KUR=q z9OG)Z_{jHrzaG}m&pm3N=(=+-HV6s}!h*0+eSrOK$LpJ2)%Wv{+dg;vHW741T~SxG z`T$G_zPbotpSbq3+rm|L6i@T5=ix(r20=&25psk8;v`PutlpFRa(ke^SV4WLHFxDc zP`B0cNg6g0L?ar}2zZygOWvjG1#PdI{DF4s^%n+4-#@(zzz;wCW`g^WFbR_|;FEo_ zPqvV76t}I%76Hhce(pif@n!yx&t81+0?wCBms(nl^JU+Ie40=5X+VsMQ85aTAu>dU ztb5wyT|eYQR~Pr>vV1)VG!MMv6L#SIaNEVJUrj#1DkabIEYFx;uh;AKjHxr~j5@L6%eBZtIzApL={!<>@w?H$bwpSEUL=mW&HM3>{<#;(> zjsxC=H{neH=hnG(ZUK@isgeqWlA&ZM3G^)XEcPq{GMBl`0ra5{ea*vlwOV445-E`a z9M^Fj7Z9c}h1q^a*e2U#n}C+lGFk>;3v7Wc+(OVl@DKb0;3RYsItksNMorLTdQ6W2 z%AspIm+@09pI3v!8GXkK1 z0t(j%@-PqcFd#~zBubqV6r*BPi~>C10S~SbtgKX4Dl456YluHznXk-WBj_2P;dQXI z4e_VEDQ~L&-EfN2)9LB-G(gL=Ov~*Ds8(lYCMRz%YML7xwtdM(Es;bL0RjTz1{*6% z{Qbs90H1?rKi>d&SAF{9H9)V*PrkhX=uz?U*XICz${V>bKyPz;+6RJ{UmhME0Z4t1 z70UoVT#>vFGdjI+*g52U3w}9MGZYVd@dBKPu z@gqJUkI7^5*d>QV9+$`Eae#HRZr0s7F&1S}76oKgR%Nw5-gr?DU;qPvwykYz+np2C zcC}q?7eE9NMC#*B-{R_Eb+9@J*hxESC;z>eKCBPx!+;Vfff6*%H|+r~D38|Hw@S_=X{8{R zNQNRU36fp|NdqQFdwH?XrBZ41@{hKDxA*7!z8`+SciiLHuJE6#<){IwiLcwSiA`(* zC8ea46reC&m@Z5Mx>xt=UZ6Z%o-NM;d^_LHw*x{{hze1_Hf|fYjRO|LVpt3S9q2$u z^*FB97Sjc~KoGHvW{Z@J=J(HdR=medh6SV+lkU<8}WKGuO zg9F+$e92d4#QzM0Gg+Hnm;^1+C{r)7hvodJI0O)U~#fIS)4pPu!{UZ zF;EPc0P3xbCdihNa9b#-rC@fEKPt_`jY0xZNrEL4BM_V)0w&(}PtqRh>u(|@TT z&v?c+@#7=CHy+%JHSXub7h;$G7*_xaF(D>25zrFxX0J>&?r;6p{m}c}1ke_-MQoAA 
z1Gux!QE9kwzx|po-`{TnXz^G)77sw$NE>NuAVI(i*9!1CKe^LeyNwTbzwWyY93OB$ z?|x|l2q&Cy0^*XmBrY{xiDRGo2Gl_9`(2O5o;-UDzy%jvW`MgQLu7~y0g@`Il3G1r zzqrfdm(y3P?>p7=!trnZywel9@a$_-mP~7*uoaJ90aX)tKDh`Xo;3+>G%X`ghprtu&P$osyYZ7GscWDb9`d9{xM_B z7;_LbB~vnw@n~)Rim8~&!P@o~uMeybtPcQenoYCm`U`enpr#MeN>q_1Cm2Y5#L8$Q4L7{_8~dl!Mz*@)ER zB!~`lpaTeIgV|uVp&@wS0a_m@Pw)1-4|Rs0-FkBkKp0_!odowGLu7~y0U;%%gcKJI zhiHA+^Z9Dbg;@WG#V;L)-m_c#tyYcuI9zc@z$n65MC8R=nxTsdD9>w@}fdS7oiTpTmr>I3tki#S|mi+ zpv&$;tb^e{*d2BDTXwr-LZdtT2e+Qv%Qw$Fzwdo#_&!_VxA33J{;vV7#FXvW!47tS ztelmzGLW84&!%SqJ*WruAdm~^!nrWOxEL4X0=PwPky`}#34Vf~033$Ha2NoZ(1a%I zI9sVBrV$#U5kTA0wzMsP%2cLu`5ht4WSJ}jLQ+TyNq~%#aWY;7;KBOA`oTJ|!EJCG zTm>_-kk~Kwi~Rr_V`FU0yngC8NZ05ZT{}ClBcWl8)}FFn9k?|vh+0stPA14eEI z@80|bocntDe#iMD9Yiyl(F_PtAu2>GXu+6sh<{SPt!4hH^6DM{4?OS`13cQ%02v?y zfT)OysFaX!Bo-eK0ql~$<=U|I;~wYRhYLMGeU}jM{4lRu-Urzvn`D!Kq)Mu!0%U}Y zkP++Px;s}sU;lDKpj+&0a{}H5^>MTTXl(NjwtqbRJtbea{%8+-*aNcOtT*cg(lhCq z^bDZ)>3w=1kW1texddRl6mR(U6R3L>e-_gK_x$begYN-nLX5U90C2$tmvvnEfHl@H Xr5zSmY(pt-00000NkvXXu0mjfB_3*R literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_23.png b/solr/client/ruby/flare/public/images/pie_23.png new file mode 100644 index 0000000000000000000000000000000000000000..accde96ec9a3b53d36b2e719ba198507f6c71684 GIT binary patch literal 774 zcmV+h1Nr=kP)*< zN8M3@&G2S;GYptbW|P?j>`v@X>`nk&C0EH+0^+1NDNX`XND4_IfVX&yw*XY33RU^z z$_i~UouM;y1~Ae_+DHRbr7BgOp9qPO7>NO1-K%?bfb^4o(q97LR(vbI6$e(uRdH1; zqDOfocgP)b2Oum8i^5`V{m?r|7w7_AI6AO>CC8KF$#H-V(m^^{2w(~+qyR%P6hk>W zu!Az{je4UVz&h5kZUbmlTh&%IKr=K$Gsg!={nStWfVF4sS$j5s+2QPPcKGYZ<2HbN1K+?m0K`RH#8m`ft*2MM@7zZoT)(aPfwNBnqrrI_ zKzQJR2k_2$=e%=80P60?HEqs)%GKV1fzJSZ@WE#Xxc^3-q?2?4vLj%Y7*yv=GODhgMu#Tf(|HAC8|UL(oMQaw~gU0-*Z|zynL!vnZI}i zsQda(`!WJld<=XDj@j(ToGQaIEW?=Pw%nGRv2-LINkz>% literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_24.png b/solr/client/ruby/flare/public/images/pie_24.png new file mode 100644 index 
0000000000000000000000000000000000000000..6a41639d29ebcc5f8618cf0994c33d4eadb360d0 GIT binary patch literal 775 zcmV+i1Ni)jP)5py2cC@Cua0fm6#J8J7XnFPzwdd~RJrB?Cd*Ac%eO2^F_)m4@umK85DA}=wHLL+S zC8y*RU_G=RS`PtQo7Sea0b9|n=vEXEtb$dr0#ZzhNio1OVi~cF04Cj}n{)tn*kLam zS5@qY`81#A(}0>*(`p*v3Rk#NdPmqKn`D!KGwzH#;{Y3G!)&+$z>WEh`HguXC8ea4 zRK|=7$mMl;U0y(35EsOS{Q92%AfMxNd~W~1c9a}XjwiQ>#V8^9`7v1$OQ*Xeb79l$d@!!rj5$eVdHZwB;jeOuo)0L+eM$FgGw2bPx~ z$Of_j13*pIWbFV)o0sp=Jep^Bop*|tdY5{adI1`xQ5r2hU~{v-zpJZqP(_)TNGAW$ zL4gDkD!BNnabxb0x&Z(Gjkb$@2%n5`qF)DsH3yz5>Op&nmK>P z01ys1-~gPn&ROSd8GvtU`t8BeeR4R^9()U+0S#y{0^He=kNKDnaCuxFm#2t?9X+dV zv>q+~bJz8UeUSk`G}rp-f92N;z6Zs)7#HJ!9FZe(1YmyVXMTeYKDK_Z6JMSZ(IP#d+r`Hf{T@6l{RX;hcr55hE=K?W002ovPDHLk FV1m-eWs3j+ literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_25.png b/solr/client/ruby/flare/public/images/pie_25.png new file mode 100644 index 0000000000000000000000000000000000000000..a739ad3a2780707563311ce28e044d9838cf645a GIT binary patch literal 716 zcmV;-0yF)IP)vqJ#yqu zXk#H<16Re;Ku)M4aj>>jPIL(^C~huwDgFgr92)3gB`V3{;1DbYC2k!HMM-IfR564G zx0(#;rI0pYbGDZk`g5rij9!wKCVscaH~O%S9qm!Kpx-$9ssOE>(Dy10ajzxST)u;SVR9zb*4IF z12k1rH9L5;4gI>QoBHv%;Va$@ZH6{O07WTE(e?`t4yLBY#!e5_v}Cea{L2msB#_X> zAD{E_z@4F=$KwW|v?i@dYuy8iudlznb_=fIU~pKO5e!V52;q?NRi?i^6X+=n-#04`$$<95KKut_$_CIO%B z(|vmLf|EuWc=lpp3UDVc7KR*+^G)A_VqVOPc|c952{i$*X*SKKPY!a1RAulvaOuIq z`o}MTN4^=<=WG^GyQxrxDnwSV)ob+OcKj;Q ycY1aE={(VoP;TSv2oaSiL+h>Awfr9JR(=7mifb(XLM{CO0000MHzL9uPRsDoRlICXG|V=^17O|ZBX1S{OwMJaS?Y>?s*3PD6D z8BES%n-(wU$Ns}~51ALy(^Lc>8NQ}f<0Ms_L4Q&G`az(DlHBloA zdBfhYHw>^jHpk}b*N=RIG)uEIdvajER9Gr36_x;+pb45d4&WBHum#A1EXcyif&DGn zFZ*RbfF0~$rvaeer}yc70Ntg#bhmwgbcBx35kRl$HNDmVu$(L>%gOeE)%i!u(Q>o_ zpem?>+Q!n>`G?ey8ajyUx_EP9b7FG>Fz3uUbI!g&r7}Go47Ls`imR)I!XY=vn2gCZ zGx#*zL7;W8u9@M_=iVH|b%5L&x5lkCk@-u_v%lCnAp87Mc+6-3=#pHLOKKwXhy84I zuyw%RnH!V0XBz-I0*-(q01zi}5~q#K8Ty2#ng{Dj4qx*R0Nu|nI0KOefapg*`T>5O zU+33tIJ`E-^%f@1U@WH2X8`zc6@N6@k)w`Gkts3-cthThH)L&KF6vG#ZXawQm$?5* 
z132E7I#24?E&qdTp3Sp)K#&AUkN^@P5fV9M<9y}a_lh-8XLVqI=rM3sdHFRR2YT*a zxfd)Qz5m$Xt>4sprn+Pk^;U}-mj*MfcEj};KNCv<7T(y4g=0l!3Vcq y06k;Z-;U1$r%qptU48_h7iTbJZD${_&H4eY=^Lgn40Wym0000_&;IM@oeywE@vND;(DsAPhOly7C>Lq)y$I)k8xQRpCu9(<~Yy}8~7zG$=bNd|(3 ztUWl7nNH(&ZMVPfda;Bqcix7p@7tgM|9-y@|NkBLuT%Gf|1|ag8lV@G;U}`lA`29S zqEHk7X;PY$CIQ(e`(z)m8{3WT#sHJeWHZ?ScAA}LrvdAjb<8>jm=&{PRsh&xhh4v2 z-BFL2&e1tK2Z)>Ernm`EfeKVGz9VFo%#vBanR2F_DS!k?kOZp$<`eluJ^`$=>+CvP z!5Qg<3vdB005D6;60@Y8KlB}>i*%7L9v|#i($RD@9R+BJhG^(0zzj0T03t8)B7c0a zKO=fXkLUrgg)MBA0V)o~p*R4#O}FWG?SQn6w$U~~DJdnTR0f!v$W7!XY6sKwAI**C zM#}(YUgqT*9<8RoU-rxXgR$l-PWC2ylf8gCu8ynYhHCF&4=5Dw-oJLedZ-}G%%szQ z*+G+PQcaco_*P5pdoQbpH9U8n^tunm8Xy~I<7~Ww%5Mu7emt!n$P8cV?s`@RXpLAS z)`;=K>X6j>y}x?k>G>NE2hWrNnmf!L<_>^Zh=o`T09&%|jn2wp4ewm%y_bQ;0dK5* zxeO4^Xht*OTyQQp7YqPbGW?pOav&cL#$Uex(283aCtXB=FpK3bS zk^)?<@4tPV0&Jf<2d~^YeEpHHTfeh|9qa%_chOyR15#KDOJQJR{=tWyFMxWn7%e;o z>Yp?wPE7$PZg$1HKLPf8J%Ro~ps~}t+HL`G7Cv0nZ)Y4>SHzl literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_28.png b/solr/client/ruby/flare/public/images/pie_28.png new file mode 100644 index 0000000000000000000000000000000000000000..ea48936701a72550ae3715253d3bd1fe61ec7d53 GIT binary patch literal 789 zcmV+w1M2*VP)v(d*sNS z7;2!VfzyWm(OhjUT8bb-IbBLfz(H^j6o=NOgM(cZ66vCLNhb?J2GJoEhazeN5o}C{ zs8|XmlQmG&v!zX<$!inP%2EtPo$7S z3eZGN)I=Z~$Of_jK{ z7WNMId*m*;OYQ=&iA`)809C7MRjmL`(=<)j4@jNVNu7XNPz!3o0GON1P39)+2P^77 zmK)2B82}YQ5tKTPwy3|PNQ$((F8Yd>UCXXz7ohufzwWP~vW_e=z_xm2^o+Z9s3^|O zC6j+RK}KhErkWq$w02?pQ|+*(2abWG_jcDsKrYHfxoG8ubz7=!y{~p4+tH&8yfgsv zK|aU_s{r0zS^qd*JMcieD{%3J0npTI>NWKO#7xY@Tz=t#8h`#FUOlYoaoe#YgTTIn zop-uN4S;At3t9kM%oel7%5U&}7KU96+k|trR&9WKK@4 zk3R(ppTERbrh&$Dt=7Z$0X8Di6CHpx>Unl)3gBOL`n%5q=)*&VOSh{WSm*oz6u~5o TqWs+V00000NkvXXu0mjffF*IH literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_29.png b/solr/client/ruby/flare/public/images/pie_29.png new file mode 100644 index 
0000000000000000000000000000000000000000..feb2839c0452de83e694941774cdd5e88c751687 GIT binary patch literal 785 zcmV+s1Md8ZP)Sq6hU2*1ev`R6sQsS=$rNw8_|Oyl%ohD5GsTQC9J0;2^~s|K$$F) zU|p!~Hkuo{UG(m(on8!~<=sEjzHf)a`Taheb1wJPu+88<&Az<`6pOy>6-gwK1hPU_ z$O=GuEIpPU1C(~9U1tA)6kzt4J!TJJPz|a<1z?30*5d7I zNVICdk5(PU7!m)2m6z#KkASA0XjlQ=twERIO2!{GB5KozjLs^ zBRgb=>;SNW6|86g)jG9KtpjL^rf6#SfYeEy)Cs70HLvD1fEjPboAK@*tf2o;W+*eH z0aSQ}S9bAe3;K(SsEAwRg0DEz73qp}0c@0wve7as=kXcyz(%%j_}oDCP(hfPiAMji zgLUWO!-2L%%9Kad8L_iC;8PZLL_t(Y$IX^8NE=ZY$Nx%R?=TLh zq}U_LVG$8>f^=|damiN1vAdI9x^!_(pi>cwSzE~BWVZ}L4>D9iK^m)qWQt9w=c!zB zw&u9Jyfj~z+JI4$XpQ}C_a5JUKfdqYyRV1-fd7=<`xa=0uj7akN+RENeRpE#m?<-5rU0=Z7Q{k3g2y<Xy2tZUG1(givF><-fQxTp6wm17_Mxo9TZq790(Z21fz8F4yIH^M1>D zP|nMFIS**d+OoC`usJrz=58)@KD}r=+K#q!wb);%Qyg;0A>spkfDaJaOZJkzL{vB` z92Jg;avQmg+y+shTBsJPM7CiYwn4|ywAF#rGn07*qo IM6N<$f{;mL5dZ)H literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_30.png b/solr/client/ruby/flare/public/images/pie_30.png new file mode 100644 index 0000000000000000000000000000000000000000..b32ee15866ae11370574160719986b52072f9fa6 GIT binary patch literal 799 zcmV+)1K|9LP)?!hTQXmw;Ue$*vSPwJkE$2%wi--n7tcKv2;Yb>S zkJ>HTZqojVyRGY4yS-FKYyWZeeLI|cf4>jsoXb58%8%ebP2;}?l#06IHxfu70qpU6 z{2mYNx_8~XZa^B6hNK}N>&yDGK0s|y8`K7X@iTtL59nv~Gx`}oBWq-h48RB@jHTN( zl)J@rg|5&QK#YkoF$Pec@|3T9MM!`INB}SkX2C1~WSUHq=^B8U^~`!^9f&efCdyQ? 
zMo z1SoM5C)IIl^ZHvQt7P3D=Y7Rnwk_M14NzQ)OL0|DnPMLIfUn;s0;d;ihw}XLawPJX z6;vx~MP0>@Pszm+;%x0O)oSORzAO9VJRlQff=saT!nxaPw*r@H2Wp)?vjg2lfO?PK zqxV!$QGtl}KsNqNdemM!(6o7R?CN+Cpr&8bujvPfj_8Q43}0K)_Sp_7^YTeUg>w(yhJ(jyU%B@9U1nhtz)3v8Z%3@)ua1n8 zQ8Eg!R@TZ|3m48Z-mwK$^)NQ%azEvPrnNS6$9VpH$@`#I& z#EvI%0Z`8D=n_ligH)@Xj=pn1hrH0_e*qXK29F;16@I?#>(;NNkU|QOgpV!nQb6p} z^EZ#jfXw~Z?XQjixyhZW_#n`Dt$Fd#WuWQ9Dc$)fV7z|z%kUD={IF+p;0Ayem~gms dyUKxe&UY9PEIy&k-zNY7002ovPDHLkV1h|ndS3tl literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_31.png b/solr/client/ruby/flare/public/images/pie_31.png new file mode 100644 index 0000000000000000000000000000000000000000..f5787d29c1ada5ba701c21352ea41bb847c46c27 GIT binary patch literal 811 zcmV+`1JwM9P)ih2DdG7c7@H}_ilcO#K|7mjnH6UFm-A_aj zMHEO1Ng*i!)1A|u)181+Ayr5ffZ|hpiVvVziltb9>1X&y zKtcL;Imd@z{FT$EHMT| zmP+QejwN*#u8SGR|z-(^4u(v#O zs2~guhQoi^K}w}mY6U+&_39V?)6L9bd7rm*JNg#K8Xz;w3^T*}3oD18e|$WUInYqK zHRAYftO1%mW{=sUqvEwhqf`gXmA)U&cF4JC#WLkU2eXcKKpUvSR# z?%tp~b6DP<4eF*@!2E3UMf*$&APP~4Lcls=9kGt2s7TO`D}Zv~!O`0|HsGyMD*feI8iT!4((n(9Wm5 z4;n+pkTC@C9^S)y0J?#0pc??CvDf0cq#u;mQ+!MifsLo`uOF}h?4z=~yDlufe%aTp zUx^`x81UT`IPv~65G#B0!RH60(=Vl`X8^TIDOH+)oN)fD{2;)b-QRWa3Bcai9j$r| ptgUpMEN=(Ef(4ttoqk}J^8=NzD9jMCg0TPq002ovPDHLkV1jMAazX$A literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_32.png b/solr/client/ruby/flare/public/images/pie_32.png new file mode 100644 index 0000000000000000000000000000000000000000..a38f4310da0e15a4858aac052123927b85625087 GIT binary patch literal 807 zcmV+?1K9kDP)=7%EB+NfAiHI^>;@8nL?96W^d`MYZvyC1dXydon0BU}X$K67K`|%*%rL{8 zzFk44TTG_N6qy2~uoRZU0O1Hnxa=oFO;8im1Yi}cf>i)0ALXNbIRN9c@!9w+utYD> zOY|1@C{1i$o7d(AjC000<6P?TP47W6LuSa#_Q8Hd92SSgVSsdy4$_ecFoY07fW%6y z#BLw#&qxl*Avpj<5J5x(s8|$>VgX2$L`gJ%K;k4$;slhWl2no!z*tYLC)SfcSW5r4 zSX-=311PgHE9Y@*Q~G;kkL+1rr#!`rO^Z#7O@P|1cB|c4RF0_E)&0PC`s|yAzwr9! 
zxbbmO++c!wRj=x|@Zn2}^HtNK++l^X||B{*znr++h_=XW!sSpvYHoo9)_QvgryK1*2dT z0G4NYmIn%h(V8#4z>hnxOP>d_2Nj;HJYrXX>XL!hXEngiN_X9XYnz{+@$c5}e8NNU zKDHieK@U;Ji?{woU_i^QGwNuJS>7equrp zwrG%2D23X^!?)i4F7xVU`C!#d>(UAD=DG-wnPR4xsS+F?$YQ=1AiqX__TMcZ&@gQ4 zbG%RiWP@yw4VKWH?`8kvJs9bROmDiIFA6GvdWYVjcK~z^T|?KXIGtUZ&4vIH9g{|$ zln3t%|2PcDZCC31TD60<)YyCV^}UZk?Oa#&8Os*UY8e<3hJ+yj za6vA}1%dU!`vJdW_dvS&2HVjdpn2d? WtTsCypxZM50000&V5RJ@@m_mB1ISe5`=%OIJ=%x>X@Fr+NfqGG)dC`P4=);@9n03+EMH>x5afMLI zU|q!+;XJK2S8ZqW_|9{>(Tt}1aJu^4J-p{U&yVMMU*5M&+yMV6<^NhB7m|YiSiuTb zfHl*aY0U)0C*zaxNq}qOnz$w)?M}PXZa|`tC?pDi@h~371IUNuL-HX&%1e1E4?qbe zl)1~5D zmYHQ{7cpSj2vPc%m;{J(k5^{tbAqPN4$tW4kM=*>q!T@Vv z4Xk1R#Quh@W9!&D08vB{%_7LFconY#NQ}fttaO6JNQ}e?@B%OJLKeZqSYj+ORywha z`qqRsVa+1Q88`!1!qH~bw{RBDvK42X;y`DhGtdc$Hqj>93dsB+U6bwrR*e^1+LFbK znoKh@q0lxRl!y{hvWtsvz4z7Ksfpsn4%F1E8iQMLh9EP?%rSHMClr0*@ufb1EWB_& zJYT#Z{q|^k!}~0Pa=Y9vx95?G6ZPUZfLyXY>pxPw*e0rhE`4wRR-ADcOU+WV)C^Dp zB~SuDk|asC*;DHWy?w4>KzPH}ex5I0to(arm;MGIpE}%7-<73Zg~0oQVuD_pR|`~re`G_ZDB+VKDY002ovPDHLkV1n6c BeF*>n literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_35.png b/solr/client/ruby/flare/public/images/pie_35.png new file mode 100644 index 0000000000000000000000000000000000000000..c138c2a6a961d4a951c9fbd8579ec70708cdac7f GIT binary patch literal 818 zcmV-21I_%2P)0NJFh<0gY8-)mQ3ljLKzGqybXNv|QFqiGbpw9J z&-j^b%qWG5n~R%^n*p^)?NNJ_`Axq;;vz2M+C8uhas6CB*AEaIu@PH3fB^&$0C)p$ z;0?P6_NVxAzML-yu!JQnB>@x*#X_+VAYl?F;p_ntBQX*qAWEVnN=X1Cj))`T$R3!Y zza?UcSdst=2Eia?akPs5X2C3&6S3kJdz-vX-X=h{%XZnmg~~5TmB;}x^R?Qumof)s z*|9N>`-_8=Ov%(X79VMUnrC0n9E@r(nrcrZVg(>G$xJen=@*o&@2^gD0P7#$)C^qC z98evtWRG~00BZU)eVV@1g=B4L`MU_jEe`qCbmm|i+E?2cX_-&Nie0R#R#mI20a~IZ zS^~%_StYB#y}(HeQaeD(?$6#l17L1v!15aPBc=0zW}=L*TukC1w9rBe=$tyI&Z#W4 zc4*-A<5obL3q1eUy>reTsDJS2$!!37=%G(MPq~X@C9#rN39#TB9eni=haRZ_7(9E+zAbQgIdGsP6w**~*5g_Mf;kzOk*t2-deX0W} ws5*D!f(_XFg>5}r2cQN$c%8c4)&aAupKnM@K-4d#8~^|S07*qoM6N<$f>nNUrvLx| literal 0 HcmV?d00001 diff --git 
a/solr/client/ruby/flare/public/images/pie_36.png b/solr/client/ruby/flare/public/images/pie_36.png new file mode 100644 index 0000000000000000000000000000000000000000..8784ea9bc3851a433bd7314d6fb341968340062b GIT binary patch literal 830 zcmV-E1Ht@>P)WENOq zR+trL7d=XWqDj%DXadA8u}kbqoZoQ|5+`vI=l+Rp;vRL6x<>(GA|_%=N6-TgJOHQS zRGe!6#Qua+b81cvzzZ+DNd)G$auWsY0qy4qKa$uMg=%`mJ%oSG?G`*tpmT2z^4I(3e5wdu#pS*T7mw*G#=J zdr_91m~gxQvVxRA2~;KrpI7J+YJh0_-7D9YvlpYzur0L%TjK;lW{R0&rqVAMd;6rf z-w(v|=G&YPvlkSrHN{6gNdzTUiB)1vU1+^x`l4+Nh@Bp59p1w|TlS@4xOH2-gtJ(r z6=_9UfR58~Iu4L936t>Fym0FC!B1TPu|0_OxYCJCkAlHNbLu%j^6K!MW?Nq3k6H#9 zWPscucgP*v#43E%KWYKq*3;0rhhm#lT5oj@o&cbP63S$P8_>;kGu;eSP;$TgDG)t- zz2w5Xy%$q_G=CVm1QdQ#-maQYoKJZlEUYZ5ExHO+E#Egie*@$@WCKde?u820)z#XY zK()v6V&)hiZ9DQ)cVp-0)4p!~<|{_<2nZZ^>E0Uw@AHnD)7}Ndk;2#&N9Y9UA!;CzYnGXBJyZ|~J%j|KdYNR9GBZUIBCIBjX#MLE<0|;@CN{Ra|zL-DL-ejo64S9>F=xVGazM( zoY>z8`9i*s4`3ein2#bTDJ7+(1jqteAPc)ENOVL;bbu6=!csVjps&@}>TBISv55W_ zpT%d1A}DG_t+w?s7&5DmoRNiq0q%rj`TCW*~U1>GF*uK=_5rJ>%WF z*eVC>%Ze-YQ3PdrnO>#`*c3K}O<5Q4(#O;cknZ*@D}Zp)RJo&R`^5JBIMi5h0Z@E7 zoT2NAB8W_6A`?&ztA00^F-0SJ{m8?j9S%U4Iv_5IpDu~oGDt-0;r0BE3rCYs<1 zYz14vRshOs5#9h08t)6d$=ZG~O=a7i*N=ddB5hXQ+sJs#|6uA^;a$UXAlLKs^al<| z_Gi0uYBw*$oadX1zXN&QFF$|E22!UAj~mLPB+x_|p;fe$(naO?ZKm)iN9Uk|G~ZD4|H0k U{ZR=~`~Uy|07*qoM6N<$g1Rbt$p8QV literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_38.png b/solr/client/ruby/flare/public/images/pie_38.png new file mode 100644 index 0000000000000000000000000000000000000000..d5022502b5a7ca629f4767cbeb448ed932d7d4c4 GIT binary patch literal 813 zcmV+|1JeA7P)G?hA2-@XMF^!A-k1&SA`p}^f*Kg5Zk2hl z7G4w~^I|(rwkf%@b@s|R-DE~%;euDoL`M(y(1y%4DF~kr9 zQf!J%vA~LJ#kJxB_RX3mz6oNtkpn(RO z+~H*TmY8^nmv{j#!bP|UKv=>Ow(yM5lk_A#3Frg*fIa}wU33@SwF$wbKj}~Ufe;g7 zLd*tglmkPHp~cVwC})+k%GvbwjCYXuh>!S+C$>g#EI1Y%14s|)AwBsBh7m>>;7pu} zGZjzlPdGDY=F9-1h$5OrP|ylmK?{&2vP70jCrB*BLM(tF3Zfup5sdf6`{I426HANl zj637bEP}j=H}NGbZCZRAZ{uz2e{?0 
z%T7!LgMY~&B~cPpK;x4V5eN5n>%^#QNA5MWu8-3M84u%OJb8#c35Dk^fFQqIcG{VH zf4(ShR!`L}XAx8ltAS;RU5Uq8PGgr*x;+q7FJwuYvnalrmt3ry?93ai4ZumZG5 zi?j%k1WAwtAgaDtKRyCd{kLl_3~ap^WvyB`)(MmaDqpeZ0F-CQAau|{2k58u)B0&Z z9Q6m@y#P{?;)t!Wzy12t`=kfE`7V#eqF51#3P9fJoB zS5~F3=X?+Dy428i(hMB54ar~c0pxeJj=iHB7ozSQwH>zr7446e-fh6nE7gZ;W-{;3 z`?>Xtd5mHR_%`(^JTn2rg6>-*9YFHT2fTR+NRFr&P64u!-HW;fVEevx1uKstamiFM>)Wv_|z%5JW-Xn?f({DbSY+GQvHnP)iA;6$pz8to28k(o3ne zNCR09+hw$Ww#{{S=3OtYiF0n#x%$4HKll7ToO3VdisCE5f109y4agKK=O=vd!3RXy zC>v#gpebkynt-r2tPN{{*jQ{VHU>~KN=C^5#>!Y3DeEksn%EPtMx#My~JK(PaRMD4w7jyO{TXGmWb2hv^XsQ=_S3SHyfZEZnyzX$tgMI z_QBSMQ*kOz1;7h0yc+=d3ciA`0EnOXiNA0_q9$sh26%xNcwqxz$QUw)jD>@x)Hj3- zA;Sj1urjO+7w~9P>bHhl!>!47%2&LgThJ}&fOucLFW#5^!tq}}DXtyhi@Ppe__lQ@ z%Z`sbo$Kr%6{q4<4i}#wp8GJP${&on+wi3Eak8BPWNeI$v1MLp`SW|%vmzk6c&opo za_g`}B%}2MTF)x*+6hv#)GRdv0R#|OJ>U>-F2{hFVdRT33dDx2#iMQcgC$b_vUy%N zwbD4V4p6KSYs4CW7HEMM0Mfa5#U}-#ira6m`2bN=AB zX|KBIo2B5A9Qa<@cItp6|Dd(mD}5ldz6O|@{g&E8$^NugU2#ikS*Z`G65R<`b6&j7pX)q{tZ z($CNOck6F{;3-T%@Z#6dr^|r%i(;CAU+uIcy9orx!r?R4S?kLF(L_t(Y$IX_(OB+EHhkweByBH5^ z2(?R+B~*goT7-h9f;|YOLh8BDOFiY(Q=ut6DfCnVCH4=<#goVMwh#&mN&}`U*qb4t zuB)&~EM`e2qdUFS2D&zzm>T=tW`@W6cyD*+4e(#!KV|T?1-uZtj;Nr53eeP=T2lkg z(r4+jG+@uzGxiK{uAl4YIv~h`EXaT_>&yDGz{v8*^2jnEQjv-jfFFMNz0(bJ+G4iJ zHrXa%TBc=M0MnSpwC*cHHpm9q0Fec2f_MYeXqU;9QlrXN4{(H$b;ZQ za3Qz=Nc+;hwC_Iu>o>@D*bdvdIk8KOjFB-i086qYOLii7f)ktorfRCDdUIlbVosP7 z<^+H;$|$!HqyZYB0f1Fml~wyE$R^n&n*^vs9qP0ZtgX~mYAgK{bLCIe617AdL0h#| zyN|7P<)5?X?77z3^%oang;*g5@Dxw+RQCxQjm5>;+1^DpZGGJ^uBf2E1updP@JU1I z#Ci&l;vbbyZ-JpNuP%}?K)CpSePi>QtvXa5#QAemH&uf*yqG zp?ateh*RQ}ICVvf#c6R`oCe4+879O16C+^~CSgF7MOlDoRB_T~pW8 zHGsrPoW!p$bUD4KyXvmG`**S5@NTK{Dz9=*N9iaX<=k4eR;^XeOUI?-(lO@;TL)VQ zTb!4grDmzgxusjWrE^ZhG)%*-_)h6~9=t7n0$=vB-TkVeH~;_u07*qoM6N<$g03@Z A;{X5v literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_40.png b/solr/client/ruby/flare/public/images/pie_40.png new file mode 100644 index 
0000000000000000000000000000000000000000..e8d5feb73aaa9349bf319814e62614834737a26d GIT binary patch literal 820 zcmV-41Izr0P) zF`CQRZUg7U?Kbbu+htzH=iKxAaDM0Ty9J3A@Sh_1*8-!)Z7T5VOQAF$+M+C>bRKXe(`{t$?Im(k^KSM7)R>@c`tIL!Lcd zL9Qhxc48-XfSqIK*g1eOgdxo4PeeE-921TKMV=y0kp~d!ggT*a8-fu>#1U}-KH5k7 z=q=PJ3nfh@O(ji$*eQ03ovHI_?;x2ZlVo!D#FBW;UbEK>5IxZoeJ+B2_~8dwC97nW zyC=3DR>i7V6@UN&2xJiC6}*C103=9)B$z)zQcbE!HNeODI3Ld-7&eBDVPpQpQu4Qj z+rn)b1UV(AAyw%V}5RlvnI-bT_&ifrKGpNEmY8IML(%G4l)HFW=MMxUzjw zh8Z38dN(*hDnTWvTpC{tFYz2$9v=ytr*}?_>OE8c;D}YPE_10V!6N4{u$FGSm*99snv7orVw1!2adq zPu14+>vR6y`pX2GaSjMIyL=N(K;WZg=Cuomd>@y*{|&^YY}afnAn+fymAQa@7wVSk y&ja$%rKzh%KpHys_{4nxGRPpyo^JDkdFBs<07V-M{Zoqo0000%r(Fa)=WcC3OLPR$~By^F5b=4%apoQs8MTXQ>3?f>gITAwI5cR?w zi?UdY+D_8G%$?@hyS8pDmrJ*$vwnB)`@H8oKi=p4%1cav|CA;FS|A-%)?Wk=Kmdqx zQ7*~>6CD#B6CHq{7xaQ2h*@LSm=&O;l$4SJOb^q;^Z;xl+sHNov`CAz2tWoIWa-o8 zWm;llCw5{7cpvZMeE{JIN4V@~MA9eelk@=!x5BM(0}`XeC^61MFftMuiHrbV#>;q_ zzo=0flr_p4Wet$;%y;HHQ{%I)K{8B+$#Cw(e&VrsEFKF$OvFS?nFx+y3}XPV=GDA9 zcVaW)HN1w`0Pw>P{~Us%T$GD)fCNd91Q$<`Xo;3+0WmJd#rPb8;kIyFxNY&oQt~&1 z8^R592nuRJEi7VbQ}R~|l|tooJ7pKUs$JEtY9P^?Xic+$?y2B`#Am4dQhVkA#>Utf8xTSWq2EI> z5Fmc{?@ZI4Y z9nvnWR5Hu^4X=0K0&sd3L0U)aXdM8*eeJU*V6y*CS@ZZ*(2zSZ%2`Hj-32UdUe~Ro zr~6Y6>S7e57*MoqFRgl$JE57Zw%0qZ*Z`=Yf@-=y?Ohyn43_rZ2fRz_m9-ahAh!2* zSLXmw^l?3-Q>Ml~a0FXioY5H{83obH00DziEsY~js@&Et;07*qoM6N<$g7o`x A2mk;8 literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_42.png b/solr/client/ruby/flare/public/images/pie_42.png new file mode 100644 index 0000000000000000000000000000000000000000..8e9de1fb4bb311fb37d4f308b673021f900cb88d GIT binary patch literal 837 zcmV-L1G@Z)P)ip_KxUIr1x_EP##Fj&N<29~&l zNs;M9w##aB;SpzSjbYV7C0mJmxVE zg!m92;sKw|r}OE6fF_^`Xn?RWYz!L#CX2~pvH;G+nK%<5?UHs$y8wwO5hWr38Dx+p z4wsf}ipdBWAtQj`7TkgxAUxp-zx5MgEv$vL0QvTOd%hiDJK0XQa|eRKiQq(V0&sFp z&dF^fM+uPE$?N2GK!z>DmSKxs-?Rpal~{>2bz&6*QJleS^&zsE-#|Dj>p$ugxKtMp^Y(Tr`8m6oO8GEAd4g(Rrv&#Mt#F+82V1*vQ 
za*vG>!I55w)wyaVB*_t0J^4w_*s$991bx+30+1qs~Jf@_?fzE%Txw^3LioQSZu+YuLR-s#EjvDCzrHhref$@Ws&d7TfTSQ7HXX_7@ zw^C}mT=V+aO-d)-^{wSDN@r>w2yF$F`kU;QDGjC6OTE;ae7L0VgL}mv8(Or=G`a)) P00000NkvXXu0mjf)i8rj literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_43.png b/solr/client/ruby/flare/public/images/pie_43.png new file mode 100644 index 0000000000000000000000000000000000000000..5e434d85f9fd52c8f5e60e82624d2102811246e1 GIT binary patch literal 831 zcmV-F1Hk-=P)XXU3b;8 z5!l?MLX6#lJ~X1J5Pt>Yk07HU3L*;XpOg?1ljsvG6rvBRQIH84m1u@!#7v7IGE1T# znMm_yI2E?rx;t~XKUUOi&T6aY-#LeK-iPy^_vK76#^66?>c1A42x-zgh8SW9!b(^P zD-iGmJOK}A1zLet0FjnROQZ!Pt7Mg|Ah*h`aw}LWEtQr^NY~T#bREbflgx?3O-VGx zqC<3u4p0ZxL3I#>A{3!a{zgn~rZ!U>Wc6nCX7z%}ZE~C3e;^p@3U!6LpkMBn`{m!r z(FE8_>?QUR$mq`K&ghO^|K%GLouX58CQs}uz6M`|uK`4rs1j9)2o5m70H{vYsXCJ< z_A_dpny2Oge*F032!`9Km2suqIfOI%Qj>UwhR{#p>Zp4BJ(z-7|@#tlmEmrq|C z0mgc4jG){ux6AD?!U!V(2R?a%n_%?W!`%(LK-{^}d(I8O>c1iMGt;&%@vf``KK??` zGHTAvsDQ!x3&z=55INZFy{-b_#hnbLADP*d?^ttkY*08(k)%JKRw@sHapu+1r}L68 z^c87A{+o3lU`CeGvh3XV`PfFihx9X4cuoWo7gr{F&&^#w9EYB>@EQd5LO-s{&9CFBspDrcQ z7K={NDLO$JR)&>f5VDYkJokziJB%I14zLbb2do2N^cj6d-#i4zd&hgndtt~tWF9jA zL672KE3uW>N+7*2y)V5lIzHuSN>M2)6A=s$AOMO(aVU<3 z6Z;({U&&YUfgnMGF$6V>X3;DlM#ZQYO`f1|372qz7SSSFB!*z9I#eC1PM%m){<2V6 zs4Rw{>QEhO5?dRU-=lg|&-6Iz7WaGmz5QO$tMn?pDuEqOss6E2m^|2hq^%1?L9P8# z%KSxTxxL-*pW&f4=my=OgFqmD4Sd!g=%t_^9r*C-IfUKKH|sNC^8C}h_7MQ=`~@+C zwqeB%Z;T2CrJxj);PaXfnSxCCdxlGrHSW5(emA6=t!>U7$rCf~bnh*^ z2U#Zz&TeR!o{xU09k|&F&f1Ij>n~8g#Tu~dIp!iSqFo~rP{$di0lp&`1j0RDAi)ym}i3J+LbN0 z7eR(&=|0=b+1Dp5_>)2IQU_mh`|6+d!05_{iyKbC*N#WGAD)9q&d1{SJ7JNWTd~{) zrozJ0-p62RDtdLG4oq)tPn^wwgoG47-P{F}%y;giLj`lcBjo@9002ovPDHLkV1k8K Bf^`4@ literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_45.png b/solr/client/ruby/flare/public/images/pie_45.png new file mode 100644 index 0000000000000000000000000000000000000000..bac04603b315b3310e68e9de6434599327666d11 GIT binary patch literal 825 
zcmV-91IGM`P)89-UrXsqLf?mAnMsK1}Dy^u9kSIcksH{jVDO?$1nMu}- zm=oqhGc*~eE2|dC=SK( z_h3IqDOQS=Vjw_(z!X5us#!HFh>!@0Q09Q5M3jgU&|+Fli%kIxH-($RO__tm)vpQH zglnb%st(nmW^iib>Q|^0YQ;o5ZWRx?hFn7~&>Qtey>Y51QLINmf9pHpZHCy!9_zzu z5WUyWURaSnRF*qCz1~SSYDt%LNe6*I00i_}T?YM;ve@$gJ}>LO+i?t{`K`+@^#b#` z20$;+l0HnD;apmF;fMCm19DvNFTdLXk&AcSSFb>HYn!JfA6Scrs`LSgl6!OU$`yNn zYG7m%U~aYaXxd)zzid0%@(+#>GarI?S`*DaBD8Z4Fse`glTd&&kV4iX= zbo&SxZ_Zw5TleenNdv#f#|ygQL*P~C^T!Z0-8bJ@4xh@NHFoZU*hl5w$O*`LHfv5n z6PT;llvSOE{5Pw&t$z)<-S(=YQow=*OX6~=12dd2eo9iyMUNZU00000NkvXXu0mjf DhP8;} literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_46.png b/solr/client/ruby/flare/public/images/pie_46.png new file mode 100644 index 0000000000000000000000000000000000000000..e3ec9d3b63c9b81d5af87363d3c1e425033f1c90 GIT binary patch literal 827 zcmV-B1H}A^P)%4W|I#6kjnm|gB5~KvsH|v}A%@AviwZ>W@N{v#ZR3MQ=5<{np zSZs?$n`jelAP>vK@-PUKFbUJrE23-Fwdz_SWgulBWdL-Qx=LN;UkHvn$DQL&7}5{v zhxC8YqY$JOrWK|YLUd1bPjpY<`hqhk9KsG*(b;5LQ z^9_3=Xg4R`jeP+$paZb-VmT@(8Fw7YcmERqzQNy8-)qZ+iEGcj4^IGRXP52*aC12{ z7AEGycHf>nVB3#4U)$dPVc|qg7r>+h!L|;$tTmLOc1!vG7HI{6N zfYftW?GKh^{O2eAyiIy$4ipP1IK!IvyOZ*BV3Xng!8f}g(!61v@yMSGMIJD8ZO#E> zx82fdgVpb{Uz?vTynoThtyuO^1>prYr5@#xp=Fd$5E~w{*cU^BrM=Ma&XBsoeko002ovPDHLk FV1oJ~chdj> literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_47.png b/solr/client/ruby/flare/public/images/pie_47.png new file mode 100644 index 0000000000000000000000000000000000000000..6acf6f4fc85b0de9b3547c3df832386f3a28f6c3 GIT binary patch literal 813 zcmV+|1JeA7P)B1rj^e!T#pn^`21%ad2*hd2l(*eivm=$)GRd%gkSiDKXE`|6;@#dRZ}%pivkQb1)G9RiGxMduMSoR ztD^uFi(*j{*xHEtcEzsPC)W{AvAfJ&<}QP9W4JNgIOXIQ4p0ize{K$5fN*H^xcnBh zBOcou8)&^Q?L|F&-P!f*N&noTlGN7b_0I63PBOcbdk0{ms=wHI48GoX?&>TCZLn9@ ztpjQ}%&FgwJO#i!KR2i^DXz5JfWY^4B(TPn-Sq-Q@Q(HJN??#7T=5l|OO{Y@ed{s+ zj=~HxnEBYgaqo@9fu<+jv3A5JPp%_?={xhc6y!tlD%0ZJmx+Vvt4luGJ&>LwIW}F2 z0%SQGDS{Pe&K|n*K5;=ga$75ABRCscUQdy&Hb*=-4j1((a`GG>wJ*A{@jDBR!B82E6t8F 
zN-+a}rJD+hAmHd5dUX?gJ01?*+6AGcPNSm~!q+}+^H)Lghiq@o9!OhLrd4*q(u&gO r+w;LNW-iFJ0R`M9hK|G!OmMydFvm!!kJ7@900000NkvXXu0mjfjx~2J literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_48.png b/solr/client/ruby/flare/public/images/pie_48.png new file mode 100644 index 0000000000000000000000000000000000000000..f2d846d3ff5e07938976b6d595e60caf6344aff3 GIT binary patch literal 798 zcmV+(1L6FMP)AL_t(Y$IX^sNK;W9$G@&!9J^MV zX|P7aKjCthqF9RHpM?1m=nUy0NUa`{NC;9#9p^w1;R~^Yo8i{@a~z;_mvwLWtj1CmR&>ej zID*^Unzn;dK=dN_=B|Qde!VfMjO$%T z3-gCBS%ZK?x}!dKF-`@Skv6OD7d8OO#=dWl+0=n+a07*qoM6N<$g3EPvf&c&j literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_49.png b/solr/client/ruby/flare/public/images/pie_49.png new file mode 100644 index 0000000000000000000000000000000000000000..7c529cb2fb79455875fc7c2066b3dacd6b3777f8 GIT binary patch literal 789 zcmV+w1M2*VP)xP>+ zuOr-MH$>UOOC!u_OH5&fCZ%LV5cc4c4?P9J?4eK+Em8uZhagxFg%U+%Am3EO4OZQ|W!T#xSdq6tkmHN5tJJDhX)ew^?*s;0=$!V@=ido3Rb}itPQUXuMGnVtwO8N0&17qrFH?D4o!!q15k=eQ7HnTL4!_R zE^W6hCX-~6Oaj~zx5O<0ge5Ft_kJSOI5kd<1A4FCtM>v_JJn9L{{t}Qj=5uQz_0eJ z{px-6C`F77#s*^ppz^3ZDo^74j(3nulW8)2aInqz27QCRL4fp-9@4WLU;qIG0FL1p zjyX8k-{H)hnKJ_jA%sv8pr{jdq7EPt5+M=!fTVTgC*2= zL>*B_5}?2cj38rc6YARqyI|iMC%oc?Q$qu9qJUH^Ay5)y$ChJn!`q}=iaRM;UaQ`C!!D(>3R+f$V1UPfKPJAS&<*69B_^Rz6#n4|r_8?MyEZpa2CZ zNCw@WtHL*|e<1XR?DT2TR~+v#??0VydZr6kZ9>5Ozn02g&p7X{FbZgktHb2Qpww!jwH z0-zhZp&I~Gn8K9iD`K7mbRsB=@LD%!8_(1 z^Ns=Hnz$yeRi9sY2U(h>S^E0KPSFxtLQ4Q_noYCmMg(`Ui(Nn`I?>7XiT#T{pbzK+ z0CLD7S4WU~sE2w0R$v8IXrCY(WP@xFpcb{LRY$NGEk=vc_K8*HpD0cgC+Y~A#3W`L zTU(VsXa>#T`FGV{yb;<6ZG-?m!)N$R^9f3&@$u2o)E;1qkn6eR)d&#~WAl_K1D_t?xbG?mekf!JIqi&bfj8)P8C|)p{k);Lwo; zxUUfU69%l$ztg`S02L|m{A&lmA06Du{{*Dr(Bs#ifWX1~$cGm-1gT0@ssdYmTYX!7 zS14%5@IgHP6u3Lp6ZA|0QrQ3S{>xgE5@D;JBM!JDPP3%tNN?V??@i*sYv zm^EfO&+cS*vOAowFRm}HFLItOXUo|#=Z0#ihRQkhQ$O{e$2UsD``}IS7v;Xu1E~cr Q3;+NC07*qoM6N<$f_2wqp#T5? 
literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_50.png b/solr/client/ruby/flare/public/images/pie_50.png new file mode 100644 index 0000000000000000000000000000000000000000..b493ab679ce81d81e4e77889a92ba942900acbb5 GIT binary patch literal 715 zcmV;+0yO=JP)rXi`xe$G@hcsD+qC zD%=?i2i`qwhI?rnTgLV>d=PpYQ9;3a+>20)9Q3foYT!!`5u+6pz2#E~QYaWyFd4fn zB+!lYZln&jdneapo?aTlY}Y?E?eli-;m7a8_jk^@3i=!Tr<~}tKs{7VR}@e{0VuI0 zw!{K^OM6RuOMvJW-J%;Phs)t|7*H8iMwJoJNAwYW1Tf4RW(~7|M$$+c34jSEnCiDv zG&*7uCvg%7_#B_(a{yrpOW5WUQODFVbquhiEon;{PzTfjb>J@qi>t-e;wq5UXZ2Zq z3p1*N&1>`8ynvR|a$2r-|Hv~)5+p$q$0v3`CX@+fLI7DH3uK`Y!EJ0~8{iq9;hEzT zI}G2$_wYRc^2j4EBPf|Avt$Oy4%s0)?Gq$U;v`N$sz?>7A|ogSg`f~@pIA+Qzu*`A zGJ+x_GGZG?ThreodPGlkU2}^$Zg|*c>zv>LEE|nxGJi-=d9CY`L+G5C@z2XBQl2 z0IPfM#j?MV7Oi3FntQZ-y;iJlf`{4Hq_&bDfz7k`Zr@A-Ki_TUzTIhFNIAh+x*r0J z=0WJG2he@B7aiwh3TU%z(FFZW>8^+Sw9-#$N?aV6xlif3Z}QnlFQsd1Lqk?8rR!HF xpO5rWnxc!NGxL=0PH<~07pwJ+(r|*E;upw$etIDFRC53T002ovPDHLkV1giqL4^PS literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_51.png b/solr/client/ruby/flare/public/images/pie_51.png new file mode 100644 index 0000000000000000000000000000000000000000..3e709186f542c468f43d891b1b4844f648f41a98 GIT binary patch literal 772 zcmV+f1N;1mP)_Fx=GhM&Cm*;u;fB65Ohxd)}FNgm$k>LgeOZ1xGm}Z)3@S445 zuNmgs=G*4mz@6{Tcjv=m&tlJF4@AdC$41A3+M>3oEug8_)NAS?MvjqVWFVe+;)9oq z2zA7wU-XN9kQ|aja)2-kvoNo{BZ@vnpP~7ot#}14eySOIE-`1v%>n zx{KY#?&6=%D{k@dv$EsaZ^6G~cEou&e4R9#dwcEnKXfoswRJ>a9X?o8TBbJ7_KWBD z4XToCW8rxCfXdey`*Yt1049`|>^zzhKA^U?)KGaL08mq%K9m7OHK!gjG=ayGHl za1k_5w&!L(3;?8%$Gn1+-Pa!658-3}uaer;)ZP9ZNG2IWAmDOTr*wmcyP$7vD(bkr zaj>MC>!%;x0@c-|m&Wdu>mmQmEDVo(NAJLx?R>(qd*Dlv@;=+v4vL&ev=~*8>S(TQ zzX_^J-4(-u)%S-C{E<)GpaJIhSx*jhf%A5E`O~l9iG5RKsRDnAqvy*Rh-gjlY+C|V zM?qRq2gDyaSWwXkTk>`(_vQfUoZwvWa%%@}a=ro4izc^kEWTv`0000W(gu{F3}x8k7Q2dWw4SL260^Se_dxzulH;bQJ)XTT ziHlJ&i}`~w^(JTIr`>iEP|XmmL_7uP( zeRWff6kzI%h4n+ZgGuVxYvmPy@^xkF$qy-jU|~Wnplf_2JiD7ap#G-SGCBZ23oW#% 
zfIBJp%zo5w0NVRQi{qh#gB^?YH;ws#f?>8wZ%*z{`yVWRUmI-t0UW0<#-@4z@rLGv z?#cc^lIyyzDj!g>`D$ve1t@JW51fss7(lblSK2|(Q5sxda?QIbU9&k;sN`rGtHS=mpsgw55JDHcu_&=Dh`~-(|eKC?q1)b?Vy^}Rhj&%^WK`906W!=o#~e;R(S0jVO$ct#jugn_s& zu8Zq{s5NShT7j4$W{4Spgd^cdH~@i6AQQ*{iBsZ~I00#|v{%{-h&T}^;sD4YhdlLi zymU)UCddSt0N7b}mYoHNj_8Ok^Bob63&(}yK!vZuSK$MMW}#VV{s-WqYtgmn0s@kN zBp}&9jZ#EqR2fx9fN?W!#=W-xuV;`RG9U%RrpY*2#3?hgiz-m|x ztJyr*>ag`}JzEbTgb+eWfLsYz!j%9dLLwxRJs{B%EzttpGPlevCjmw@lN~Qw0Lp}3 z2KxX^Cc=ct9(2L9U|KN!Zm)S}=5F3UY*_-9=04t=83ND>!1JOSv{cO(1EYUTe$-=BCV;pH3?8w z*vjlL&K*!u-r3R}PXgR>W#`qFVL)(H8*cubJy_l;&5pVOK>Doigw~$~2qlzI0#$_< zl-HZG2dozNc?VAdP(uxMGT=%IUe`Fc&j9MNJLm0RHV>vcd&6e)0Ah#AS^aHof6D)0 z(dkB`o(I%aK5QP<0Q?7~=PC;}4$6C4+oT-9k}{<`n6Ty_6`r1PSpEWQ4n9by(CP>k{6IbqQu3jM$}+pt7wlhJ+xy1XC|+%P4Tp zW;p{VHQy%NB&LqnXT5gG4CgjiZ1%l9@B2RQ^WpbbehK%9@h zb?X43XKHqW0?_bX92y3|;}#6*gRUA^jjP7(am*q4*xPQ=H3LH9@-yG3_%EW4HI%nW z08wAfd_xN$vS><};{dGkO9zVph;S5DsRQXrdXk<5)CRRdZP*1kThKJE83H!PqMhV= z`YM$yEIT&l^X<~&tieMsmHGPzquSW^nkU=i7@+j+34K%1{sAS7Hn-ZJ0H|m=HE}kB zr*~%x7EZr7_s8cg26#YVUDYN)BrkqfdM0zQY)jb;GXuy+DlA%N`+D48-MVz4!&3n) zyua*ejHDk}-r8UPS`J9Rl=Nuk5>#+2IgzryBcFldIkU3AH+?YGhX>Px*J5EwE4VZ3 zthaCeDX@6s=E;i;An5#8zg|ckl>LJ#uj~d&19u)i9s;CI%A68g{O9)!{GCM%;yDo3 zx$CA(K+y2&-hdm35Qk^j2ncsWmCGG~=*?k^st%Cm*9|q>fr8G~fp!47{o48}4*(6$ h<4W@7QU_)@KLLj2JAls}BftOv002ovPDHLkV1i#gZKwbM literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_55.png b/solr/client/ruby/flare/public/images/pie_55.png new file mode 100644 index 0000000000000000000000000000000000000000..6048cfb1e3474db7bb1afa21217f80ea22603be8 GIT binary patch literal 807 zcmV+?1K9kDP)k?_T{W9kwwzYmfx^=09tW7Po&+W_eJUlPH&-1_&6aF6l)5I<`AX+ptUg5=y7sli< zc}xaht*_Qs3jtfe7O=s1>v-#UD`<=wqs9n&hu)!gfT7Y*X{dyFHC~NZ0TU)n(YK44 z?h%VF(IvV-@hBd}140(EkZ0ZzZKt+V+X*RyDT674psmnWXe<5#7<2}mK_|HNZoOMS zi!+K6OOd6>QUnS83H=HEQ^)^!2SuOg6Mb_B`w>@@tI5>_!Y=H>J{{mN!wiFBRji72 z?qI)0$x^bEEMSyTMk4?NFWzb2ya%z`b9s2v`~ig|Bq4zsQbTGe0&pa`{^He_@U^Y% 
zUa=q4ma=`tr(oi4TU_G_2wOg7kK_Yk6o&pi)L-r|_m@Wi27}%Gjt`S{c` z&4U*@Kz)5|ZOIo<4;(S%U4eoZOv)B zA^dEjYP{#)cWAni<&F-Q>o*;YIh1HjYF;>)rgV+jc4o4k0$luPQ__z7g##w!O4qY% zA^=VGg-`ZgS~#G&E>BmoE&_0=PS$LA0I@^KcT*qEA1uzfa-TE=hR2y7q*D=qWU`;b zkXBRWtW)L>Xw5n8z8nHlNhLKB@OyMk>Dx26!}7QGUALak9n2zEH#A%W-Le&DGZsx9 zkNO`>EZBH_s|nJj>y_;%AXZ%-lU6-@kY=-#m%RrvdT+1qt_CCP%p3FmJb&8Xt)F?t z8Op#{GSKr>hSA#lb@inX{MvoqVTX|I<0^Lr#GW*Xq+VE@vE{~23z%;eiJGyKLfAB^3kzM;g&-LcQmIi=K_8SKSb|H6G!23Zp^F721%Z%3AA{XY88cGH z+DxRHjpk99j@;(j&fMEgA~@ZLx$L=n&*44qkLQ05he!Sd|7l{@8xSh|hnVNGRgV@x11@dERzNSZfei%mW=^1!^t{H_rHV1#$>(D zQ3IkcS#rj{czwlR-P}fVlfX;`|7loD4TuzW)L*#Zf(!7I ze$r0@ZmnDE)&gFcSLT%g0bM{B&;e{7o5$t>L@&`x^a9D6WKFULNTd>}L<#^OeDEW0 zmoVQJ(+1i=8vwh*?yx%mnxsjZjJ_hAK29H}4-i-c7J&ues5xqm`X7Kklh5Qc0XD)$ z*oYW<6d}SAVTrH=;F`H+t~qpm&NE0GX(MfnAMAImUDhsZ7eLq3^>qDwfFIF;R=`$e z>VGebA8dB)61&7M0pNra&M-jtk&$%cHNbPP`iUYPpbs}FRJ8!3wKjgf89yK`rKPkK zpn_D83WfnXH9f;^C#T0hTZdBL=m4tx;y^_w;J>T6Q&|Tvdp@O)oCXj;4<0WZ)T8pK zJgP82->5;W3)9-*am%|8&j3nSd*I9{Kn+#zP`m|#CUeK27=V+x#q5q9=#_irUitJm zn9+~WI(W?X?Gz?5&&gIm7>tjtVxwq)^O?9F*&TdIJS zlj0t-CJYcVG@%<aMh(0DR>Gc>Mxkl+NI|A4q6lJ&`d6BsCW_ z6&V5k(ft!jC6JPqe`CjnShR+Uu+*rYb8O$xZ$Ty3s4fX<`y=sbYk&TeP70~{qs$x#AKl1Va20DSPl z4?Y|%)G4O4l$O#0Mw8KGGyxPr5fl-AMHm`}hM@rjqk>VvD8NuK6b!{b03AAq&Y=S) z*c0pt_9EseNRnhpvLqS6(zEm|y?_6L{~$F$4NwD-gZW};F|-(30IHg*rm8~$eufIy zfp0e|l8O|7w|`PLz9w=&UyNd-*eC{Ih8gAnz?run(%-QG*V)0so?d|5bUyz0Mu19D znXg<0aA@ZJbj#8MDG4Q^Bmn6lJ)|c9&?0(O++6Gb0{0j7xz=W2uBoE7pc-(CTFW2Q z0%#zObHz&sot4kZXXOEaj?`{VXLbl*tb1&`^8}bHJagfo5pZ+0mtNcjFy^`DW-K0P z%d_R#^5)xqt9VGXe_QGaz}w{HlATLenQ%i8YPG@emkQF#<9;MmtsIPAwIx4m=X~1_ z7+onH%qd+tAgey(=#IJoK>pAXQ;BKifc%l%?tDW4AlFOuWnKiL+yZXO>E(mbOC|R0 zlfY_@C_5u@{`!Jjo&D5U!bm#sz4D}FpFQ$GVb;B=jxHd^D*Px;4^Tm0V(K@T(uRR` z%$oSdj>y51CvQD%E%S#31^N<~CT+?V1H!WV54-e0)TgyY>uVMdqStLr+qDM}MmLna zSp~%A60w=eh3^mfxb-LFc#cPab+`U-|4qQ0{H&+(CE$?v_PwqIyqBlHPVEI~y@|I4 zb|89K`ZK2t;HwX$7W4z$W1IV=0suVlzzaTH_`qe(FFxHl`?}x;IsgCw07*qoM6N<$ Ef+s0_YybcN literal 0 HcmV?d00001 diff --git 
a/solr/client/ruby/flare/public/images/pie_59.png b/solr/client/ruby/flare/public/images/pie_59.png new file mode 100644 index 0000000000000000000000000000000000000000..0f142ff043d61625c665a0cc77511980ba81da8e GIT binary patch literal 824 zcmV-81IPS{P)BH~}x= zCAa(p;o9BYJu3A*qYcHAexG%qA37kh#?NW z9Vgr)ruDR*)&n-X&2F;;G(i(I5qU*;BfJsb2p};_%n~!eQ}I+h)jt3|29LpG0IWi* z&?;QO8HGqvNm5Bt2@qq9F~%5!`{%ubRPDENXTj3JW@FJ~N+yV_mN0`w?H9Yc3QFBBNErA<8ss0`)ki7~*R)!Y4G62Kc@s&DVY zfuw>|kczo>&?`3O6pp9K0H(uxdglA$RVLC9#PG1i@|Oy7o+ae0RjwS2+mg%Alg_n+ zfc!T3(fp??2jmZKHp_Rh0LA*kBZv7b2aIpZx9q>q0$kOa6Ob1IQC$g6$(iMYaaGxq z@|{3jR>u46*DOG!<2X(NDaMAbj?(1=N-td;toH(tLJGD!I2%FtdLlgoST~{G`be{c zu=v}z`YHh7wzTw3J;D89|7J*La(KKIn3}kZelxjy17QS_cSL z?|4yA2Z)RJTt9Xfh00001gLzY5ETS{9qSBE&bl#)d#4H=33J%r5t56!yl}6JHj-1582|LMJ>L8A`@G-p?!9NBN8mrD0x@99)OSWF+K*^0=9rH09bXaZq)(UVTZkOI!m)H zCaYwXtOAOvsEP^@iAY3hy&~)~yUZ>FLRQEMS%95pr`hQ?1P?b3Hx4&|Eq;sN;xEvn z1~`3ApVJ3$DK5pO%=2e{gJhkoll7|;J4RlV7v)8O%#ax}(~RH_cCZ5|q9Q8d)rr-k zcodJ~0kDTX?42X1J9LNc07#zXN&fl-$q*SLLx67RhHjiASez@)73Z!`%#?qsI8~fF zM^FMEBs0uTkl5PQUWaR#fbYk zz~1vG-$np#cK-IaK7eJOeR?$ySRUP~T7Bm5t{)>2@9c?!)i1DxZDCs&IKTl8+VC{- zx+};>fKH5-#{qPH9lbmL8K58jJk7iVsxQ7f!Y={){fFtt-L)|DO>%S&ZW^1$rU8%= zDUs4;nw}2pVqL5Y5O>90aTmY<1~72iLEVpj^aDa%hzs$n7du8s2niuk6Fa+tb(9Co zgXKX$j>$1OcKKpXx6|!(1DwGboME1C_#Wi4T$alMVpt4|VSx3se%60+q2t$!m=u#@ z@@%nx&{oOQJk3+8_v*cRFQw{&x}Yvln%mB8=e8+Lucg=0Yn0|Hxk|1=sTx(IYLrsl lrMq-jExuWreh=Ohe*x?GyU}3|+j;;1002ovPDHLkV1l8MZV+WOWl^?RyfzXyX)FZW@P*0TCVTgJ?Gr>`|vyWa&OYYkKjKIE75>RVXycO4?OSy z0Wv@aNWk0S?eKO0KDkfslLJ9R&=52LY$03776OEkFcL<9ui8h}f2DJ}&-1QA4$ z%Oyp7#PlFNNDl(8S=X#<7NAL*q{)?cgfqYy;0yqn_Dp-G9pE%^nmA2=0rZdhNByIK zlW-DFB8D@H5V2CM6e|Iqg=gVemd}TL27MpKYi$$2wCp9jcRc>E#W?g1y+aSsT3SnM zqX7=#0JZ^>g!<~KYe2wq{cJ-65aj7M>V^S0m?`Fb{Gl$1OX89MaKjCE7@)V#nqpQ1 z%%t;-y?1Fm`1#?^yBC2#rF!I86|j)*nfpk^4@gUCDJ=ykhGHlt4A5=r)pg$ty%FNwxI{lu0+3KK-IROehttYbR{C!ilysiZ?VNvQ6Uz`uOqRBB>Vdvjz4se^)};ZPD|oo?5-`QvR^&FFGvoS z7FHcj95DG^(K-247@%m^-qQ!wi36rpSA43e2?I!2fvnQz 
zo=$BvC1OjCtiAY2!E#tc;9}C|$^?o!An!#-qmc)&=&bd*Ph$tMTV*{v9s_ynbgj?p zfz)q>w~LLT*GK)k^)qJFqZ07eTb*Vl;2t(ibXx)c*bu+(3&03m_a++w)=Y*e#|orW zm3^(=4T!w^Y)9Gvfp^ob&6fcPA%rk;xs?M~Io|m?tQg|-_0000 literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_61.png b/solr/client/ruby/flare/public/images/pie_61.png new file mode 100644 index 0000000000000000000000000000000000000000..d4fffe1fe4c9c1f58b8122d52d108eaaee8ec7bd GIT binary patch literal 827 zcmV-B1H}A^P)y-!Lq_4V=*yD7G*)n zMv-j_&Wq?~I=6Xu=T8452Fp3;a((~1_i%pa!|$BSJt5?G@Si3$*nq{tS@H}EEU*Ah z%1JpXz^byUtSZ2kW6QDS04}vl?NS4rC{7e73J_=n8i59gP(&yq6abImF+2u93}O(o z_;Mk>ZZX@-_OiVIJx-6);{Z#s6iY3=BU~+4%hdwHLE)fq5a3pGtGU(706H4%7wrYW zNC|y}R{600|(Am|^w; zoO;=>?`sF#vazT1X3s*5x~Alm4wx0aJ)!Ol9FUc;5>^5*ZpO{Hy#Ot7uiiZw_MxDm z$yIX|aJ9EZG$sHrlNyq-eCV`%T0SlJ0(6vq$ap>DdqG!e=YiJsfHVC1l{1w98fI?U zyZi^*ifzTV;`wKfuUMb8D`it1Kw6mS8Kpl<1SS;K(_=FIrGp_yqj<3u!Gm!+(iQoC z95D}I*qvOnv@LkRFecTSd)W&x_S@d(@*BYeMq9TJ@BZoq82Lpql713cl_`FhSif>G z&YLu8-a#O;a*HhW+Wh+ke|7fE!L08>zr`xU+@dEe1lL$6XN1{0DL5DaUiy z0tp7yC5;eRJ-U@jEAlddEHy^bN}33n?CKZwfe_Ygd2ZuIW^j-5o6blvGW$>olYpJ*kI7aS_7$y%W z8bzaM1i4bKlq<&r?BpKT;B&*dn}>@>yS?p=?#Hnp*IqwU3eY+19-c&~HQAbMO$O}P zu?GMS7JNL})(6Ay#8cf@{3jYd(Ym9tA3O;a2}MVNm+}+O#lHv2sdB2E3X)IqNxlF; zd)m{g2QPz9=u>oVX}biTo8`^Nn}Hkdw?mUqNoR?(#QEDE_XGY|{o33R!nN(I5qr8F zAjOmv?N0#;T)6|ri343Zt{hj+Xxr};e^|SzbnyU;R7tPgDU(o=2}Tf1CQ@xhK;*|e(Ax)a~SyE-rm{?j^^jOM_<8x?2Xg(1$nn-J#G%OY9AG(|v%{IVZ-4HV(*S zGuccw05{xlhXHy^$L@~!fdJ1KCN(QJRSQQGc)T_Mi1nbypmS}T!0I3fiOTf zYkFwxTYF=!@^X7^8bB7<6U<=rP+yy`&DRzNxHvp`{q@Pn8*_rzByAgjgI^VH!}@{# zGJl!BEO;HVN$itH56i0nc!`LNMn5C#4NY4t)6;*MK>;g`SDcR>OgN%mEPD2v=g$Wc zz7?D;*TxQ*pjHJ6EMb6>&*e48V|YTRmgjes7{dUQb94LD-GE?p$CS*rc`zYSrxy(X zNyAD-UN|7Ka0U&)Zu4#7(^H!V%#b&ib{+vBhaB=?ef6_=^liGO3z!i$=bY~R2e7Jp z124ycWLHi=B@Vs6>erU4(tIgy0y1@vZuP$c1k|qQd(N#Nv}I?`)dP7z=Eo;jtYl@_ zuaHPHMNFiZkojVJ@5nYn9J0>&7LJfHmv(z$D##$Fjn~iwThclfvV#Y00000NkvXXu0mjfbx3V$ literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_64.png 
b/solr/client/ruby/flare/public/images/pie_64.png new file mode 100644 index 0000000000000000000000000000000000000000..fc3f3474c2f9c4f6e95e46a9a480b24758955818 GIT binary patch literal 837 zcmV-L1G@Z)P)gkmSyS;Q)p6QP(egOU?>rO=!Fad(I}EODugHrQhPBfDKShMt&WTZ@z zCEApxiSE|iu5P>Yt}iWvwavSg_WSl+o^#IcIS-uYMtFXL{}j$b3j~A9I)VfV5^xbN z!bJeKdRx7%9-uO)3@QU~tKDk18sG}KLaq=HXapL828gbXu8yt-_)fl)?*tHwSi}Y| z7cti>b{VGmQyYM=jj2FNn{e40A3ZZidW@-f6y!b*7Xkh zHlUxgbE|{`=)2bIWFC7lFSMMN({fyP~PL z))l^>mr?1cbW{q!0t+kw1SgHX7vEO`jAX3&ORN8c?ncd`rwU-k@OtFgUzi1SDxFHF z0#3%sIGF%~B%>~QQqCf=q~V-02VnTd&K_y_qV@`Vg}owxprfd(L082-vCCR>vwSt+ znH=Yima>ocLgiEWRQ~MRXUu$*6-=&I0G|7<-3~+e9SIeJ(CN(PKXfo6E#|l|j4=`} z+=28KxpH>xBN&w}Hxza*oZylo+4*gU0|>6Ts*vr!y>Nod%o&ApdjP?ujVa${1wiDM zgudjr^C!k#vuRgu4iLi-L^?V9-S1mW4Gvy0N&(aKk(M2HHl;#CeC)wMM;8#Ki#I2U z0(8)eKrB8fH3DnTT*kds|4uA%dtJ@*T3@lCf?k%lW$N-i07+8~m9G>4XI;XymT4V8dR)o1Nd0+xby<--AB+WMyMWeMPU zGi4F)By;x6PuF90SF<4FnBrkg7eG|d-6GvU&ze0 P00000NkvXXu0mjfoZxzx literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_65.png b/solr/client/ruby/flare/public/images/pie_65.png new file mode 100644 index 0000000000000000000000000000000000000000..172f3b7fb67c6151ac93570b0e43bf08e4f985a0 GIT binary patch literal 830 zcmV-E1Ht@>P)|mV>aR=|>}3*ZvF z#4a&lQ`uBD6+op^=~O!4?sfONdjW13H;fwwglIxEAsRrS5GVu+fbZlx`Az^4h(Ltz z?O6UEF=f;jjL!mQ6?=|90nmj@O`h1mJHi|04fBS9XhXCi+5qt6JULIk_VYfE*qWd% z?k@ohw&H178nAe(GpmIGY&o=)meNwdSY#|R76A^iL+lVU04^j=)lFstGhLPX!k(3R zH|Kd*YX;znnsj_UqJI?P~yRgexcd7gtfXh_d zeEj;Mv%*>7tPp@1W|+MIZP&*>zApuq2EX*{dwdsmy?FRq{uqE89cW?>q(!ud76DF= z)8q7b0h%Mb+3g(v3wv~SOEgq`k^sWv}uAT+Dw{KPY6-f&Fb2$`XJ) zHGQ7?${a|^C>bSN8L#@x=yI|qk{$u%j^o0DV*MRq8nJk2$YlCM2U!ocUE!&h4A|a!!zAL9H&*&*9fpT>{Q2< zF$Zy~014ggue98?VZTBG%?K$bS%l2z=>#KkLJYWjt42?VuHiJDRzt|BP$q~^ zA!ID+Y^F4q5JPigr(&EC`}gUKQ)dVvR$?Vq|F`q|J{YY00Nopvanh8Ws{jB107*qo IM6N<$g2@nhZvX%Q literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_66.png b/solr/client/ruby/flare/public/images/pie_66.png new file 
mode 100644 index 0000000000000000000000000000000000000000..79ed69a958c5dbdbbce0068ba280136b762c7091 GIT binary patch literal 815 zcmV+~1JL}5P)c+I?KUNewRrBmq?009IL zL@$>T>k$XqTs<=rfJZT|8Eyfvz0`NGGyaYcHo`{O0I^f-6gvSzN9YLM8i2mm$(vRJ z_)^noI=*q`IeTAcQ-2nKDgHL$VEoYKNHh|SL<6wxY&+W?Ilt;14AePKeN;!nmP3jvfhx35Tpgn!^#p)aXdS@ShSqg;jylHDuqj=pj5#Et38asV9*JW`lAke1VOS`JWQDolkL zfL`b0mj0gj3xrI4rd#*c-pP6Gymns801R|JxiUPR0MIse->e4~t3Gu(IEe#E6{#Xs zE8~c-*deTRRX}vQ2e0OY{{NY93Y?S+~1o9WaNpCNZzl$ zK4##bxbOf*;75-A?yD)lw`Fw0z(pWno9G?A4ul0?YiBEfG)0a+Uk-4}%P-YD2eJpN tKhzrm&S%M_YzP1!eDI@}iyyeo`3=r)Bc5p%F1-K%002ovPDHLkV1mNKadZFx literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_67.png b/solr/client/ruby/flare/public/images/pie_67.png new file mode 100644 index 0000000000000000000000000000000000000000..9fac515394f871134a2860a1b532efbccace4610 GIT binary patch literal 831 zcmV-F1Hk-=P)Ja0FO7v|YVGsyv@;m4Kl z)4>D1nchrqCgA4WoSWkT+RJtC-l~FVV0_ivunu4cQy32(x~bjNZfbdeF6Gqi2}$6E zx!%uq`$g0_@;2ZsIXnJyTW#tbO3>%VV zB=v<3CJ?F?&SdSc4*(@|Pw0*gMqSENp6v&sTXHf~wLCziq68NKWqfl{U-8ia70Wd}jh_L?A%~m~xErxn znVeREzHfj2{P}*)`JDrQ23ZdN(**b%;4VzBPiWDM8o=5( zPv{cBe8`f#FbvQXP05v!k_?b^|pFjy(hp4bf5*$8>`YK#qJAi9j!&TWB}S0e}fq z9zBvTNfra-l0{38`(Cusq@<|T2E!U}kdYmn8@kDVFlHpxbbzzEb^>nRy|**J+<(9z z)lO+ny9;21G{3a4&40j<*0eIJ&IK^|`i{pLb3j0B^wYSae+Od~XAd2$2DUV%XHvB; zfY^oGXa~0E*Il?b^6!ALoQn}x_5%<=0D&vua)SEeT9TANw7o&wBK>Kk~|f_^Jmn-4Y;x!?(n}@f{TKVVfpq4oJN7EOYQM5F$%GnHA#V z0P!|P@{aTo@+P)TXf~P+02^$ueaBJ! 
zUGY-O+{;gkz+CmY?usP9HD{dFv4ODaTeJNF0NsosW{dAYLPKZ>4G^b~)5qxnLPp34 z+4}W9SN!eTn|3t=&=p9Uas{hm9-KKNDXJ*{7hun^=h$-qgTx>)NB}F}%D3`^nBbCF zAOCR~(A_+foR{o7ILz8nHk1i~MJb?M_g~lpN5N5W6aXP5q=eMl1V>PY!+>Gnyu7N^ z{{VNDe*VK`;9?#YQWsJeQURD?g2{`Zxox_9JRERN8oP9({s%ZjPddA|1utml*?D#z zU~}1AHkTJcQ}c^E4@&|MaMq9QQGW$cPa4hA!23N|8ZC{MMlXWPH#CB0$AS-VmW@^n zjs)LNib*jkUj1HkNv28)8&XOE_r>KAQWJd8zryFM)kfoACK#fKs*ROzofwsyS(Lx{ zo6nyGG{^qQEauh;hQAkBvU9x%Mi(EKmpivkka;(KLO9_?FyeAr<-T?x^mJkk_rd0g z5p@U3#XKOgA^o;cyL!IvpM>n0X`OZh`n1aXM-nz(klWE?95Vx9%`r1Ozk8XW2l`2F z-Chb1UwgH;>+!~kC7f-g_6gUD`4sf9{dHD!eg=@l={!1c5C|0}?%);&FBDqH(I#gA z-0@!V+g5-%l>SQCw|>3f&#hm~<2iZ(^Ofn(;}-za)Ba2MXn=L)-SF@uz;(#fyVwp; zV(5brE+C@ql(1$DU`OUEi(-Iq?XITX6#(Lq4YBXIzy&v%A9_(PwC4gmcK`qY07*qo IM6N<$f|2@sivR!s literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_7.png b/solr/client/ruby/flare/public/images/pie_7.png new file mode 100644 index 0000000000000000000000000000000000000000..728645765b945ed8e9b52c87c06f72d035cc43bd GIT binary patch literal 807 zcmV+?1K9kDP)fQ_RJA2 zSO;u+HX%(TS2eK`>%!9LV35JZ7!wm*h^v~gIKU2=G7uK62~3!n&_sue9RQ(;acD`p zXu!7NI0y$dp-21r>OB{*#zOy43x2kDFTeag_kMTp_cO6Q@SkEnX@P23>Wl-Zjc*nKrpwITgoi~Yr>ka zCLEzgRdDn=dL6w0pWqXGqJ00*Z;-}ljK-QLwvQ54!b%vR!*rMq*CM!q4Qv3asEVrC zJh8t~J*r3b0NBD7wkik`JFycxKr=K$Gsh=LJ838F1c*TlVpI^!j%CNPW5*{}mVY2S zkR7NXsEfL&A7g3D@(1*Q9w@EL{^FIMm7bLzfCX8Q1?x|c&-e9pcQ-C7Npo|Ga=-~% zn1xvyXnef>`tqy+ocj9w?x+fw61EZhwc6v&ljomZ_~X_n6BHJN1!19qS3hTZYWf72 zuSoZ&nUw(e)2duYWyq~xPm_sg~=g#5br@!2M5O`Etmz9{vL?$w@+OgWP+R=b$ z6KzHdb-EE%e zK9(mxW;GvhG3XO+i~_iX`*>WT)zKL`LuY`rH|G?@Ek~E5%TdNsg;XI`U`&%WS(6zf lPU0laQhcq{JP)1}zW_yb)hzNCk0Ss8002ovPDHLkV1ik%ZLI(R literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_70.png b/solr/client/ruby/flare/public/images/pie_70.png new file mode 100644 index 0000000000000000000000000000000000000000..48c20ab743ff207a0285c0963f115a56b6c03ecc GIT binary patch literal 819 zcmV-31I+x1P)IAMSqPk@DMpRRoT 
z2`u(eInH|EM{i_V%yWQhquQu8fWc<48Ek;5VQQEffK|<^W>o{>_HcW+9RLR$aD3O% z{M};PqpA1_DPW+=ZTWJ*nK6@UJOMCNN6Yg{0ftPPe4)^HA)zI-gcgX?$LZtr0HGih zgkl|nb5s3QZ;t_A7A|*6hu6LkX>H(lnE^!86f~Xx8^V=vC0q$W>PQ{AN^Z@2(AGXx zHqqf*%#EwllxFDgU(AhBZB!f8fJ5m}I+Xqh8lXi7VCX8lFRk^zXesl3tL9$df^JMg zlh7mtV1^lHFM{*qBew>w1J3-}gC;*F=Z1<_I~oT8SRr&R1}^9jIYbT-V0YS`cBdD? zxr*0A>bt>D!u5Hk&iF9+yhU!2TjX8@Z7+IC+II$D;LPodd2=cFyj5%!Tg88lJ)YuG zW>yB@0=Pa<;gm4=qVT z*Tww=ego7a*!xN%}cd1kM;8Hm`Md@lX0 z7eVl`2Zw-!)Z6V1L3|m4C83~lOU)SoJn+EtCb-Pzmx8<<1wj1c8d7!Y--&S(9>`h_ z17uOmx7aby^M3!$SeqvKW6yx;!W)UDO~BH?^wLC+uaqZY>h0O05kPQUsaFjFWJ+}X zwyd?+`z`pJQCx)#Sa>zoKU@cx!&-@_OklC3?@o6S;JW$!w3!6xjZv{%n*mntNm_9Q x5LLIY_y`+dPV&YD?*JsD7AoI$feUUhzW{SrDvtN^mAC)^002ovPDHLkV1n(te6s)m literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_71.png b/solr/client/ruby/flare/public/images/pie_71.png new file mode 100644 index 0000000000000000000000000000000000000000..3b1d234fa395ae2ab946ba3fe2e404288074be53 GIT binary patch literal 809 zcmV+^1J?YBP)nNK;_|$NyU#nrm6+ zCe6|eA1qgqW~9ipoQ2(9=e(!MBeaHgPej3+cap<>_XF0 z*LHLY!C9f(*y*^fhq8goZO+twZ{IoJcm6;AhwuAX{@>s~g}vDV|6XtmD#(F_-tnpT zt$>IIo}CY?zKXdmsPeIW#? 
zj{d=}+SLav;k!e}OX89Q#zDu7Ss-Dh zd=7eW^RhuV9cId%T&<_X3c4=-jdh7gSHI3hWI2Vl?dzLBKcI5DCwJH9Xmh^0mRj1Ph5E8fNK zN2Fv*4SdNdW%3%=Uoe&K=^otya5DLV1apW6m*7?;>r&-F;=x;Q+MCu-Y|rrR`fhfx zSl9-a|1bp=DDQgtFeM|g-f}=JdWoJWkOEbXX?3xx-t~@_t!9RS2 z9Ob~mld&uB_5lvnKxfxwz}-61GO!i!o18`S;{ZEfs89X@@R+j#X&w;A&pusH0&I&( nda?f#fGlV*8a{6If*Z_Fx#ULF+G_;C00000NkvXXu0mjf$q;uZ literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_72.png b/solr/client/ruby/flare/public/images/pie_72.png new file mode 100644 index 0000000000000000000000000000000000000000..6fce418f03e6f4ea13a4599fa6820189ca8300b2 GIT binary patch literal 828 zcmV-C1H=4@P)DpqJ%Cof>1OHLtJ7Og+@(^3E@(3 zSn0l0rrGRbA8k#~x^XUA+u0_zes}MA-~apkcpeVt#CX=ge+nbo0(3CLPAp;)*f-$RTLBg6=hWJoe3 z833Z0XeOEipQj!1_wv!sH%35zQmQSu>o4YIbzq`ceVD!@Zn0bJ76T@!NotY;PSQy_ z$p|`V|EjB)S^x}7l?I83zOa`#bgZaW0K}mi??cb=m3$>%2@rBZPRN6GZ~_X{0kb97 zy3dt`USORTM~Cfp_%XA+Z4o+DnUI43ICh_!K_=P+_zSWwI;pf~! zx6myFT$GD)Q9%SP>VfP>x(GVA232jy`ji!U-d1m`x77y`biUTG-nEBwd%Dp5Qjq{@qz=K?Jv2gmnkm0ONV$jpVd&uHG0# zV~)5e57_c5Q+=Ss_dMX2#L;D{oaqDfqH?+_f+Y77=AofdY zU&cLv(;;a+)(G(KXIJGf030d5I$Z-mfCk*8FBiVxKjs(vWi6@^0s5l=0000%NK;`L$N##W&eVx( zR%DAb7|fLHg2g1V)g0*;v%pnqSBR(&W@!isHVcAc^Miy$A_`FwY2^qbWf)GO8)}!$ z5nkr%=tf>Jx6vG)w=b#DayPN{`*zOz{CR%wbIyBC82JVMQ^G?n;0tcRiLdBDD=?j7 ztnZltCZxJq?QOuxY3FF=04-H39<2ws0yJ!~xA%qb6 zZWre7BX+*DG!3o*h8v5_xorS@Zmijw=UKhVLEoZdG@;yaG@j@t!~9yIzzn$`)k_3%J#8wOj3vpb0h50K*$fItw27-)LSiVY2eu z^T2bXl2OU11i(1PG44fhy1dWSdjVi0teKV|&Nvyg-D2(po^vO;liW#wi*>Os){Ef8 z>(a^Udhz14CDir|*ebenGL~sie0Dphtu?tJip$;2e?n6Ckz&B6(%r!+pyqw$7^Q zX}IVq=5vC7;?!7f4vYZ9RB`Q53Si~7wanJQg}80o`ga`!67H(lY9&A$S6Wx4dkK(0 zv5}9Yn~7%F}O0MeR}oc#DE00000NkvXXu0mjfdrNdE literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_74.png b/solr/client/ruby/flare/public/images/pie_74.png new file mode 100644 index 0000000000000000000000000000000000000000..15babc93d6cff142bd3febfa018beff0f5ed6811 GIT binary patch literal 802 
zcmV+-1Ks?IP)m9EoVjv6ACIsO&Nh->O+lInIT4lVwOaK^ueIQ=u6-#rHC#YNiq!b z*1`*?*_t-bCey8sV_RR;q~&g=>;LWWJ`d0Dyf5bx=AI4y(=b8}`1e394j~7adZM%U zXaMr6a`60FK$lm3u%HPTI@dn&SP1Al^d0&RfRa%%N(Rt2+D6*|m|=$5|8Zgd92 z2bjMVsv7--Lo#T;biTVTL43m%@JjKt`izULuh~mGJ#AW0ivF$C+e2~ zv~+4o&2``#^GxNjM&EZ#j90hF*+BT#{IHUTzU$c%wuCJKqWh!!qx(JA7d>LTR<9n} z>_2QiN2kb1r~l;!l|f}t0S=`@=}`Iu{E8lY0KPb7GiA>LfX&9e-+I?93%t+BGxCf) z02C-tUVzp+y?e9{z!f{zFnS;eVD4l&WeU8{!FTW-e1LY*F52YyY|jgPz&vof zCDBwJeE+n3T0SlJ0<<^1tA0)|0l40*`~Erjew)}PwuygVdpyMh`-&U5I{Ot3S5+zT+iuJmce zsn7wLtc-Gj)C(~7QfgLa6%bytMZ)P^IT&$Km{pVx#EN)@84F%-i+<{CnvqDI0wC8^ z3B|XUAILe;*7flQ5Y-t+#~<;sgL8=yCC{W@01}6k{MJXy2TPP*b3fSV3G)e@OI&GA zbHQ#P>E?Z<+6XY#u}>!*4jvT25L_;J3vi@u+j?ezxc5TM-tNWg{r=sWj|qeL1Xu_9 zO1g6aOWmtv6$Shl>q>d809;mMqrL)&SexA};sP;bUS$y%VBX6jauvX8af&mw5C97- guzZgT9Js>y1rFCNLIYwO0RR9107*qoM6N<$g1hK$VgLXD literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_75.png b/solr/client/ruby/flare/public/images/pie_75.png new file mode 100644 index 0000000000000000000000000000000000000000..d82d493df23a2bc838b54cec6292c1786e12eeb4 GIT binary patch literal 731 zcmV<10wn#3P)rNK;W9$G>Y{c9H6# zDB1?w0-3N$xJ44AvbEA85t4+UB^D{zV+o6z#~^ zEVf31@^0-xeNs9x`C08DeJWoU3DQ^07aEPy3DP5bZ#K4;5!}|+db{I^bb{M$Ev}9S zWdyf=Hnf<+fV@{@t{L4pF?pY+p=J!&^1)DNdX?{&{MDt5?B?(nAo{S&Y^`5^!QFk{ zg_oazYQJtlFDsMaD%FfGsow#p_6^
  • c=8XDGEpH@s(x#eSP$At^<4G5ikTb`G5H z2?N;)?rY?7;)#2AeOm;1|jr@C4xmb3d+r(B!!AVh&BVm`XCA!qWK|g6Csom zWEJ*hYS%ey)6BWnZ0n0TY0hnGuHUzF&j0ZI?sMN*vq(WJ#?g41`W1d(R2-1^I$}fT?HdnR?H4 zuixO0t)?3z{J_QLo?K2&vnqV&s1~Y)Y5^QIjv7aeKY{~jz(Zira$Rx0IB>B!azk55 ztN(GM$S5+30GMHh**))ZNWRP8$Mih_W^^BO-;MiUz=J)+C887n@W!9GL+lVc#DLvt zciNo*2@>Q&9W<-j`y0K=QXV*S?xW6=o!OfH#odIdz6h(w~*Qq%$+q-!G@1vi5y#<;reZdSoOpT7gF zk=@lfwZRi)R;F}iOe`YED?DT?lmt(Z7oYR!$h=>mhal&(IB(|=K#z)2#)?)?jIkqW zG_?d+JDJcfDPKeonb4pW*c^IWT35Pyg3(3Sp5HqLKnNj(iwVvW78d6c6#!9f)#n;s z{W~#XYpwbPA6TP_O-@MiT=#h&Tz@6&)IkvtC#|P!-43`)`xVxW{0Oi;tj ztdX`>=S15KbGdQ3^+slB-EFg6-*@*szvuaV?hfuAj%z;nPZQv809&|TuNXub3V`Xv zp}u|>V4c#PY(59fqUTjdG~h~{NFTcaaB@X8YYjkn(9IMb9}rdwD}|K+&%tx>900@+ zL(JZe%*kIQ2Zdv;jV6+#^sdL$3?v0zI6A1}R$7vOVdMFp*z8uQ$uha=3Aeo6*i_XS58!Szb?9$C~8>Q5ZNZsP%rCO>I-#)W6*5 z;?K;RuuLkp8*n}Ukw@?U3s4`URCab!)Nh_3=U}KRti^vYZdFS3_PK@YB|!c_dQsMW z{{aK*k~VEQ#Q+pP-raWKjQ@aQYi7pYQU+k~q~b*CDj*wW=Xk+fAc6#=siy+=Cp09UD1GkFK#bcMx776bg=tygwV x1LDD~`hCZM;6up+X(RvW&($_5!qdXrIf6ADo|iG)=Y1{+2}sZonQNv&YuEJDaF5yUjf zO)9PJVy?n%p_|)S%x<(Ib#BvK|GUHcyyyAxKAgi#%*+P=X(+JliZjieFaSU47r1t3+Vic~EF z=xXfK8>GPau2T&auEI0BPWs<)6>pTTd!BYS<6U{_3&0k-1#M9@kWuw*6 z!w7vsT4M3yu({ZDHl0le1k?d_KphV76FTt}u<}lpR$3y5%_Xi$rYbOpUvC%LMRpMY z2OMyO08ZM5yWhM9g1O&r*ao5pQsUG0Y)cEjJ|GT=17g4*^auUH5J2aX=gI9aqKD0e zsNKX$IUD&tx6-Y2D?%XMSc4M!wG@_sBi+Ki>2AS8UEHI-W8L%xL}Z zdknvzA>II~cVwqb>LG@8@|ydS#Y1-y1t1{aJc*_CyFEvu?Mc z>|pGGiJgTXN_s;86XeoNnE;^lvJUf7R}V(5mbkLmzHQnc4&dAO0#?L z5a1}*=eO1a-nib|FOLB;E#D^`mjL2DtA(2Z(0w^Qg+~CUwAnlx|36CH27!=>lD}q;;X9paRJ>&2U;tCTirC zHN*l0Jz|S z>$n}m*&`m#)b?q<0A1AHOY5EkmaM+&-b}|0yxQXm7Jmh($ZbJK8Ud=Fs;BA!o|dQO zX#pyq%BS)tu!F|D#+AzZKu^woQ+l@RN90uz)1TN3AQD0N2GCMkN=t!mX}7dn3h+gI z5nlvwR2&sYWnFLc8yrs49@S(6J?q_rjWwtYj}lhcp4XM>%5-G_T|^hrMa}@<<0dWw zUAGTvHl1}JHkK;?J=|-Km+1w1fnET>00RtmfCC%Vb6PJ0mUf*^8{)o;C6@yJRdwt+p8^HADYeGxi z2eGb1#Kf-#!t!pE)GBSm#zI`pJ93<46JknMeP~G{q_gR?&*4Bq)aO%INID2<8Yeh+#oKLCL#j3i@3$sPaz002ov JPDHLkV1iBKd$j-n literal 0 HcmV?d00001 diff --git 
a/solr/client/ruby/flare/public/images/pie_8.png b/solr/client/ruby/flare/public/images/pie_8.png new file mode 100644 index 0000000000000000000000000000000000000000..2558b4087c016c4c6c9d6a3b61ca4fa765dfea37 GIT binary patch literal 799 zcmV+)1K|9LP)_HlbY(f?&`rg&HAC zTO~<7&*15S=3-u+-s_@`7|kEkXg}M#_kR37@BQxX_Zrv{_)lrPYJqZaRY&Y#2RlGs z&dYfj*qhm#*_#2(ezV`~2M&}2@nig04TAeC`z!k^z=pUX zZipACQ5hUQhtJ^ygt!nF;>GjlzJqj`F4N`ui5;Vc)Q}ni=p>z_la&ajkwzNOBu&z! z`iY%Jb7?Nk1z;Q7*e)SRY{W)v05zyVjms0Hy|kD10>mN~u}TPLrZQ8Rsml{9%0HSJ z&5V{1G$m6qFR`>m`Msvs^cKcNU-8=D+ThwC!2Hb5{M9GO<%Wg^25J|T<%I=RJ>vv9 z#xbsj#wVmd_RU8DzGJxa&6Dcm8z*e+=KT}n(+k8!aZy~XVe7XVUp7&oY4`5id(Qx2 z?%jl92l&3n;_##L;n%);9(qr#D=INjh(Z)#y?ecTy}Jg{^LVfuGeQ83&vm~41swiZ zc=hFTVcI!A{Vn(txbecBy>tDnPIB}gOj#)_WdSrxvou==(IeRRwC{HZaHs?#{-=QJ z`SgcRE&#Xj8WSa29SzbT4FV~5%AIo8Pw*INOWKmQ0N%rUcu(PZ*HZKRDcJ)j5l0AtC`VwFP9)k4IGDy2T7!Ff=@*y(wr6?0o!^@(CH4t?@ zxG@QJuIB1t25m=^ldBgm(b}C{-?wxAzdygv`Tx)1{E44i!G9WBxBvOt@iB-VGwLP}f1GJ^2ap5$;mdTe(GJu4c@@5qi;N%LK2P6P^ z;DP769pUd5lT!;8Qzc+YyrEXL0WRmt`{i8!pNm!0Q8o|-Fz&_k6R!h&GvCZN1B@0% z3!^0j;OgAFF-<*SBd*=8c@g*)5pQGGc&7knX~)xnTkhimr9dfA0xSc|z%saxdwd7U z&x;kNrQl)Pp_9tzwVR&Dt%KG<>mWd>DK(|`2dKk290e?#rrULQf`{!ixSErlKkd8D zCbo%fVgPp7VfO-bR2d`nO@Pa?a@jl@Jd98=yVwN)@hNU7q1$&oC8ea46xeWWxHeo~ zfOhR@Pk&qJD@WgVo>R9BfB+w<{?BtZIvbshUV!T+-LvuL(1A4dk@kX@fzKlqq=HoJ zUb}t8218+?KoB}G)MR8#PFk&hSwY%-Ql+pxd@y=nwpRYT=g$K6kG%Ba3*iH@Kgg~e z`{)J8J6U?EyeWLZxRS!JWo2G~(F;;p&ICY9N$wV%-a8n*Pu6?92H>6*x5^&w-uL*c zBWD-$Oo=6hCtl)Myhtsr@d>}cmb+}jUPmACp%2-kCX^I|PX0dpciN1JRr553)3#>-|1% z?L?1ioB$lv#udF1uuqLG^&5fp1zps{+SYln0ib5vNfE$voH1SO0=SNY n_Yd6!5Q|vE`fe9EaF6o~(b8 literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_81.png b/solr/client/ruby/flare/public/images/pie_81.png new file mode 100644 index 0000000000000000000000000000000000000000..979864a017584b124701b5af43b593cfda60c456 GIT binary patch literal 824 zcmV-81IPS{P)o!Ysjz{^cHZ+moZtJNbKoV6OTm8{Cr1M;g=BMw3~gutzKA9m 
z&n195uzOo`BB13xzV*NZFqVJ5@i81A`n^-=RRHyRjEGwe&^D!B5%d7-*pWHWxd3RO zfo6F*!rCof%#uBv90cSS&k#o{0E2p_P&R7)dpWyvZf@5Cu6?4xqAGwcqs!9xf&DOI*P3rGe1JI0Xb$K%omX}$JC&V)UZR)iCDs6^(29CMPUOiCFjo%y5z<{@ zOI{iPGICM6e4Q#(6{-papoSW1Gr;+#vBD<_fMNd!rs}r+Kh2F)*N1{^1J}X&y{khT~wQwy2EwmS1BN zAay=oz(!IdUJYoSh9~>N0ON-D;fmb=QAT^~(*;nE zaSUnq0Pp;r4F?s#+LEA@@F4)6@Pw!3y&Q zDKWhZ|JHOX=Rd48$(*SuEfds6CPfBOQQ1uUhmIa9YvDtL1x4gTN#&mwW>%Oj%y5## zSo9$2s%?VK(xz^WuDxVR%l&cY`o7(B&*6OTIrskVp-lb?{?mAQ8sI2Ymq&cWV>AMz z57kmtJTRR6w7ZD_%yFuY7smloF!;l85uoHP^<$?33}zyIb1~r8mQ|fE0(>q<=0vvv z2*MU@cRU@%*)1MFrOod>3}}Z-0t(gu@aD8j0Dwh1g(K_J0ouJI4OOS7+X*otCd2?;L)XwXw&P}f2gfgH zD|&CZ51R@#D=%#y!?s>K$}~ZT2$gb%?Zby zNN4Y7;2C%Z9snbZFxmlr<7-y-8~{j*p+f89K9F{&@Jvac<9=4Ym2c$(7ScjmNIO8I zu$fcGaUVv}urB$>0+hC}TK*>I$C=7a<)(5wz=^sydtTPe!^+W$+fOzFpd!j1z77u@M~$NyrrDoj}m=)I(b z-jH(i-+% zfRUTjolyz+lQ}I#)xeVA&}`lz04%V;ay*^uzy;1vb=4hAV%y;&00000NkvXXu0mjf Dk{ox% literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_83.png b/solr/client/ruby/flare/public/images/pie_83.png new file mode 100644 index 0000000000000000000000000000000000000000..3ad2893fc474141bcaec0f5ae398004a80cd75c6 GIT binary patch literal 814 zcmV+}1JV46P)P)wV$S!kw{5gbz%e%CB9W(F9Vnr%nD}33Ir|u zSE(hn{3swlqjswVNZD2q0e4R~V zlh`D#+U*wWg;I4?4?r5eYwQQSm*i53jcseAs5ug(wFjkbZ1tTOqa*%o>L0b6Ag3l_ zd{3h91UX@%J4yQ&5DXlYRUJI)J3(G$a*Cv30YUDi?L|9l0eW|E75~iYi7~#!hosyD z0v>Eri24^0LIsHTDE%u5009ILxW*U%7hAfU+m8T7qw;cI$3GCu ztm%C+3UCcuZ6ccc{G#tc9xp+f_6Uf)aF^Sf4bb0({s;?Sz7Rtqd@LRWA}`iex3tVJ z_8YQPDAGo>giK7mPwmYk#87^{zodu|{Y+kAN)RDBschZmIzsf%bE1<3gcxqClS@p5 sSfdQN!&e9)7Gfb5&$sjVJ?JZb0u%p)xrobj_5c6?07*qoM6N<$f^qX}@Bjb+ literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_84.png b/solr/client/ruby/flare/public/images/pie_84.png new file mode 100644 index 0000000000000000000000000000000000000000..51d2c8f92308041abfa06f567a39f89a7ada67b0 GIT binary patch literal 805 zcmV+=1KRwFP)LYQX{Ab%>@pvEu;>|c&%?;~El 
zsI2&w2dLiG*Olb~7P-1yanFH{91H9Uy#buef5H~k10Ews7MWgq=%B9r^WY~>K+#k( zl{W4;Y`r7-zRQ6hfFGUO(VugGiDTlJI1T_;6BLRm5inO>^PrgNIB09!=gVvP3map# zNG(!}07^&+DWN^U2{fP*P<7S~mY#7U>_7#(As#@VcazUY>v>ItCPEVdKnpFjHh}tc z*;9!Tuuz(3GjV%|(bEE^Lka*l{n?fr{`Pegn_^RJz-%#F%oZC!?Naaa?tUkBuI)~t zU<$xN+APg?=XwL*z&C8#h!t>cZ1hoIffHb{S1(nz1GLlrw^%Lq-)GD)W*9Rz##Wyh z>CE|fRxSWN@g$SZ>*Q|3;fTd*^pp2yxQd1THs|$yq3vx9ngy# zZAlc{03Mysyprd_KeSI~LhFTY8$j=u#Pk^!*50&C;?j)w)^6_{gGM-~bG0W7y|_}y2)TNP=E zK4d+=>00000NkvXXu0mjfrA=;6 literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_85.png b/solr/client/ruby/flare/public/images/pie_85.png new file mode 100644 index 0000000000000000000000000000000000000000..04d27d3a8aa646471c0fa68071d9008582613d08 GIT binary patch literal 844 zcmV-S1GD^zP)8jZ_O)XYNn#O{Zqyt6=I$F_9C8UtlMM0RP?V<=eM|6_vqJvZvn5{Aj78RYT zRHnU74K+<|wtCl1C(<@G(|&g^|Nq1DdHxT)JTmXM@SjHLZGgYXJH0}rm_lfDJZZS^ zfI3s*)(duUTO6vJk3rJa#E5%9Hc@pTa1$ug;**kRfhH%(J#PsN$~F#+P6sA%n%n-j zlXdipTaGxrC9&Z2W;QL#0BLf~OmRc!ClU~$tqdyx-O236!>3`uVtvin6@Tvcd&Tt* z$Cuj|pe%N2!qkec!@lRs)3?LRMnUD`SN4}`aGTv`x48?zmc7oxH!i5`pE+&e+OC6s zk6b;sc%==5k}-j79;KWFVQb98xR8#oYqmC9o2{ThgNAl~GtYI_ys+Kiz3RF2DX;q`+1Z%Qw{D4I zJ69~5r*>)*dk-dmHGW%s>d)^sKxJZFUh?|h11e`kr^Q-)05z+URH@e91F9uUX+pja z;GoOK1<^M^7(L>1Xi?9>6&d zi`EzLTl%+z?K3=w;m3E@or?K)FrD?l;VWlB`AlmK&23xv`&bCfo%A&E5`+#rHugpk z2s&+f*z2x?0z^Z_I4|fe`Ib9MP*oWXQB(fh-{I%hkCbwh-SA=ii~En3L#^?O{Y({n zG~CNBx(wd^4L9p-AQTTi5quZ=tsB1}P6b0u^LMN{4+By{Bz-ENMvdD4cAW?IaJ~Y! 
W%oexF(_P&F00002W?d7%Nbh3fK&H^@U4U_8*% ztyv3v)hf&CmB3hS`vv7Pz#9AcsZIirErBT^1pr}MXA<24{Hw&ouJeHJ?r@G!0)T}r z*g1PSlCvWoOC3 zWfY(r)qChAfF<9OZ^?HC*pCNj1O~HZ9jQ|H!#oq@EyV(Wr1Sr<7C>ufTCMpn&8AlOfr+qBy#{9zxRUuL_T*I*~Y7hNCT*w609huXEvme$7HwY=HxyPes_5HSqvN%F=L6^9Q4Hg>U1B0pH~U zWn{4fAXeint^h2>MWQ5k{(vFJGap`k1b_)9n2vyx@!G)ov?>D#66V~l$ozLOR`iX; z`$+&%797rwx8HaAZ^m5ISJZ0*hEnB8=?#EYuTAbh;W~(-;6CSz0CwNy@tfrUQ5|Gu zQ)YhOY2cr9;RvLF@mjC<%R0c2-Rf6&7Z@*WYig+nY#Jl;TRK42FKr6m4iMhaxgrL@ wsNS>xkOJ^xvAjYf0nkAQefDy$1Lrxv0N^qpP3%S8I{*Lx07*qoM6N<$f@d0bCIA2c literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_87.png b/solr/client/ruby/flare/public/images/pie_87.png new file mode 100644 index 0000000000000000000000000000000000000000..c67e639e912082a097450b70b829ab491f06afb7 GIT binary patch literal 831 zcmV-F1Hk-=P)f~(S{Pjk64w+{dL($yuhuLtD(jf$RhDtENrV+Bp7-2*dmx=^6>HbWwmkWFFsDehIJFzTluaA-wt`t_ z)|qu<0lwu3yP+*AZ)H}w<6)vARfXCMoEkQ$_J#q@Y(fiN@-BIo957(OU<25%Yo6U$ z1HT@&SL?4i4kon9tCTt5oV2a`Fo3sUv*5D^&|<-LpJKnq zQwMYERh?gcacDdY=oS*SF{WqgfbM&g!OQ$@042uct!ocW9nf>TDm5n42GHY!;+rxT zL~+vWdAXAZb5<=bRX4+o6_KT?%Qk?>NFW8~h8=rW`f~Ds0cZDGPPhP4q)3OZ+x-^z z=GWIha)r*gef!dV91oKQ7xq@Qf~PqAol-n}-tPb4%$-Y@s!b3SQkMN71B5^3{0uxb zagZo8Fm`?)%zJqy^4XcuVPme~urzUiEa>0Sqdr z5W3B>1o(nmc}!Ncj3orZIjUPD4`3(al8hHn5tjYiY002ov JPDHLkV1j!igwg;2 literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_88.png b/solr/client/ruby/flare/public/images/pie_88.png new file mode 100644 index 0000000000000000000000000000000000000000..b04da7f8f3987ac29b160f05b12d4ad897488107 GIT binary patch literal 790 zcmV+x1L^#UP)Feg8Y>{NQ}f?{MI7rly1cG+b8$91k?(71{xdcW+Hs z2Vi_qEiQ2f`h)VHZ@mu~;!`$;T?P6MXDgzQ1IBlkeM)lyOIlxU_W*#G*oA2Ox8vA* z#gkQ(cBl=Yj-GBUNCg<9X>i!h`Q&JZPZ^`ZZtg><3om#P!I^=Qz;39m_Hnyj3_4_B*5RP1PR)0a`m~ zfYK-r?CWqIY62QjT7#qU^{9=4k(V%q#?2+oitx%o0s_jv6+btfcJ@r(wIgTAVLs}EdZ&ldZ?>fI$&^9k-Rbo z009C78*nz1~ ze2y^fTs(*yAdCy;0V3O_&Gl}8M+iS#*gt>0-M~Mga1T1*o1pi0S2r*o*5cK85}16^ 
zR^M_CVEjI1j3fbWQh`b&0eFdtr_$SjfTpy8JRQIr7Iu=&0DR#K--WkZJaCEg3!9}8 U`k+MmApigX07*qoM6N<$f+bB{M*si- literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_89.png b/solr/client/ruby/flare/public/images/pie_89.png new file mode 100644 index 0000000000000000000000000000000000000000..b63660772fa51ccb9641f8048cc3a52f6669af5b GIT binary patch literal 815 zcmV+~1JL}5P)SNZ<`A>4OPUHZfd5rcr^Fnv_uKSJ4rYM4@IGxs)zi)Fv~{ z+{8KE9ILTub9COm2#U(wYPOzl=kR}de?0Fw@Vb!2;6Dw+*#O4_RgmC0U{K4XgQI{l zs{YaC6hJpT774;pFO+Rottd}EzfJPxoM`Flb&e*x6X1|lRgz?4hr=A8ZZ zgVqc8cSXFdW+y;Z=bbqF6F>;#0#gH6V1XqJFwZ@Kndt7Y)#_Pp( zwBXwL*8nBXZx*r_Vbtn5W4W>1SiY?3l>>A0Z;d_aj)S@Rh>Gsa0L+M_zzll*c~hz> z)s(vQyy_tnEj*Yg|5Tzs;Ohz8|9irAO(5?0$eQ)P7NC+7DuMZ+pnS4(zzqFx@^5cuWRF z23A^GK5e~R^_1uDVS9I=57Yyf#R zujW-?GrSqz3JH9%K%MOTha zY(ILt-mbR;SjRfn%LtN2(nuNsnxPq*IX*$^p&sf1NP!ecp^RWQlnrG=$0t^lKadS% z17!qF#Z=5=ENxN#9<#^n*&P?X;-o+6Px=8CWI-0JJwYzl-{0L`zo@ECO=;RcPLN|9 zkW58c~Ck0G`V`^*3&YA_J}=VkJKT!56i{#L(L)JRQ&bNcMk#H_Qdm|tMqzT z;O@=)lVt=2uizED0B_@MyzTHrAz6M|nt+DF_2+j+0r6h2Darv21K#j88L)Eh+sa4a z%y-YT&Za#ZEY<30ltyV3SngQvSnfDP(GoO|oqg$i3<$3rA6$E`SjAqPYt6OhS^+Jt z#kKh1i^*+ro7@J71+gF&isvi-2gQ__5>tQ@Q6fqN;0O6Ze(>NzJHHoYL0M21N{ju4 z)k=nCScWm;Bu?UF%osPujd8}(tLfGBDr1Sc#9U&Iv2;G2Pv;pkVn)n}F-BTQ3u)Pn fua>I+gD1srYXPToEb;{%00000NkvXXu0mjfD3NQt literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_90.png b/solr/client/ruby/flare/public/images/pie_90.png new file mode 100644 index 0000000000000000000000000000000000000000..3da1cdfb39bdd37d5bcf34cdcf1513180860e1e0 GIT binary patch literal 811 zcmV+`1JwM9P)#JEMaM4nI_n@ z9BngoC{4jRahadpSQBS+qv?9?-V^VS|NDLi()>d3pN8ygfa8H(5FrLIl=OS{GyK(kkxEO`JJ_)5da8~{UTL6hy2V6v1G6Qk@5LVerBQ)#kPZaDBmke%wXp0cmx+ zA|uXP0eZ1wD9OFf12UVrle|zXzzw}26FbEKS?4{w)o1Nsu4&;{4zd8oEp8O=jujw+ zk&Rryzh3J9Flz09e#SgiDFCp+2HV`f^0(OZe*8@(FrvDYbnLq0VeIy*pAAxgd5ZHk zEZuV4_P?1NZJTNi&>qc~W|jj4OE*4v!+sE1zUi^gMqr!n_C!N5K&{&n>SN_~W;5_l zM$wH9U{c?!_|OWB(_gwj-3QDFI&Ug+0b*L$G*|(UIyZZ>WPtnmSV5{B@al^`o}>h5 
p9lojlTmT;MfXB+)*$-Uf`~swVET7FtRhj?*002ovPDHLkV1oIXb$$Q< literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_91.png b/solr/client/ruby/flare/public/images/pie_91.png new file mode 100644 index 0000000000000000000000000000000000000000..8ecaae9089348bf55e77b084b815fe1667058433 GIT binary patch literal 817 zcmV-11J3-3P)1Z+9_UKvK`oIaY$#D4S^W_h9m)(*P)Z_1iBu3&3fVBs25Oi=$2Kd|N{zh8 ztT{HNwhp#=K+gWiS~Q)LdR^cDp7-~>JfG+Hybrvjg~i}M4cXCvmEyeWH&kdt6JU^z zWDa-%`sUUYNjEU7?N55j2F#JV=qVq76hT$dssVS=?oi=3fPPUZKg0m2x!Y?)GyuYJ z8$By8N3!>bO<%_)qp^URUtD~0D=>d^f;Dz`^&KJeST)>2fTJxt-H;1-25=wn=Klit zsryO(eKs&jEqInez?aILERT~a0N=!jlGrUvY-rhzna7o*S;WD9M$5>1csK@_&Q{fr zJ+}uqf_hW}>cFDwC;W+n{h3LT{9tD(06p~3Tb^5862rA&!GI2!m#J^OZ6prl6dV*A z_86de9_JS(0qCcFrdI-U<~i>4Yn=dVmzj}sk}0P`Mz8rRS*Yb*VXv7 z<^gW)8|oRu)^)r8W~K!ymoXNgk`?H(8UeFbll<=J>Otf$j4Vzc!0oJJ%0>XU()B|B znC0i&4g8fU^r8ze2#4zWW&r)M4nwmDn5}-fsjCDqZ~U-XTLO^I(>~Jo0$${ls~Kj1 vem3r0avI>#!^#YZ1mFke???IXfNPxZfHoS2Z}mb600000NkvXXu0mjfhuU;4 literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_92.png b/solr/client/ruby/flare/public/images/pie_92.png new file mode 100644 index 0000000000000000000000000000000000000000..615abaa103aead0d3a8659bd33d3478ad061037c GIT binary patch literal 809 zcmV+^1J?YBP)wAKgjRZ?1wn~PVR#5j($tWJk`*dq=E90jhT25LaGG;E zk!;Rd;l&i(<}z~YCNo@a)13CbdtV-SKK$P!$?z-ur;)cBu<^iYj6wp;eU*-gCjbSz z(l7rmFvj6KMYsSXGf@n$ctAFr;t<{lD0Ws3Ts;TOwMu=&3;;^>pu_U*NS0pW`3l9G z57U6+%%z4j4xm>kKgylgF%f}lmU}PYO{z$Fcp9MJW*dn1e*n&wzN-~6fGN!GXykXN1bJ?Ojfu$B2VHB5=(0eUfek`OGPH@*k^ zkcuc^sxzG9ODO^_98Hx1P(cNi381=0#vh^si^Ix#dB62x)TUTLauu+xEq|t_ zA6U>Y=oj=RfU2@rcOE5L4@9}OM-o8>Xv~{kE$sj`J;^pJ7(fZVh#>_uxtd%}t_h&7 zygT&8;f({4lUVmTR{&aRp{}k9u*o|sF75(IY8Sa1&3L_*r{!sR%g06!$!KCsj$blh zDAM(5yRA1#a|H&9L`vmfCP;qllId2kbucQiH!gbm56{X4v>(C4VODtGSsHyTR1+R) z0_d6$U%+47I-ujdAPHAu0%%|2pT4^mpq%i^AX+vLW|O^-&)Wq!|~;~K-v z0+-s}6XqHn0&v(5VmtQ(6t33}B4Pa?N(_6@Hv}MtguH=j!0~NR2e)nY^%nnb{q8ef 
zLI|j@%RUWV22{_VUTdTQ-@d=^?R^61BUHlaT7bl~>$2Aa_BF@38DhZMA@X$`6QB=z n6ndutU?Chl^V_W*xXJwlJ4GXX593Xu00000NkvXXu0mjfHjrr8 literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_93.png b/solr/client/ruby/flare/public/images/pie_93.png new file mode 100644 index 0000000000000000000000000000000000000000..f48b02f81b5931dadeacb747ef8dfb4770b728be GIT binary patch literal 809 zcmV+^1J?YBP)%~aH_7PQkB!%&;f+SdQ$InUwz-uK~wLm~6Qe;PYm16GQ?*(b)(iI>2vsE^m3 z4opTWXbn|>KK@mDb1Fb?On;sz0qnG!PkJx``YFHR&?10Q8!M0~0Vl-{fv*Ap6M+a@ z`8vvKTdbE4XAU#~Lo#WVBoi>^PK13j|MNffFtE&!sQ57s zJRv~j-vbQOmj9D~icr|rHa50Qi|vSe0lU$5Fq)63H~Qvpp2Y?9eY^LESaCQO$3g2C z7y85mkRdx1AsMwD(4~H-a!;oTpwm0AMqh7$n!w)0&$1qjmMxNrZvoDSy<7d2CV=1~ z3b6o(T3lJ3YCRw)Ax~Qx41fg|Sf+sUaa41S+qnQUztr`Sm*rqAzdOCM*8n|hlfa#| zbbrP7pqo5M9@PqPTWd}zCIPC59m+3UJBWIhr}Nqi@aJSR-5G$3!Dl*{IUMC@QWJIay nB7}S(CFK0-uUqwd&{p{gs7!Q0sW7Q literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_94.png b/solr/client/ruby/flare/public/images/pie_94.png new file mode 100644 index 0000000000000000000000000000000000000000..465007e33e2bc4aeceb6ce0c49ca70a8a388296f GIT binary patch literal 797 zcmV+&1LFLNP)(pd-{FOAEp zra^0hHmu+R*ghD%baS95?^980E(kIwws?y`R2iXHXM(sU{&P|hOh^jy4_gg{bA~&E zKThCii#yjeXlo0>bV~Dd{Xyuiv)rmT4}C?1F}}|NzJc$ZoRX6}V4QMl(2VSV0JasC zpM9W)X34J8$u)o1CVTOzJtDlM0k4&+;`v+q*Z;T&I}6PTZ}LYD_AAYSSzW2Q(3xV& zdVR_fU?UZjf+=YChP2Hi2kRU270uaoOMrJ6d2R>Te)n0+qleJ_4EiKTOh|uelZqo^xj{jkw*v?(pA?)mDGv z)fOnev15;jhMKC=~MVTW#>4+i%s;;2b>wvxlZ-+4?TD(KJ|8=qc* z);Mir!D6rp3{!Mb&~vd}-L?^2K2Eau#KZWYgznTb@Jfs-Ul;=8K1dZZC7|Fc4s>Mb bKqu!rIkX+V-r$k400000NkvXXu0mjf4Ay%x literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_95.png b/solr/client/ruby/flare/public/images/pie_95.png new file mode 100644 index 0000000000000000000000000000000000000000..bda6ea9270ffb56f9c1dc61b25aabe0ab8e9dd0e GIT binary patch literal 798 zcmV+(1L6FMP)AL_t(Y$IaGXNK;V&$MN5tE@YOe 
z^G|lSx!OxPEu4db*knAC8raI+W9P@9|S*7xn+d+t3S&f&l%X6M3x8pYFqmGXA^jbW&u0L+i| z{ry*f$*1?}&8>j-bKm|~QoufAGLO*#;+VgN9sMatW)v^XT_5T2z z+WgY~WD1z*l8JI7SG7wHBXjGXDcb@6w|vSzt_wg*#z@(Rd|+6f*DdN;d)VT(scuYb ziU69N^@bx~xWDv}j6j19U{qJRtFU1GVe=UR%lMgc00un7T?asOflk`{00}MyNxoFv}%bWy&6xPKl8BWZw%1Cv+Rb2@9?4TP$t}cD<^xupn#Sk}` z2^h}F^m%N+?mH1_XmTCo6|wUqFCAbL)s2dLzz2aH!Ggb!cN+M+0o0%zFgNIe`dWdn z%J%BprGTa4`B2*@VAeRDVP*o960(h*1#C1XDUX~70xW`R(HX#(WQJ^y2f%|G+Lfns c9q8tM0b}MAs&~6cUjP6A07*qoM6N<$g5OeVm;e9( literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_96.png b/solr/client/ruby/flare/public/images/pie_96.png new file mode 100644 index 0000000000000000000000000000000000000000..3742638c32d4ea0b06f1cd4bb00ffe59c098f1d7 GIT binary patch literal 791 zcmV+y1L*vTP)C{S_6EQJCgK2$;j0(XoMIUllDN1Q#7L|w%f=RIH zP?6H9o0c!kh`ANfvM-wAa&x|H{lA^VIrsVT9L~L$valHZry<-8a1;-xcT8gdeSpb- z;HLU1Fc;fd+r|LC|N6k|Rsq%)L*xhnP>%*ya%uru&ta{Q2JDzQcKK8|;KShc$GipL z4;~IWE=O7K5nHOprNcJ>eRaijZWCZtPZ)JZ=Q~1#vufC-KyY?-ui^+mHU?h^i{1qA zYi5t4dmQ*&QTFg$+=focZUScSoNdqnOp!n)^l}1dp3zB$rhu{8oUvs7*27jO?XFVEXv5-qNy7%-$(KDv~){jjAN&dVkF^8orGM}Zxnsbk2feFa$S^@+n1 zt^-=18N|b8z|=J))27=28ddFr2A%6*HudY;v!AS}Z}Xb@(WUWve{XScZR-IP8BIts z0BT-xNyZ6)njh637XwhFf%BX|fVkq#@YSzazxt94UOYY+=>;s5Sfr+1?&~Cvr&LZ( z{^N)#7P63@<35Vu#Zw-en34c^zu~H)QkUn~{E}Ge^*JLgz$iacmsGUQ5Il~p5dmfFG`0++(j@JAB>up1wnm3aWs zx;vird*dLF!u=|N1PBv7FYe<43^l(jUcUDFb^mVt%>-Vf6)>F~?9liDbIeYUwgSMC z^zP$}tH8n+v(Q8U)KBb|$ol}jF1b2g4lo}Ga+9V2dR9<=s0Kg;%5mFqIp={c&L6sB V5W%AquZI8t002ovPDHLkV1iY7ZyEpq literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_97.png b/solr/client/ruby/flare/public/images/pie_97.png new file mode 100644 index 0000000000000000000000000000000000000000..68dfb67617a32ad4dea29e23b4f6a9ce1e4096d5 GIT binary patch literal 757 zcmV|y;rZ}?&Vj?D+3=sn_cUO>1TOCwLnqn+tFSku zBL=Wkw6r%q1Z**SzRnC#ja6Xr1Na7sWaR-s*b`p8B?pjP*x#RV9q`T$-50I`5QHog z%zqru*(~;cA*400DJsYd}jT^!R9vHS<;=Y17JoUx;TKJt6yE}-U--8Ou0kN?gP%WzRw>_ 
z2CV3->fi!?R5ocElHCWJn`Oh36Zgyqoco>AWz{lA~tvq=9Rk5x;yCSvjcF+6mu@F z%yU3dbX47rcn;w5^1Wvidd~ruO@=d@wsQczRbgi%ngG6^f1GU4eK4={OhSAj;C(r? zCt@835Rp*g1|U-^zuu~GA5hkO`hhA1fD}?lxq!2YRI-b~S-`4f_3b+C!omC(Un#C- z0HMXVZAGHvy7PZC>S6WUaey@!hNQ&;_RMjmX~K1opf-|?UIVPID`Pdefbg|1Ba=$+J=14~00000NkvXXu0mjfK_z0P literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_98.png b/solr/client/ruby/flare/public/images/pie_98.png new file mode 100644 index 0000000000000000000000000000000000000000..ae28b963a185211cb850929c0f00358ddfb8b25f GIT binary patch literal 738 zcmV<80v-K{P)6&uO$> z0%qF#+)sLdpvA&4gn{r~f1&R%KoTc6GOWPH>XO`wejxqSe%B!qKtI1-pPL9E2WN0K z@^YlrjyQP7GvVd}9)&iseg_Z=P1JZkM!zFuQf9wE4hY(AzJAaJ(C!OLvy;~V^v%Bw znq9#7@Hy4deSbG5qPPtmbG-{ds(JUI^lKEr;Ct`Lh#VND)}4^K|2^!_gvM^Q$pO%s z_>Ec2-`AIaBx5k)CE&TO;i-q#ANDiPJB?bM1mF|QFcJX$TkNyeJRsCGT{g*x9WbPw zKJsNN5D+?8n^^=$d3gYg4WPf9M9TL^$j++7mM_4om42MQMDJp{mXaaHB9*7yOLRB?5I zngB>|*ft;@1`=vFE3>Y|4o1DaM_Hx-=yf?BaUuZ_#ps0@;0IcyUEgB|6u6rAwg&+4 zzyps6xD;(%SRuLqa5CGQ9~G@1jMH=L>_Z!{!Ol{#pD({3@jRHuDy)<}1%!{gnWk$1 zc{=kne+@r!%2>Wp)(K?SKO@;3Kw3+2jgr0c^Q%5?{b~v>*a3gXhta`Sz~>mq(3b*p zFDxmBFc8`~wJ>o5NO&VIEV2P9sZy(o12Db$wGtMPLJ=B7W&kW?fD?JS=z%fr2dxtH UhW>E%`~Uy|07*qoM6N<$f?YO6CIA2c literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/pie_99.png b/solr/client/ruby/flare/public/images/pie_99.png new file mode 100644 index 0000000000000000000000000000000000000000..19e27575d2df051388a0d05afa99ad4b60746a41 GIT binary patch literal 743 zcmV?P)Y+ryQQ8_>rvzN_daZ5?KnL6VZ7~4r7I7nD0pOM?E@f{3!m~3@A4>%`tEDRWH2~YO z4>I@Lu{=HEd1}mJyaP-ex>-_008|4xYr5K&0}$Wj&5C(1fD4^uvAG=>-*sjA!{w^{O5eklBRLhK!Yly2P$7WF0wCz{RfjMO`I<1G;(!%H9eYfb@QQtIdP& zldqy0$?v8r24E0>2PHZlI3TAaWl4FF0l0Bo)|8|P9FTiXell$n15gmJs>wA54j8so zS}4E30OZ%k*2MP#!5WcPBJdxKQ;{smiU#rs(mGyQ1B=ariIs*Y%Bn3}1dA-`VXfsmf?#{%>E&wC8~-88;p1epDy=YNUYO002ovPDHLkV1ibUR1yFH literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/rails.png b/solr/client/ruby/flare/public/images/rails.png 
new file mode 100644 index 0000000000000000000000000000000000000000..b8441f182e06974083cf08f0acaf0e2fd612bd40 GIT binary patch literal 1787 zcmVCLdthj)A!BBmWB&y|X`RY;f`BJ<_ju%@N||NoLFD~mQl$aHGjq>;5dG_D{h(5s}0 z6&=HANU$m__3PuddU(lvR_xWj`}Oho@9EyQt-n!E*P(KhM@X_VFV2l&>deNZJT%y8iwA zoG>u1B`p2=_u9k4v1Mud`1+qvOZoHg#bITJ9U`qBAek?40RR96!AV3xRCwBy*IQ$v zN(=yC9IhRft9V64L`77pqF_Cx@c;kSNoGK)`?Ps*cP(EtGlYZ{D5cxspMQvjKH)Oh6X(pa|J{ zGy1J$Ej7=Z{uvmMfRRsE;v`p;45B~6*ep#hM^ji zl$+7qoWq~}ewG=61uFw0He{tJurMU&4Iv?=B^eR(wAHk!miA)O7p_+YR>lbmU3rmn ze?+ze(+sEd6foB&*l9+?zkr_a-5*v&p*?c}HOGtyHg6r{WFYpQ=#z0Hc7VWLx$>M3|b0|Gn z+5t#z6*ffSVc6DjpmB2?AAR@@vB!wCK?9Yl;33;Q7^%(401QW|k=R8b!OwtLJPjjm zO9Ia;qCq)rOq!1Ia*6#A%#xb}yDx1P*pWla>9j$bnMn3CBqe4`TRll_Iy29kmG?4fbKuF=XqU|?3b@B zA`&a?KIgZ|KJx5eND_c3Em=WZn@xW8hRJ^G&sY^b(FW?WC9W_sb;+lAPdLTdBaKIK;-f}*h4|1aTjw7qX_k~e{TWO7jqcekERN;Jyh%67)q4rKpL*CEYL;|#GY{B@5 zi52XoC?xsoorJKxsliugF#z38MJqrYCWV(t<=G&f;^Me13&AiI9{3jUZ$ zFM`*L(9qc^VMxkz1oaDH!1pcD^IXp>Z0Jb=_qs?Vsrs{mp<^{$N!EC9o+`CO-(o}E zJ`y{*;9s|wr22-QoJ87y^~;)Q@b%P4UgSSsx>2$o@Vd{%Pk0@4qZ^fhB(vt$c1TG> z*{Ad;foraENbld`=MCNm4?9kvlgK~&J>ialpJ7nua zx0oRzwG5;}Qne)Fg(N3kf?JVmB;}y&5(0+~r*aL$0Zof8fe!AtHWH>A^1Y)@G@GsA zup`R{Qg?{+MaxTq#2n{6w|)c&yaJ7{U4ngAH5v6I)*;@rEBE*ehIPBwKBQU)YKE8F0lR!Sm?sE4Xk-sj&E$|A-9n dP56HS1^^A-61FoN)nxzx002ovPDHLkV1kw_Sd9Px literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/images/simile-exhibit.png b/solr/client/ruby/flare/public/images/simile-exhibit.png new file mode 100644 index 0000000000000000000000000000000000000000..0b42e7884a3ae271bfd26d2ee423c97adacfa45e GIT binary patch literal 708 zcmV;#0z3VQP)-7d90007uNklJD*mw!YChKklOj1M5qfLOkc$K$uRF$s;LeXJ3cXO?BnBLhQ4i%F+w zKl1Av2^h_yrSU{M5)ByLqLt&$Z9x3RqUED19SR2~MA5!vOH&AO5(-R+qCH2^)@Wwn z_FJvizBZV!ofheRO={N0gm^y~47LJ;iK5fy^ZD-p^o||xjz**Bfxv_)+Ha^aZalVF zEKV`4KLgPFs@ZHlxHA~tqJ^QOOeQnod#If&?DG%gwrQGgHa@O8ozC)~yzUI+6ViDQ z>T&hok5!s_y*RhKW8@H0B!4 z$ygn_JflW>a7;fHUeG>y11}tABrSO=x7-rq 
z$?|jr`Zf_t`Voy39ZT;$w0D=W`NShVy>UoAc91T9?XQaJHIx>aA`1=52beDeZbK9r qQobS6b4`~_YfU+cwAAnY)&2>hBU{A_n}@*w0000000K>0kg~*SO5S38FWQhbW?9;ba!ELWdL_~cP?peYja~^ zaAhuUa%Y?FJQ@H19&<@VK~#90?VWj;RK?Z*Kd0(;&%m$@hz!c8sEC4KL@FAVuA~cq@JZ@jVo>*{J#C=q8H;*&G>|0XUPf#X0Owcj_Ajt(x;kw6}C z679^`DB?_s49p>+T=y3 zo~Vx9R6!k}iaK4Mu2{2U@2MX&WXiug;+s7z;XD^nrjgh>A@kJ%r8 zUC&jLI9nOozU+9bT!jJ(>J)XLDyYh|U2;eRP+lHhxRuo@-k)^6!Gh*B*K}h~%b8(he}m07=^TFc^RSM$f7F zN^yts4QJVeR0c`GCW#NhCgSUaEVdPiQ55JGZx0<k%YNak`^hqU*zUX z!t~D$QDrt~xJu0EGUnM;IVD9C_VR3g!VhOA8B3fbuPsf=FACeA`=kBNF$vB$2{3^# zYZ6)V1STQ(&D>|7v$n>wmz3iovGlH^c4#N|agxEHoaFhrWlS>BpNulBfK$XP$a%xm z(us#0W0LeEa%iMr1S02uja!{?oHXdyucdSE`DoOjVK&LFgn$Sl>eVYzypkn=8i(BZ z%0BycIB53UH?`&Smxxm&l0n&JY8aHCDtQveB!U6z6wwTM6r6rgdCpUR&o5`ksKFD( zDiR4A34{v5+1+n=HE-%Y|D1t)1PSwSBoZJrk_JUQVKV(eR2v*_d0Rh!i8w_<%k5%O z$eltdnkR8c*#6=gr{bPCE>GGl|ILvHNg_BHs}KofB#sdf?&*Hr%YVu(Kkt%(mC3+9 zuFwV1cn+5#i*p^Kr$_{WS*S=Jb3}56s@hs3d)j_k`;989 z8rm?rH@6;>NQ}WcPZ%2$ClZLHJ{_*UAg|Hdp+UPuV!EPxdeY=b7s(-yDvG=mxbR2) zU!JpQt33bKsPx|giR2<7U*trQ$qrSk`@g@t!L|dyX=axGRK-so&Ca~m{&9F;N!ah> z1dNjXHL^i0Ki=W;e8=Vzm!2YlJsccH92ugBG!9L1+U*;5Fc%HFb5@>S)Rrg_XNw`S z?x_>1D!x12fA}(4C@!9q#^mV~RAh;hNH9Pp$klC7r@zguQ3JVLj1{7oF!8U7xva~% z&&fewlC|wL;|ax7zPPUo?@&?)4ws#B7wb^ z6Q)Pmd^MLy!X`Zt{6WHgE*+DjbL&}i_xJjkq?sWJ8)S%*B~Oqg&&;Tt!M*N&^VN;| zT`OkJcL!cUf`8h8ekKr%rEm57@1N3|*U@U?h{(@HEc8lmxSr!C5(lv#L4{1-OD?#KXjLy-&j!C?~g|e-noAEz`vOU6HP$U z1Yj&k0Fuz!7kAyYyx4crDji2edIOX21bh$pb!+O|LYxKE0~?v&w-z)Z;{*dDd*!A} zU>5<~18A=C@97K6q?6r)tXG`>E*p0DhnehhtvH9kbwbsLOo$-P)V3c!?WJrTFAfbP zs+_KY>m#vFlkJbcN~3z=_O&nF(Y=X2e|p;Bky@s|F=;w$Jk!Dn8mJHHa9Ukf*>O1V zKr!#6wG!v&z(QaaaG8kY5m7|Ch)9?C{7@0u=ov+%kBA&EB9%?#?H@~+eMH0*H3}*kt2SmF^bxmB8A}5M%;rwB0;? zVVNY%#S&P=W`~>?+t2EAeQvvJ7k9fw0{flpkd%=!erQ{0Y+`7)tN(sSVSiU#Ik@we z=MSDH38tBV&c=$DGFC7_{A<07LXuLnFRN(c#3VQsxCOWo*aa9_P;3nFyokI8JPphf zkxO&xn2jgh1Ym)Pd^acWLEtrDB0jY8n>qC!27C_80KO2B19I-W8+bA9)5jt*q}XQ! 
zut~ErtH1`P-G*1s*`+blEW{eH29?I4O0D+K`QWP((&6DuUb<7mPbIKDBrwFv^D>h1 z6KZN)rp(&MPC8E#_GcOWh>Wge4U#agX<2e`m&^b9a$#SWUUkqR4QVr53?q#tCP;i( zkoa0M=Y}KR96Y#NG4G`{66XRS4cv+UfS&_nMDpWNfZo9EKo{UPV62E_H2R+c+o@_F z;AP;u03uQi{1`Y%Rd)jVlkW#=0FK2E6Q2RY$Ycydmm)zM_GB~x}uzRzlYk20A z@0h?&$>zs|A+Qk0?<+Uxx3@dz@jQKs+WyIh- zNSpKt#k`lIM7fA`0L}yMR@GH;+En~s`452q%qjk{s{TGMb2o4qus`rBumLztL=Fb_ z0oKO%90x4N6B1SZpMoyDq^dLHI%fidJ$^{Er}s_Jdw$7c>a_=&dLnI1RBN8 zO8;jV3S8XbHHb7-MA&DyS+VIxEp( z$raf~Uiw)V5{V}ya+RJfE~5l5R^v|S@S}(G9HStz^3J2rm!zGfY04#zX~t=MfU!|7 zQQ~u1rfzIXAr#o7a~1~F!>3*H06IPI*Q85sS-x87YfScocEl>v(6~G<6_w(=CzLUMR zH(~A*^hiba$|!L`S+G!@185wkl)c3oRCR+%2GcUh*4Qx)@9f0rd1ZS~eLs_z?zm_w z&KxF!cX9%%{IOlYSseP{hTJ}_nK9yrSnd)h-B6QtXAo$Pw24XN4A|&ZYxU-xdv8>&(JV`*LVa$oxy&X+y9Op_8}`o#{PwHS@0Mp(+bNUzA-Z4FzD{m1nE!9jf|( z*=>>^#y`5ezjMq+RfzO{(|7VuGi{{jTzI}jdaF7G=g>$)bP z1^G*Xr7dk#Ro4M;we-F`%&RTOn!VX+-SNOfRNwTK&b#tqfzwG;>lNPiJ9nCvj+jJZNY1?58tzdARQ9=><+;YQ39HHzR+%u*vdA{cag-aGs1xzi4$ zWBL6Ox!tqtv$D=pz-Fb*N(s1IB6n-o@h^YAxkr5ahXX5Ro&GpV%i8^3m^E#4<#VxB ziKD8sf!Uia_^$}}Rd=z1in&DKApoP?t5!}&WiQ}$m>yxST1t+oK&%Wv$^_aiH1MAWwQ^_0RY zn!|1rKj)Jxp2cuDpI$YDU4A)}T_^s7FUHPun7y*I3d&jAE?YVl^Ox5^8k+-0)5X)4y=SED4IBklZCVnaN$DXOe3>J*rg&nT@)C9@<)P;{`iikCfaPh24 z-2f5u4As~A>qPJ~#H|)BKQ$|XK2O~$Muy>7F5-SHeSY;^w!QZ+>lUTLyTzE}JYgJI znBbeUa>RY zchw$m0bUXRE>_j87S6a;P(9&MT`=wsDsq8=%hts0 zkZN9SIql_qaluG-sPPK^m%q3`4Cmlvwn%UsyWQ|frkU`gD~>X%zsE=q!FbZBATS3p zQ<%*C6E~J>sRBNCgdj2+IXCw0#^ZhZLVyLGQ5{N zm}_yGM0CCwel0<87dywUi(GtoVOeTf>iakyl~8lEiUxk&Rc8H_0dX-Q8q-xETTbu+ zW6~&XycE(XAkkuzNl}(#5jhO+A#PQ`Gq6lmi;4#+79vtl(-LQhs%~s$U3$$(uxsg7 z^B0(5S^HsEI*eVi$wpSx?o5}O&)}{ED;7*99o|Cs@gHULQqg^??hsU8laQO)Dfuwe zZ1hJI)?IXvL^{ev?#D!~Qlr>P9J<(Q96{)C=`i1VoUtIMLNpcO{P`+Pxw)E(>Z6pK z8>XpGr=z*4_2CEorq{Jt$7wNfL?nS9Vw(Hl3VZ-$RuAq1^ul|;3!SQ=swUWmXfbgB zR$McHlwCws&Egg{w(8o$N$E_sukl8|r8Os`+=A*0;BKb-b+2r6&j(|7bCUiEX&t4` z?XHo-uPKj@3}MoZ=Q)INE*;Gy?dEDatk-mUZj^HMx?bmQU-QDkjoNAjiW#c<(>!Tv zfT@M91IYTN_5yx{57O!b1R~NwRo51l|Cd6SYv+)^+II^u!_xVh=Ky78Z6y&CFlHSl zJ&4Nd@^y-juk1`zkx(_(| 
zsTM1`#h@7C?B7iK`>J|vb2r6)sf-Ka8W9L2$rlg48Q! zE{1v&GB*{u`u5-c?M5pawl#_&&cJ5UTNK+9hh)s!T)EgTbUAP&{sR?8^lcE45AZ=@ z<5jgyHdPdnuD~?~$;(uAe4IZJA77Na^VLcbxf-}#RXuUK{^D~ysp1mX3?ww8P#uY} z-348XlmAiCdV|absmFwKrObUm^-UXXN!wP2w|S=xEnnY$Nzm`Mx}v0gG5z?mne^Tw zawAaNRzr`7`~d%PdTpXuIX+lx0B~a5hY70M+I@gK0T&gdtpUc1$brCLX$(Bepc1$h zZ=-6|m)8Gs2h!2EP{2hFxU2_P5w}F>{f8gVTTc%X{ zcJ2p?UB2-taImU=QJAr%%CU&_0zPS?+|Kxrjpp>cS5>uOWyP&!a6xgp$D3!jZ%9`3 zuwkmoIaei-R%tq_io&SMq+JgehE*E6DosaKnvOcUY)nqr)XWNP@aIvA$Et6CnO){7mj{;#CuEjJs%_GyuQa zIH`a%i!QG~%6mXi1+I!HszSBOMf4Eo&xy23BUh#ED>?*`LybY4q9`h0i0w3zqd8}g zn^d^yBv!BuEf1y0MT?1}s;fohPT<_l7H&lU__z>QBqHO0dzw;eMPwIz=x0+%1NY*m z`fKpR(?~o~3URcEj8@g(HIdgE9JPtOkAO!vN?lo7iP-(X4Z`Ul5*_x$X*KG)p^A!M zX_?(w%QZ6!WDI;6(&Xj&qjATa0wU#~{CUUQUoUEi%GOk*0WiQD05=;4ya~(#zNe~r zGXxQN6u&&C5Q~6gRP~L5@}2RKhD}MA)xb`wx;m$PYlo@~o&|mf{1dOV*8=yd>Tfo2 zPd+}pu#8UI^(0DHqx8gSwR5goBJGK~YK>gAhOP&Zvl%-MStDT7Xbj9=$HP+A2$&hJ z{JCq0&{Vk0rlSGfnpkVz5)>VC953ztGcdFj?LVWcd4ItovK+tkrV!_<>PAam<9l|% z|Ft)@@xdXg`uCi&ttE~E;VJvALwYp6Xn49m8JI4%{A=rQS^Gg{V%jnV30brIPgKV2S9aW07S zRrR*GuAT5JGYhc>zv8pmkPH#|1AbXdAud$aTXV{_mN=_ab)$9eo5kAe`VzTQa5@BK zF}ONH>M*X3ZgW5MAsWSF%@ZTLnXkfXmkO(0%JtMRs&=V#HEGvV8`9OTA*$B2d%XMj zm)d0S<2GC65Dys#0sj#n2jIQoo5Sm>`pvkG@8Q>L72<7GZO(p_X@3lGLlarQRn;?d z%57WXteLPK9mg(fOt>q-vM-K78I5ujs#cxbfa5)NuFg1DN9gLn)Un$QP5luS4-qNc z&c9f+Ss+)u$nNR*$QbUv1sslF=iR57Tl@Qf{afz6fK(vXj>gLEp*+VN-M zbuM&u;#^%oIEO)Z zws_?SKwFUpw(`2FY$GCNz#}w2J>ErCKNFD=#48F4FjG|z+Dsc4;&;v{#Dl6jGN;@& zk~p`Pv2uAkDr>yoZq>D;aXK30HoE9T`QC65=8rqXx$q2eIvl6%)a5%jv*+G3zDUzi zokp%s!>~>^gmc!(`ggk>``DVz_p5DCi~(>vV7a#NMiDsynBGkGeyTc4{DS`Xn#fZjGe{2nI#DIIF+09bZiUnKk!PCKJ|6Yi_u zQFUYfa+Z0QRN|5&UF1d+x&88qlM_;rE(onig{tZ3Q%y&8nvUvRDy)-8UaCCr$rcA` zZ3&9m=Hyi16A`&iM4I{l&7rAqP4JrvWESPU4>aW}*j_}MwV%Yp;*Xli-sDPvwnvR) z^>`oQ*y*Y#adHMLYL5Y6yK(QaS`I*Y0=Pvf|I~dsu9gM0jRwVDH?L&3iN7&>+&Cz5 zZXoEdQC*SyKd-!@MziPYH{lkPUZ@^}>Xm|gM-!=yq7>Q+#YmjffO~S1Uc=i_W&zuT;6mxb$RpxpCt*KfJIk 
zwK5tm&W+a44bw1^ynwF!Ag%?HN}tv7%qN?gWV9KIAK}slaWyryUkHrHI}tOMa;iI_z1Nlh+c-tg+Vy=X|CPTHNSVYE{& zaY#keB9pqH^T;PQ4nJ*U6hoXt@Y|BK1y%w3s_MLgHm=79!M6r=_?@}(!Xmesa=fDU zX9&HitnpjdExzVpL|#DkRU$h)yJUtD)_u17)#7vA5EBudOxf z_W$q*38T?2j1JH+y0GJjhySz5vReVg5N7~ww;uciReiWHF%D0Cg~pw>T8a(8L516} z#C2~wJI?aj12NQRXI6`^J{#l?b^3`Q->1j5jZ1?*x$@iU+%$DsP2^CQRf=c1P$wln zSv(_mpX9F_#So{O+`y#bupA$^Gj$W~l;by_yb>RCv!!?(xVX81uCL8<9K$WT`Yws| zOjOrM$Vu#S{gc_ucQ5U#f!yOF{Xus6otc)0?h_58$z^ucJq(%twf@g>D9!-55Ey|E zAsq}1!UsKV30C7h-#6tt9-Bh!^Qje)`+&*75%{gznnDE30j^Tjr#I7w_wW*_5QV!u zq-c6Z)+$?xSr(j6W%>o@+#rOD<&!JBuG{}|1&T!DE?$&WME)o&;ha;K=KSsnmzrMhu-GT qE4kQMmbu}I7_0#*o**Fr literal 0 HcmV?d00001 diff --git a/solr/client/ruby/flare/public/javascripts/application.js b/solr/client/ruby/flare/public/javascripts/application.js new file mode 100644 index 00000000000..2cbbc17a8b2 --- /dev/null +++ b/solr/client/ruby/flare/public/javascripts/application.js @@ -0,0 +1,16 @@ +/* +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+*/ + +// Place your application-specific JavaScript functions and classes here +// This file is automatically included by javascript_include_tag :defaults diff --git a/solr/client/ruby/flare/public/javascripts/controls.js b/solr/client/ruby/flare/public/javascripts/controls.js new file mode 100644 index 00000000000..8c273f874f9 --- /dev/null +++ b/solr/client/ruby/flare/public/javascripts/controls.js @@ -0,0 +1,833 @@ +// Copyright (c) 2005, 2006 Thomas Fuchs (http://script.aculo.us, http://mir.aculo.us) +// (c) 2005, 2006 Ivan Krstic (http://blogs.law.harvard.edu/ivan) +// (c) 2005, 2006 Jon Tirsen (http://www.tirsen.com) +// Contributors: +// Richard Livsey +// Rahul Bhargava +// Rob Wills +// +// script.aculo.us is freely distributable under the terms of an MIT-style license. +// For details, see the script.aculo.us web site: http://script.aculo.us/ + +// Autocompleter.Base handles all the autocompletion functionality +// that's independent of the data source for autocompletion. This +// includes drawing the autocompletion menu, observing keyboard +// and mouse events, and similar. +// +// Specific autocompleters need to provide, at the very least, +// a getUpdatedChoices function that will be invoked every time +// the text inside the monitored textbox changes. This method +// should get the text for which to provide autocompletion by +// invoking this.getToken(), NOT by directly accessing +// this.element.value. This is to allow incremental tokenized +// autocompletion. Specific auto-completion logic (AJAX, etc) +// belongs in getUpdatedChoices. +// +// Tokenized incremental autocompletion is enabled automatically +// when an autocompleter is instantiated with the 'tokens' option +// in the options parameter, e.g.: +// new Ajax.Autocompleter('id','upd', '/url/', { tokens: ',' }); +// will incrementally autocomplete with a comma as the token. +// Additionally, ',' in the above example can be replaced with +// a token array, e.g. 
{ tokens: [',', '\n'] } which +// enables autocompletion on multiple tokens. This is most +// useful when one of the tokens is \n (a newline), as it +// allows smart autocompletion after linebreaks. + +if(typeof Effect == 'undefined') + throw("controls.js requires including script.aculo.us' effects.js library"); + +var Autocompleter = {} +Autocompleter.Base = function() {}; +Autocompleter.Base.prototype = { + baseInitialize: function(element, update, options) { + this.element = $(element); + this.update = $(update); + this.hasFocus = false; + this.changed = false; + this.active = false; + this.index = 0; + this.entryCount = 0; + + if(this.setOptions) + this.setOptions(options); + else + this.options = options || {}; + + this.options.paramName = this.options.paramName || this.element.name; + this.options.tokens = this.options.tokens || []; + this.options.frequency = this.options.frequency || 0.4; + this.options.minChars = this.options.minChars || 1; + this.options.onShow = this.options.onShow || + function(element, update){ + if(!update.style.position || update.style.position=='absolute') { + update.style.position = 'absolute'; + Position.clone(element, update, { + setHeight: false, + offsetTop: element.offsetHeight + }); + } + Effect.Appear(update,{duration:0.15}); + }; + this.options.onHide = this.options.onHide || + function(element, update){ new Effect.Fade(update,{duration:0.15}) }; + + if(typeof(this.options.tokens) == 'string') + this.options.tokens = new Array(this.options.tokens); + + this.observer = null; + + this.element.setAttribute('autocomplete','off'); + + Element.hide(this.update); + + Event.observe(this.element, "blur", this.onBlur.bindAsEventListener(this)); + Event.observe(this.element, "keypress", this.onKeyPress.bindAsEventListener(this)); + }, + + show: function() { + if(Element.getStyle(this.update, 'display')=='none') this.options.onShow(this.element, this.update); + if(!this.iefix && + (navigator.appVersion.indexOf('MSIE')>0) && + 
(navigator.userAgent.indexOf('Opera')<0) && + (Element.getStyle(this.update, 'position')=='absolute')) { + new Insertion.After(this.update, + ''); + this.iefix = $(this.update.id+'_iefix'); + } + if(this.iefix) setTimeout(this.fixIEOverlapping.bind(this), 50); + }, + + fixIEOverlapping: function() { + Position.clone(this.update, this.iefix, {setTop:(!this.update.style.height)}); + this.iefix.style.zIndex = 1; + this.update.style.zIndex = 2; + Element.show(this.iefix); + }, + + hide: function() { + this.stopIndicator(); + if(Element.getStyle(this.update, 'display')!='none') this.options.onHide(this.element, this.update); + if(this.iefix) Element.hide(this.iefix); + }, + + startIndicator: function() { + if(this.options.indicator) Element.show(this.options.indicator); + }, + + stopIndicator: function() { + if(this.options.indicator) Element.hide(this.options.indicator); + }, + + onKeyPress: function(event) { + if(this.active) + switch(event.keyCode) { + case Event.KEY_TAB: + case Event.KEY_RETURN: + this.selectEntry(); + Event.stop(event); + case Event.KEY_ESC: + this.hide(); + this.active = false; + Event.stop(event); + return; + case Event.KEY_LEFT: + case Event.KEY_RIGHT: + return; + case Event.KEY_UP: + this.markPrevious(); + this.render(); + if(navigator.appVersion.indexOf('AppleWebKit')>0) Event.stop(event); + return; + case Event.KEY_DOWN: + this.markNext(); + this.render(); + if(navigator.appVersion.indexOf('AppleWebKit')>0) Event.stop(event); + return; + } + else + if(event.keyCode==Event.KEY_TAB || event.keyCode==Event.KEY_RETURN || + (navigator.appVersion.indexOf('AppleWebKit') > 0 && event.keyCode == 0)) return; + + this.changed = true; + this.hasFocus = true; + + if(this.observer) clearTimeout(this.observer); + this.observer = + setTimeout(this.onObserverEvent.bind(this), this.options.frequency*1000); + }, + + activate: function() { + this.changed = false; + this.hasFocus = true; + this.getUpdatedChoices(); + }, + + onHover: function(event) { + var 
element = Event.findElement(event, 'LI'); + if(this.index != element.autocompleteIndex) + { + this.index = element.autocompleteIndex; + this.render(); + } + Event.stop(event); + }, + + onClick: function(event) { + var element = Event.findElement(event, 'LI'); + this.index = element.autocompleteIndex; + this.selectEntry(); + this.hide(); + }, + + onBlur: function(event) { + // needed to make click events working + setTimeout(this.hide.bind(this), 250); + this.hasFocus = false; + this.active = false; + }, + + render: function() { + if(this.entryCount > 0) { + for (var i = 0; i < this.entryCount; i++) + this.index==i ? + Element.addClassName(this.getEntry(i),"selected") : + Element.removeClassName(this.getEntry(i),"selected"); + + if(this.hasFocus) { + this.show(); + this.active = true; + } + } else { + this.active = false; + this.hide(); + } + }, + + markPrevious: function() { + if(this.index > 0) this.index-- + else this.index = this.entryCount-1; + this.getEntry(this.index).scrollIntoView(true); + }, + + markNext: function() { + if(this.index < this.entryCount-1) this.index++ + else this.index = 0; + this.getEntry(this.index).scrollIntoView(false); + }, + + getEntry: function(index) { + return this.update.firstChild.childNodes[index]; + }, + + getCurrentEntry: function() { + return this.getEntry(this.index); + }, + + selectEntry: function() { + this.active = false; + this.updateElement(this.getCurrentEntry()); + }, + + updateElement: function(selectedElement) { + if (this.options.updateElement) { + this.options.updateElement(selectedElement); + return; + } + var value = ''; + if (this.options.select) { + var nodes = document.getElementsByClassName(this.options.select, selectedElement) || []; + if(nodes.length>0) value = Element.collectTextNodes(nodes[0], this.options.select); + } else + value = Element.collectTextNodesIgnoreClass(selectedElement, 'informal'); + + var lastTokenPos = this.findLastToken(); + if (lastTokenPos != -1) { + var newValue = 
this.element.value.substr(0, lastTokenPos + 1); + var whitespace = this.element.value.substr(lastTokenPos + 1).match(/^\s+/); + if (whitespace) + newValue += whitespace[0]; + this.element.value = newValue + value; + } else { + this.element.value = value; + } + this.element.focus(); + + if (this.options.afterUpdateElement) + this.options.afterUpdateElement(this.element, selectedElement); + }, + + updateChoices: function(choices) { + if(!this.changed && this.hasFocus) { + this.update.innerHTML = choices; + Element.cleanWhitespace(this.update); + Element.cleanWhitespace(this.update.down()); + + if(this.update.firstChild && this.update.down().childNodes) { + this.entryCount = + this.update.down().childNodes.length; + for (var i = 0; i < this.entryCount; i++) { + var entry = this.getEntry(i); + entry.autocompleteIndex = i; + this.addObservers(entry); + } + } else { + this.entryCount = 0; + } + + this.stopIndicator(); + this.index = 0; + + if(this.entryCount==1 && this.options.autoSelect) { + this.selectEntry(); + this.hide(); + } else { + this.render(); + } + } + }, + + addObservers: function(element) { + Event.observe(element, "mouseover", this.onHover.bindAsEventListener(this)); + Event.observe(element, "click", this.onClick.bindAsEventListener(this)); + }, + + onObserverEvent: function() { + this.changed = false; + if(this.getToken().length>=this.options.minChars) { + this.startIndicator(); + this.getUpdatedChoices(); + } else { + this.active = false; + this.hide(); + } + }, + + getToken: function() { + var tokenPos = this.findLastToken(); + if (tokenPos != -1) + var ret = this.element.value.substr(tokenPos + 1).replace(/^\s+/,'').replace(/\s+$/,''); + else + var ret = this.element.value; + + return /\n/.test(ret) ? 
'' : ret; + }, + + findLastToken: function() { + var lastTokenPos = -1; + + for (var i=0; i lastTokenPos) + lastTokenPos = thisTokenPos; + } + return lastTokenPos; + } +} + +Ajax.Autocompleter = Class.create(); +Object.extend(Object.extend(Ajax.Autocompleter.prototype, Autocompleter.Base.prototype), { + initialize: function(element, update, url, options) { + this.baseInitialize(element, update, options); + this.options.asynchronous = true; + this.options.onComplete = this.onComplete.bind(this); + this.options.defaultParams = this.options.parameters || null; + this.url = url; + }, + + getUpdatedChoices: function() { + entry = encodeURIComponent(this.options.paramName) + '=' + + encodeURIComponent(this.getToken()); + + this.options.parameters = this.options.callback ? + this.options.callback(this.element, entry) : entry; + + if(this.options.defaultParams) + this.options.parameters += '&' + this.options.defaultParams; + + new Ajax.Request(this.url, this.options); + }, + + onComplete: function(request) { + this.updateChoices(request.responseText); + } + +}); + +// The local array autocompleter. Used when you'd prefer to +// inject an array of autocompletion options into the page, rather +// than sending out Ajax queries, which can be quite slow sometimes. +// +// The constructor takes four parameters. The first two are, as usual, +// the id of the monitored textbox, and id of the autocompletion menu. +// The third is the array you want to autocomplete from, and the fourth +// is the options block. +// +// Extra local autocompletion options: +// - choices - How many autocompletion choices to offer +// +// - partialSearch - If false, the autocompleter will match entered +// text only at the beginning of strings in the +// autocomplete array. Defaults to true, which will +// match text at the beginning of any *word* in the +// strings in the autocomplete array. 
If you want to +// search anywhere in the string, additionally set +// the option fullSearch to true (default: off). +// +// - fullSsearch - Search anywhere in autocomplete array strings. +// +// - partialChars - How many characters to enter before triggering +// a partial match (unlike minChars, which defines +// how many characters are required to do any match +// at all). Defaults to 2. +// +// - ignoreCase - Whether to ignore case when autocompleting. +// Defaults to true. +// +// It's possible to pass in a custom function as the 'selector' +// option, if you prefer to write your own autocompletion logic. +// In that case, the other options above will not apply unless +// you support them. + +Autocompleter.Local = Class.create(); +Autocompleter.Local.prototype = Object.extend(new Autocompleter.Base(), { + initialize: function(element, update, array, options) { + this.baseInitialize(element, update, options); + this.options.array = array; + }, + + getUpdatedChoices: function() { + this.updateChoices(this.options.selector(this)); + }, + + setOptions: function(options) { + this.options = Object.extend({ + choices: 10, + partialSearch: true, + partialChars: 2, + ignoreCase: true, + fullSearch: false, + selector: function(instance) { + var ret = []; // Beginning matches + var partial = []; // Inside matches + var entry = instance.getToken(); + var count = 0; + + for (var i = 0; i < instance.options.array.length && + ret.length < instance.options.choices ; i++) { + + var elem = instance.options.array[i]; + var foundPos = instance.options.ignoreCase ? + elem.toLowerCase().indexOf(entry.toLowerCase()) : + elem.indexOf(entry); + + while (foundPos != -1) { + if (foundPos == 0 && elem.length != entry.length) { + ret.push("
  • " + elem.substr(0, entry.length) + "" + + elem.substr(entry.length) + "
  • "); + break; + } else if (entry.length >= instance.options.partialChars && + instance.options.partialSearch && foundPos != -1) { + if (instance.options.fullSearch || /\s/.test(elem.substr(foundPos-1,1))) { + partial.push("
  • " + elem.substr(0, foundPos) + "" + + elem.substr(foundPos, entry.length) + "" + elem.substr( + foundPos + entry.length) + "
  • "); + break; + } + } + + foundPos = instance.options.ignoreCase ? + elem.toLowerCase().indexOf(entry.toLowerCase(), foundPos + 1) : + elem.indexOf(entry, foundPos + 1); + + } + } + if (partial.length) + ret = ret.concat(partial.slice(0, instance.options.choices - ret.length)) + return "
      " + ret.join('') + "
    "; + } + }, options || {}); + } +}); + +// AJAX in-place editor +// +// see documentation on http://wiki.script.aculo.us/scriptaculous/show/Ajax.InPlaceEditor + +// Use this if you notice weird scrolling problems on some browsers, +// the DOM might be a bit confused when this gets called so do this +// waits 1 ms (with setTimeout) until it does the activation +Field.scrollFreeActivate = function(field) { + setTimeout(function() { + Field.activate(field); + }, 1); +} + +Ajax.InPlaceEditor = Class.create(); +Ajax.InPlaceEditor.defaultHighlightColor = "#FFFF99"; +Ajax.InPlaceEditor.prototype = { + initialize: function(element, url, options) { + this.url = url; + this.element = $(element); + + this.options = Object.extend({ + paramName: "value", + okButton: true, + okText: "ok", + cancelLink: true, + cancelText: "cancel", + savingText: "Saving...", + clickToEditText: "Click to edit", + okText: "ok", + rows: 1, + onComplete: function(transport, element) { + new Effect.Highlight(element, {startcolor: this.options.highlightcolor}); + }, + onFailure: function(transport) { + alert("Error communicating with the server: " + transport.responseText.stripTags()); + }, + callback: function(form) { + return Form.serialize(form); + }, + handleLineBreaks: true, + loadingText: 'Loading...', + savingClassName: 'inplaceeditor-saving', + loadingClassName: 'inplaceeditor-loading', + formClassName: 'inplaceeditor-form', + highlightcolor: Ajax.InPlaceEditor.defaultHighlightColor, + highlightendcolor: "#FFFFFF", + externalControl: null, + submitOnBlur: false, + ajaxOptions: {}, + evalScripts: false + }, options || {}); + + if(!this.options.formId && this.element.id) { + this.options.formId = this.element.id + "-inplaceeditor"; + if ($(this.options.formId)) { + // there's already a form with that name, don't specify an id + this.options.formId = null; + } + } + + if (this.options.externalControl) { + this.options.externalControl = $(this.options.externalControl); + } + + 
this.originalBackground = Element.getStyle(this.element, 'background-color'); + if (!this.originalBackground) { + this.originalBackground = "transparent"; + } + + this.element.title = this.options.clickToEditText; + + this.onclickListener = this.enterEditMode.bindAsEventListener(this); + this.mouseoverListener = this.enterHover.bindAsEventListener(this); + this.mouseoutListener = this.leaveHover.bindAsEventListener(this); + Event.observe(this.element, 'click', this.onclickListener); + Event.observe(this.element, 'mouseover', this.mouseoverListener); + Event.observe(this.element, 'mouseout', this.mouseoutListener); + if (this.options.externalControl) { + Event.observe(this.options.externalControl, 'click', this.onclickListener); + Event.observe(this.options.externalControl, 'mouseover', this.mouseoverListener); + Event.observe(this.options.externalControl, 'mouseout', this.mouseoutListener); + } + }, + enterEditMode: function(evt) { + if (this.saving) return; + if (this.editing) return; + this.editing = true; + this.onEnterEditMode(); + if (this.options.externalControl) { + Element.hide(this.options.externalControl); + } + Element.hide(this.element); + this.createForm(); + this.element.parentNode.insertBefore(this.form, this.element); + if (!this.options.loadTextURL) Field.scrollFreeActivate(this.editField); + // stop the event to avoid a page refresh in Safari + if (evt) { + Event.stop(evt); + } + return false; + }, + createForm: function() { + this.form = document.createElement("form"); + this.form.id = this.options.formId; + Element.addClassName(this.form, this.options.formClassName) + this.form.onsubmit = this.onSubmit.bind(this); + + this.createEditField(); + + if (this.options.textarea) { + var br = document.createElement("br"); + this.form.appendChild(br); + } + + if (this.options.okButton) { + okButton = document.createElement("input"); + okButton.type = "submit"; + okButton.value = this.options.okText; + okButton.className = 'editor_ok_button'; + 
this.form.appendChild(okButton); + } + + if (this.options.cancelLink) { + cancelLink = document.createElement("a"); + cancelLink.href = "#"; + cancelLink.appendChild(document.createTextNode(this.options.cancelText)); + cancelLink.onclick = this.onclickCancel.bind(this); + cancelLink.className = 'editor_cancel'; + this.form.appendChild(cancelLink); + } + }, + hasHTMLLineBreaks: function(string) { + if (!this.options.handleLineBreaks) return false; + return string.match(/
    /i); + }, + convertHTMLLineBreaks: function(string) { + return string.replace(/
    /gi, "\n").replace(//gi, "\n").replace(/<\/p>/gi, "\n").replace(/

    /gi, ""); + }, + createEditField: function() { + var text; + if(this.options.loadTextURL) { + text = this.options.loadingText; + } else { + text = this.getText(); + } + + var obj = this; + + if (this.options.rows == 1 && !this.hasHTMLLineBreaks(text)) { + this.options.textarea = false; + var textField = document.createElement("input"); + textField.obj = this; + textField.type = "text"; + textField.name = this.options.paramName; + textField.value = text; + textField.style.backgroundColor = this.options.highlightcolor; + textField.className = 'editor_field'; + var size = this.options.size || this.options.cols || 0; + if (size != 0) textField.size = size; + if (this.options.submitOnBlur) + textField.onblur = this.onSubmit.bind(this); + this.editField = textField; + } else { + this.options.textarea = true; + var textArea = document.createElement("textarea"); + textArea.obj = this; + textArea.name = this.options.paramName; + textArea.value = this.convertHTMLLineBreaks(text); + textArea.rows = this.options.rows; + textArea.cols = this.options.cols || 40; + textArea.className = 'editor_field'; + if (this.options.submitOnBlur) + textArea.onblur = this.onSubmit.bind(this); + this.editField = textArea; + } + + if(this.options.loadTextURL) { + this.loadExternalText(); + } + this.form.appendChild(this.editField); + }, + getText: function() { + return this.element.innerHTML; + }, + loadExternalText: function() { + Element.addClassName(this.form, this.options.loadingClassName); + this.editField.disabled = true; + new Ajax.Request( + this.options.loadTextURL, + Object.extend({ + asynchronous: true, + onComplete: this.onLoadedExternalText.bind(this) + }, this.options.ajaxOptions) + ); + }, + onLoadedExternalText: function(transport) { + Element.removeClassName(this.form, this.options.loadingClassName); + this.editField.disabled = false; + this.editField.value = transport.responseText.stripTags(); + Field.scrollFreeActivate(this.editField); + }, + onclickCancel: function() { + 
this.onComplete(); + this.leaveEditMode(); + return false; + }, + onFailure: function(transport) { + this.options.onFailure(transport); + if (this.oldInnerHTML) { + this.element.innerHTML = this.oldInnerHTML; + this.oldInnerHTML = null; + } + return false; + }, + onSubmit: function() { + // onLoading resets these so we need to save them away for the Ajax call + var form = this.form; + var value = this.editField.value; + + // do this first, sometimes the ajax call returns before we get a chance to switch on Saving... + // which means this will actually switch on Saving... *after* we've left edit mode causing Saving... + // to be displayed indefinitely + this.onLoading(); + + if (this.options.evalScripts) { + new Ajax.Request( + this.url, Object.extend({ + parameters: this.options.callback(form, value), + onComplete: this.onComplete.bind(this), + onFailure: this.onFailure.bind(this), + asynchronous:true, + evalScripts:true + }, this.options.ajaxOptions)); + } else { + new Ajax.Updater( + { success: this.element, + // don't update on failure (this could be an option) + failure: null }, + this.url, Object.extend({ + parameters: this.options.callback(form, value), + onComplete: this.onComplete.bind(this), + onFailure: this.onFailure.bind(this) + }, this.options.ajaxOptions)); + } + // stop the event to avoid a page refresh in Safari + if (arguments.length > 1) { + Event.stop(arguments[0]); + } + return false; + }, + onLoading: function() { + this.saving = true; + this.removeForm(); + this.leaveHover(); + this.showSaving(); + }, + showSaving: function() { + this.oldInnerHTML = this.element.innerHTML; + this.element.innerHTML = this.options.savingText; + Element.addClassName(this.element, this.options.savingClassName); + this.element.style.backgroundColor = this.originalBackground; + Element.show(this.element); + }, + removeForm: function() { + if(this.form) { + if (this.form.parentNode) Element.remove(this.form); + this.form = null; + } + }, + enterHover: function() { + 
if (this.saving) return; + this.element.style.backgroundColor = this.options.highlightcolor; + if (this.effect) { + this.effect.cancel(); + } + Element.addClassName(this.element, this.options.hoverClassName) + }, + leaveHover: function() { + if (this.options.backgroundColor) { + this.element.style.backgroundColor = this.oldBackground; + } + Element.removeClassName(this.element, this.options.hoverClassName) + if (this.saving) return; + this.effect = new Effect.Highlight(this.element, { + startcolor: this.options.highlightcolor, + endcolor: this.options.highlightendcolor, + restorecolor: this.originalBackground + }); + }, + leaveEditMode: function() { + Element.removeClassName(this.element, this.options.savingClassName); + this.removeForm(); + this.leaveHover(); + this.element.style.backgroundColor = this.originalBackground; + Element.show(this.element); + if (this.options.externalControl) { + Element.show(this.options.externalControl); + } + this.editing = false; + this.saving = false; + this.oldInnerHTML = null; + this.onLeaveEditMode(); + }, + onComplete: function(transport) { + this.leaveEditMode(); + this.options.onComplete.bind(this)(transport, this.element); + }, + onEnterEditMode: function() {}, + onLeaveEditMode: function() {}, + dispose: function() { + if (this.oldInnerHTML) { + this.element.innerHTML = this.oldInnerHTML; + } + this.leaveEditMode(); + Event.stopObserving(this.element, 'click', this.onclickListener); + Event.stopObserving(this.element, 'mouseover', this.mouseoverListener); + Event.stopObserving(this.element, 'mouseout', this.mouseoutListener); + if (this.options.externalControl) { + Event.stopObserving(this.options.externalControl, 'click', this.onclickListener); + Event.stopObserving(this.options.externalControl, 'mouseover', this.mouseoverListener); + Event.stopObserving(this.options.externalControl, 'mouseout', this.mouseoutListener); + } + } +}; + +Ajax.InPlaceCollectionEditor = Class.create(); 
+Object.extend(Ajax.InPlaceCollectionEditor.prototype, Ajax.InPlaceEditor.prototype); +Object.extend(Ajax.InPlaceCollectionEditor.prototype, { + createEditField: function() { + if (!this.cached_selectTag) { + var selectTag = document.createElement("select"); + var collection = this.options.collection || []; + var optionTag; + collection.each(function(e,i) { + optionTag = document.createElement("option"); + optionTag.value = (e instanceof Array) ? e[0] : e; + if((typeof this.options.value == 'undefined') && + ((e instanceof Array) ? this.element.innerHTML == e[1] : e == optionTag.value)) optionTag.selected = true; + if(this.options.value==optionTag.value) optionTag.selected = true; + optionTag.appendChild(document.createTextNode((e instanceof Array) ? e[1] : e)); + selectTag.appendChild(optionTag); + }.bind(this)); + this.cached_selectTag = selectTag; + } + + this.editField = this.cached_selectTag; + if(this.options.loadTextURL) this.loadExternalText(); + this.form.appendChild(this.editField); + this.options.callback = function(form, value) { + return "value=" + encodeURIComponent(value); + } + } +}); + +// Delayed observer, like Form.Element.Observer, +// but waits for delay after last key input +// Ideal for live-search fields + +Form.Element.DelayedObserver = Class.create(); +Form.Element.DelayedObserver.prototype = { + initialize: function(element, delay, callback) { + this.delay = delay || 0.5; + this.element = $(element); + this.callback = callback; + this.timer = null; + this.lastValue = $F(this.element); + Event.observe(this.element,'keyup',this.delayedListener.bindAsEventListener(this)); + }, + delayedListener: function(event) { + if(this.lastValue == $F(this.element)) return; + if(this.timer) clearTimeout(this.timer); + this.timer = setTimeout(this.onTimerEvent.bind(this), this.delay * 1000); + this.lastValue = $F(this.element); + }, + onTimerEvent: function() { + this.timer = null; + this.callback(this.element, $F(this.element)); + } +}; diff --git 
a/solr/client/ruby/flare/public/javascripts/dragdrop.js b/solr/client/ruby/flare/public/javascripts/dragdrop.js new file mode 100644 index 00000000000..c71ddb82746 --- /dev/null +++ b/solr/client/ruby/flare/public/javascripts/dragdrop.js @@ -0,0 +1,942 @@ +// Copyright (c) 2005, 2006 Thomas Fuchs (http://script.aculo.us, http://mir.aculo.us) +// (c) 2005, 2006 Sammi Williams (http://www.oriontransfer.co.nz, sammi@oriontransfer.co.nz) +// +// script.aculo.us is freely distributable under the terms of an MIT-style license. +// For details, see the script.aculo.us web site: http://script.aculo.us/ + +if(typeof Effect == 'undefined') + throw("dragdrop.js requires including script.aculo.us' effects.js library"); + +var Droppables = { + drops: [], + + remove: function(element) { + this.drops = this.drops.reject(function(d) { return d.element==$(element) }); + }, + + add: function(element) { + element = $(element); + var options = Object.extend({ + greedy: true, + hoverclass: null, + tree: false + }, arguments[1] || {}); + + // cache containers + if(options.containment) { + options._containers = []; + var containment = options.containment; + if((typeof containment == 'object') && + (containment.constructor == Array)) { + containment.each( function(c) { options._containers.push($(c)) }); + } else { + options._containers.push($(containment)); + } + } + + if(options.accept) options.accept = [options.accept].flatten(); + + Element.makePositioned(element); // fix IE + options.element = element; + + this.drops.push(options); + }, + + findDeepestChild: function(drops) { + deepest = drops[0]; + + for (i = 1; i < drops.length; ++i) + if (Element.isParent(drops[i].element, deepest.element)) + deepest = drops[i]; + + return deepest; + }, + + isContained: function(element, drop) { + var containmentNode; + if(drop.tree) { + containmentNode = element.treeNode; + } else { + containmentNode = element.parentNode; + } + return drop._containers.detect(function(c) { return containmentNode == 
c }); + }, + + isAffected: function(point, element, drop) { + return ( + (drop.element!=element) && + ((!drop._containers) || + this.isContained(element, drop)) && + ((!drop.accept) || + (Element.classNames(element).detect( + function(v) { return drop.accept.include(v) } ) )) && + Position.within(drop.element, point[0], point[1]) ); + }, + + deactivate: function(drop) { + if(drop.hoverclass) + Element.removeClassName(drop.element, drop.hoverclass); + this.last_active = null; + }, + + activate: function(drop) { + if(drop.hoverclass) + Element.addClassName(drop.element, drop.hoverclass); + this.last_active = drop; + }, + + show: function(point, element) { + if(!this.drops.length) return; + var affected = []; + + if(this.last_active) this.deactivate(this.last_active); + this.drops.each( function(drop) { + if(Droppables.isAffected(point, element, drop)) + affected.push(drop); + }); + + if(affected.length>0) { + drop = Droppables.findDeepestChild(affected); + Position.within(drop.element, point[0], point[1]); + if(drop.onHover) + drop.onHover(element, drop.element, Position.overlap(drop.overlap, drop.element)); + + Droppables.activate(drop); + } + }, + + fire: function(event, element) { + if(!this.last_active) return; + Position.prepare(); + + if (this.isAffected([Event.pointerX(event), Event.pointerY(event)], element, this.last_active)) + if (this.last_active.onDrop) + this.last_active.onDrop(element, this.last_active.element, event); + }, + + reset: function() { + if(this.last_active) + this.deactivate(this.last_active); + } +} + +var Draggables = { + drags: [], + observers: [], + + register: function(draggable) { + if(this.drags.length == 0) { + this.eventMouseUp = this.endDrag.bindAsEventListener(this); + this.eventMouseMove = this.updateDrag.bindAsEventListener(this); + this.eventKeypress = this.keyPress.bindAsEventListener(this); + + Event.observe(document, "mouseup", this.eventMouseUp); + Event.observe(document, "mousemove", this.eventMouseMove); + 
Event.observe(document, "keypress", this.eventKeypress); + } + this.drags.push(draggable); + }, + + unregister: function(draggable) { + this.drags = this.drags.reject(function(d) { return d==draggable }); + if(this.drags.length == 0) { + Event.stopObserving(document, "mouseup", this.eventMouseUp); + Event.stopObserving(document, "mousemove", this.eventMouseMove); + Event.stopObserving(document, "keypress", this.eventKeypress); + } + }, + + activate: function(draggable) { + if(draggable.options.delay) { + this._timeout = setTimeout(function() { + Draggables._timeout = null; + window.focus(); + Draggables.activeDraggable = draggable; + }.bind(this), draggable.options.delay); + } else { + window.focus(); // allows keypress events if window isn't currently focused, fails for Safari + this.activeDraggable = draggable; + } + }, + + deactivate: function() { + this.activeDraggable = null; + }, + + updateDrag: function(event) { + if(!this.activeDraggable) return; + var pointer = [Event.pointerX(event), Event.pointerY(event)]; + // Mozilla-based browsers fire successive mousemove events with + // the same coordinates, prevent needless redrawing (moz bug?) 
+ if(this._lastPointer && (this._lastPointer.inspect() == pointer.inspect())) return; + this._lastPointer = pointer; + + this.activeDraggable.updateDrag(event, pointer); + }, + + endDrag: function(event) { + if(this._timeout) { + clearTimeout(this._timeout); + this._timeout = null; + } + if(!this.activeDraggable) return; + this._lastPointer = null; + this.activeDraggable.endDrag(event); + this.activeDraggable = null; + }, + + keyPress: function(event) { + if(this.activeDraggable) + this.activeDraggable.keyPress(event); + }, + + addObserver: function(observer) { + this.observers.push(observer); + this._cacheObserverCallbacks(); + }, + + removeObserver: function(element) { // element instead of observer fixes mem leaks + this.observers = this.observers.reject( function(o) { return o.element==element }); + this._cacheObserverCallbacks(); + }, + + notify: function(eventName, draggable, event) { // 'onStart', 'onEnd', 'onDrag' + if(this[eventName+'Count'] > 0) + this.observers.each( function(o) { + if(o[eventName]) o[eventName](eventName, draggable, event); + }); + if(draggable.options[eventName]) draggable.options[eventName](draggable, event); + }, + + _cacheObserverCallbacks: function() { + ['onStart','onEnd','onDrag'].each( function(eventName) { + Draggables[eventName+'Count'] = Draggables.observers.select( + function(o) { return o[eventName]; } + ).length; + }); + } +} + +/*--------------------------------------------------------------------------*/ + +var Draggable = Class.create(); +Draggable._dragging = {}; + +Draggable.prototype = { + initialize: function(element) { + var defaults = { + handle: false, + reverteffect: function(element, top_offset, left_offset) { + var dur = Math.sqrt(Math.abs(top_offset^2)+Math.abs(left_offset^2))*0.02; + new Effect.Move(element, { x: -left_offset, y: -top_offset, duration: dur, + queue: {scope:'_draggable', position:'end'} + }); + }, + endeffect: function(element) { + var toOpacity = typeof element._opacity == 'number' ? 
element._opacity : 1.0; + new Effect.Opacity(element, {duration:0.2, from:0.7, to:toOpacity, + queue: {scope:'_draggable', position:'end'}, + afterFinish: function(){ + Draggable._dragging[element] = false + } + }); + }, + zindex: 1000, + revert: false, + scroll: false, + scrollSensitivity: 20, + scrollSpeed: 15, + snap: false, // false, or xy or [x,y] or function(x,y){ return [x,y] } + delay: 0 + }; + + if(!arguments[1] || typeof arguments[1].endeffect == 'undefined') + Object.extend(defaults, { + starteffect: function(element) { + element._opacity = Element.getOpacity(element); + Draggable._dragging[element] = true; + new Effect.Opacity(element, {duration:0.2, from:element._opacity, to:0.7}); + } + }); + + var options = Object.extend(defaults, arguments[1] || {}); + + this.element = $(element); + + if(options.handle && (typeof options.handle == 'string')) + this.handle = this.element.down('.'+options.handle, 0); + + if(!this.handle) this.handle = $(options.handle); + if(!this.handle) this.handle = this.element; + + if(options.scroll && !options.scroll.scrollTo && !options.scroll.outerHTML) { + options.scroll = $(options.scroll); + this._isScrollChild = Element.childOf(this.element, options.scroll); + } + + Element.makePositioned(this.element); // fix IE + + this.delta = this.currentDelta(); + this.options = options; + this.dragging = false; + + this.eventMouseDown = this.initDrag.bindAsEventListener(this); + Event.observe(this.handle, "mousedown", this.eventMouseDown); + + Draggables.register(this); + }, + + destroy: function() { + Event.stopObserving(this.handle, "mousedown", this.eventMouseDown); + Draggables.unregister(this); + }, + + currentDelta: function() { + return([ + parseInt(Element.getStyle(this.element,'left') || '0'), + parseInt(Element.getStyle(this.element,'top') || '0')]); + }, + + initDrag: function(event) { + if(typeof Draggable._dragging[this.element] != 'undefined' && + Draggable._dragging[this.element]) return; + if(Event.isLeftClick(event)) 
{ + // abort on form elements, fixes a Firefox issue + var src = Event.element(event); + if(src.tagName && ( + src.tagName=='INPUT' || + src.tagName=='SELECT' || + src.tagName=='OPTION' || + src.tagName=='BUTTON' || + src.tagName=='TEXTAREA')) return; + + var pointer = [Event.pointerX(event), Event.pointerY(event)]; + var pos = Position.cumulativeOffset(this.element); + this.offset = [0,1].map( function(i) { return (pointer[i] - pos[i]) }); + + Draggables.activate(this); + Event.stop(event); + } + }, + + startDrag: function(event) { + this.dragging = true; + + if(this.options.zindex) { + this.originalZ = parseInt(Element.getStyle(this.element,'z-index') || 0); + this.element.style.zIndex = this.options.zindex; + } + + if(this.options.ghosting) { + this._clone = this.element.cloneNode(true); + Position.absolutize(this.element); + this.element.parentNode.insertBefore(this._clone, this.element); + } + + if(this.options.scroll) { + if (this.options.scroll == window) { + var where = this._getWindowScroll(this.options.scroll); + this.originalScrollLeft = where.left; + this.originalScrollTop = where.top; + } else { + this.originalScrollLeft = this.options.scroll.scrollLeft; + this.originalScrollTop = this.options.scroll.scrollTop; + } + } + + Draggables.notify('onStart', this, event); + + if(this.options.starteffect) this.options.starteffect(this.element); + }, + + updateDrag: function(event, pointer) { + if(!this.dragging) this.startDrag(event); + Position.prepare(); + Droppables.show(pointer, this.element); + Draggables.notify('onDrag', this, event); + + this.draw(pointer); + if(this.options.change) this.options.change(this); + + if(this.options.scroll) { + this.stopScrolling(); + + var p; + if (this.options.scroll == window) { + with(this._getWindowScroll(this.options.scroll)) { p = [ left, top, left+width, top+height ]; } + } else { + p = Position.page(this.options.scroll); + p[0] += this.options.scroll.scrollLeft + Position.deltaX; + p[1] += 
this.options.scroll.scrollTop + Position.deltaY; + p.push(p[0]+this.options.scroll.offsetWidth); + p.push(p[1]+this.options.scroll.offsetHeight); + } + var speed = [0,0]; + if(pointer[0] < (p[0]+this.options.scrollSensitivity)) speed[0] = pointer[0]-(p[0]+this.options.scrollSensitivity); + if(pointer[1] < (p[1]+this.options.scrollSensitivity)) speed[1] = pointer[1]-(p[1]+this.options.scrollSensitivity); + if(pointer[0] > (p[2]-this.options.scrollSensitivity)) speed[0] = pointer[0]-(p[2]-this.options.scrollSensitivity); + if(pointer[1] > (p[3]-this.options.scrollSensitivity)) speed[1] = pointer[1]-(p[3]-this.options.scrollSensitivity); + this.startScrolling(speed); + } + + // fix AppleWebKit rendering + if(navigator.appVersion.indexOf('AppleWebKit')>0) window.scrollBy(0,0); + + Event.stop(event); + }, + + finishDrag: function(event, success) { + this.dragging = false; + + if(this.options.ghosting) { + Position.relativize(this.element); + Element.remove(this._clone); + this._clone = null; + } + + if(success) Droppables.fire(event, this.element); + Draggables.notify('onEnd', this, event); + + var revert = this.options.revert; + if(revert && typeof revert == 'function') revert = revert(this.element); + + var d = this.currentDelta(); + if(revert && this.options.reverteffect) { + this.options.reverteffect(this.element, + d[1]-this.delta[1], d[0]-this.delta[0]); + } else { + this.delta = d; + } + + if(this.options.zindex) + this.element.style.zIndex = this.originalZ; + + if(this.options.endeffect) + this.options.endeffect(this.element); + + Draggables.deactivate(this); + Droppables.reset(); + }, + + keyPress: function(event) { + if(event.keyCode!=Event.KEY_ESC) return; + this.finishDrag(event, false); + Event.stop(event); + }, + + endDrag: function(event) { + if(!this.dragging) return; + this.stopScrolling(); + this.finishDrag(event, true); + Event.stop(event); + }, + + draw: function(point) { + var pos = Position.cumulativeOffset(this.element); + 
if(this.options.ghosting) { + var r = Position.realOffset(this.element); + pos[0] += r[0] - Position.deltaX; pos[1] += r[1] - Position.deltaY; + } + + var d = this.currentDelta(); + pos[0] -= d[0]; pos[1] -= d[1]; + + if(this.options.scroll && (this.options.scroll != window && this._isScrollChild)) { + pos[0] -= this.options.scroll.scrollLeft-this.originalScrollLeft; + pos[1] -= this.options.scroll.scrollTop-this.originalScrollTop; + } + + var p = [0,1].map(function(i){ + return (point[i]-pos[i]-this.offset[i]) + }.bind(this)); + + if(this.options.snap) { + if(typeof this.options.snap == 'function') { + p = this.options.snap(p[0],p[1],this); + } else { + if(this.options.snap instanceof Array) { + p = p.map( function(v, i) { + return Math.round(v/this.options.snap[i])*this.options.snap[i] }.bind(this)) + } else { + p = p.map( function(v) { + return Math.round(v/this.options.snap)*this.options.snap }.bind(this)) + } + }} + + var style = this.element.style; + if((!this.options.constraint) || (this.options.constraint=='horizontal')) + style.left = p[0] + "px"; + if((!this.options.constraint) || (this.options.constraint=='vertical')) + style.top = p[1] + "px"; + + if(style.visibility=="hidden") style.visibility = ""; // fix gecko rendering + }, + + stopScrolling: function() { + if(this.scrollInterval) { + clearInterval(this.scrollInterval); + this.scrollInterval = null; + Draggables._lastScrollPointer = null; + } + }, + + startScrolling: function(speed) { + if(!(speed[0] || speed[1])) return; + this.scrollSpeed = [speed[0]*this.options.scrollSpeed,speed[1]*this.options.scrollSpeed]; + this.lastScrolled = new Date(); + this.scrollInterval = setInterval(this.scroll.bind(this), 10); + }, + + scroll: function() { + var current = new Date(); + var delta = current - this.lastScrolled; + this.lastScrolled = current; + if(this.options.scroll == window) { + with (this._getWindowScroll(this.options.scroll)) { + if (this.scrollSpeed[0] || this.scrollSpeed[1]) { + var d = delta / 
1000; + this.options.scroll.scrollTo( left + d*this.scrollSpeed[0], top + d*this.scrollSpeed[1] ); + } + } + } else { + this.options.scroll.scrollLeft += this.scrollSpeed[0] * delta / 1000; + this.options.scroll.scrollTop += this.scrollSpeed[1] * delta / 1000; + } + + Position.prepare(); + Droppables.show(Draggables._lastPointer, this.element); + Draggables.notify('onDrag', this); + if (this._isScrollChild) { + Draggables._lastScrollPointer = Draggables._lastScrollPointer || $A(Draggables._lastPointer); + Draggables._lastScrollPointer[0] += this.scrollSpeed[0] * delta / 1000; + Draggables._lastScrollPointer[1] += this.scrollSpeed[1] * delta / 1000; + if (Draggables._lastScrollPointer[0] < 0) + Draggables._lastScrollPointer[0] = 0; + if (Draggables._lastScrollPointer[1] < 0) + Draggables._lastScrollPointer[1] = 0; + this.draw(Draggables._lastScrollPointer); + } + + if(this.options.change) this.options.change(this); + }, + + _getWindowScroll: function(w) { + var T, L, W, H; + with (w.document) { + if (w.document.documentElement && documentElement.scrollTop) { + T = documentElement.scrollTop; + L = documentElement.scrollLeft; + } else if (w.document.body) { + T = body.scrollTop; + L = body.scrollLeft; + } + if (w.innerWidth) { + W = w.innerWidth; + H = w.innerHeight; + } else if (w.document.documentElement && documentElement.clientWidth) { + W = documentElement.clientWidth; + H = documentElement.clientHeight; + } else { + W = body.offsetWidth; + H = body.offsetHeight + } + } + return { top: T, left: L, width: W, height: H }; + } +} + +/*--------------------------------------------------------------------------*/ + +var SortableObserver = Class.create(); +SortableObserver.prototype = { + initialize: function(element, observer) { + this.element = $(element); + this.observer = observer; + this.lastValue = Sortable.serialize(this.element); + }, + + onStart: function() { + this.lastValue = Sortable.serialize(this.element); + }, + + onEnd: function() { + Sortable.unmark(); 
+ if(this.lastValue != Sortable.serialize(this.element)) + this.observer(this.element) + } +} + +var Sortable = { + SERIALIZE_RULE: /^[^_\-](?:[A-Za-z0-9\-\_]*)[_](.*)$/, + + sortables: {}, + + _findRootElement: function(element) { + while (element.tagName != "BODY") { + if(element.id && Sortable.sortables[element.id]) return element; + element = element.parentNode; + } + }, + + options: function(element) { + element = Sortable._findRootElement($(element)); + if(!element) return; + return Sortable.sortables[element.id]; + }, + + destroy: function(element){ + var s = Sortable.options(element); + + if(s) { + Draggables.removeObserver(s.element); + s.droppables.each(function(d){ Droppables.remove(d) }); + s.draggables.invoke('destroy'); + + delete Sortable.sortables[s.element.id]; + } + }, + + create: function(element) { + element = $(element); + var options = Object.extend({ + element: element, + tag: 'li', // assumes li children, override with tag: 'tagname' + dropOnEmpty: false, + tree: false, + treeTag: 'ul', + overlap: 'vertical', // one of 'vertical', 'horizontal' + constraint: 'vertical', // one of 'vertical', 'horizontal', false + containment: element, // also takes array of elements (or id's); or false + handle: false, // or a CSS class + only: false, + delay: 0, + hoverclass: null, + ghosting: false, + scroll: false, + scrollSensitivity: 20, + scrollSpeed: 15, + format: this.SERIALIZE_RULE, + onChange: Prototype.emptyFunction, + onUpdate: Prototype.emptyFunction + }, arguments[1] || {}); + + // clear any old sortable with same element + this.destroy(element); + + // build options for the draggables + var options_for_draggable = { + revert: true, + scroll: options.scroll, + scrollSpeed: options.scrollSpeed, + scrollSensitivity: options.scrollSensitivity, + delay: options.delay, + ghosting: options.ghosting, + constraint: options.constraint, + handle: options.handle }; + + if(options.starteffect) + options_for_draggable.starteffect = options.starteffect; + + 
if(options.reverteffect) + options_for_draggable.reverteffect = options.reverteffect; + else + if(options.ghosting) options_for_draggable.reverteffect = function(element) { + element.style.top = 0; + element.style.left = 0; + }; + + if(options.endeffect) + options_for_draggable.endeffect = options.endeffect; + + if(options.zindex) + options_for_draggable.zindex = options.zindex; + + // build options for the droppables + var options_for_droppable = { + overlap: options.overlap, + containment: options.containment, + tree: options.tree, + hoverclass: options.hoverclass, + onHover: Sortable.onHover + } + + var options_for_tree = { + onHover: Sortable.onEmptyHover, + overlap: options.overlap, + containment: options.containment, + hoverclass: options.hoverclass + } + + // fix for gecko engine + Element.cleanWhitespace(element); + + options.draggables = []; + options.droppables = []; + + // drop on empty handling + if(options.dropOnEmpty || options.tree) { + Droppables.add(element, options_for_tree); + options.droppables.push(element); + } + + (this.findElements(element, options) || []).each( function(e) { + // handles are per-draggable + var handle = options.handle ? + $(e).down('.'+options.handle,0) : e; + options.draggables.push( + new Draggable(e, Object.extend(options_for_draggable, { handle: handle }))); + Droppables.add(e, options_for_droppable); + if(options.tree) e.treeNode = element; + options.droppables.push(e); + }); + + if(options.tree) { + (Sortable.findTreeElements(element, options) || []).each( function(e) { + Droppables.add(e, options_for_tree); + e.treeNode = element; + options.droppables.push(e); + }); + } + + // keep reference + this.sortables[element.id] = options; + + // for onupdate + Draggables.addObserver(new SortableObserver(element, options.onUpdate)); + + }, + + // return all suitable-for-sortable elements in a guaranteed order + findElements: function(element, options) { + return Element.findChildren( + element, options.only, options.tree ? 
true : false, options.tag); + }, + + findTreeElements: function(element, options) { + return Element.findChildren( + element, options.only, options.tree ? true : false, options.treeTag); + }, + + onHover: function(element, dropon, overlap) { + if(Element.isParent(dropon, element)) return; + + if(overlap > .33 && overlap < .66 && Sortable.options(dropon).tree) { + return; + } else if(overlap>0.5) { + Sortable.mark(dropon, 'before'); + if(dropon.previousSibling != element) { + var oldParentNode = element.parentNode; + element.style.visibility = "hidden"; // fix gecko rendering + dropon.parentNode.insertBefore(element, dropon); + if(dropon.parentNode!=oldParentNode) + Sortable.options(oldParentNode).onChange(element); + Sortable.options(dropon.parentNode).onChange(element); + } + } else { + Sortable.mark(dropon, 'after'); + var nextElement = dropon.nextSibling || null; + if(nextElement != element) { + var oldParentNode = element.parentNode; + element.style.visibility = "hidden"; // fix gecko rendering + dropon.parentNode.insertBefore(element, nextElement); + if(dropon.parentNode!=oldParentNode) + Sortable.options(oldParentNode).onChange(element); + Sortable.options(dropon.parentNode).onChange(element); + } + } + }, + + onEmptyHover: function(element, dropon, overlap) { + var oldParentNode = element.parentNode; + var droponOptions = Sortable.options(dropon); + + if(!Element.isParent(dropon, element)) { + var index; + + var children = Sortable.findElements(dropon, {tag: droponOptions.tag, only: droponOptions.only}); + var child = null; + + if(children) { + var offset = Element.offsetSize(dropon, droponOptions.overlap) * (1.0 - overlap); + + for (index = 0; index < children.length; index += 1) { + if (offset - Element.offsetSize (children[index], droponOptions.overlap) >= 0) { + offset -= Element.offsetSize (children[index], droponOptions.overlap); + } else if (offset - (Element.offsetSize (children[index], droponOptions.overlap) / 2) >= 0) { + child = index + 1 < 
children.length ? children[index + 1] : null; + break; + } else { + child = children[index]; + break; + } + } + } + + dropon.insertBefore(element, child); + + Sortable.options(oldParentNode).onChange(element); + droponOptions.onChange(element); + } + }, + + unmark: function() { + if(Sortable._marker) Sortable._marker.hide(); + }, + + mark: function(dropon, position) { + // mark on ghosting only + var sortable = Sortable.options(dropon.parentNode); + if(sortable && !sortable.ghosting) return; + + if(!Sortable._marker) { + Sortable._marker = + ($('dropmarker') || Element.extend(document.createElement('DIV'))). + hide().addClassName('dropmarker').setStyle({position:'absolute'}); + document.getElementsByTagName("body").item(0).appendChild(Sortable._marker); + } + var offsets = Position.cumulativeOffset(dropon); + Sortable._marker.setStyle({left: offsets[0]+'px', top: offsets[1] + 'px'}); + + if(position=='after') + if(sortable.overlap == 'horizontal') + Sortable._marker.setStyle({left: (offsets[0]+dropon.clientWidth) + 'px'}); + else + Sortable._marker.setStyle({top: (offsets[1]+dropon.clientHeight) + 'px'}); + + Sortable._marker.show(); + }, + + _tree: function(element, options, parent) { + var children = Sortable.findElements(element, options) || []; + + for (var i = 0; i < children.length; ++i) { + var match = children[i].id.match(options.format); + + if (!match) continue; + + var child = { + id: encodeURIComponent(match ? 
match[1] : null), + element: element, + parent: parent, + children: [], + position: parent.children.length, + container: $(children[i]).down(options.treeTag) + } + + /* Get the element containing the children and recurse over it */ + if (child.container) + this._tree(child.container, options, child) + + parent.children.push (child); + } + + return parent; + }, + + tree: function(element) { + element = $(element); + var sortableOptions = this.options(element); + var options = Object.extend({ + tag: sortableOptions.tag, + treeTag: sortableOptions.treeTag, + only: sortableOptions.only, + name: element.id, + format: sortableOptions.format + }, arguments[1] || {}); + + var root = { + id: null, + parent: null, + children: [], + container: element, + position: 0 + } + + return Sortable._tree(element, options, root); + }, + + /* Construct a [i] index for a particular node */ + _constructIndex: function(node) { + var index = ''; + do { + if (node.id) index = '[' + node.position + ']' + index; + } while ((node = node.parent) != null); + return index; + }, + + sequence: function(element) { + element = $(element); + var options = Object.extend(this.options(element), arguments[1] || {}); + + return $(this.findElements(element, options) || []).map( function(item) { + return item.id.match(options.format) ? 
item.id.match(options.format)[1] : ''; + }); + }, + + setSequence: function(element, new_sequence) { + element = $(element); + var options = Object.extend(this.options(element), arguments[2] || {}); + + var nodeMap = {}; + this.findElements(element, options).each( function(n) { + if (n.id.match(options.format)) + nodeMap[n.id.match(options.format)[1]] = [n, n.parentNode]; + n.parentNode.removeChild(n); + }); + + new_sequence.each(function(ident) { + var n = nodeMap[ident]; + if (n) { + n[1].appendChild(n[0]); + delete nodeMap[ident]; + } + }); + }, + + serialize: function(element) { + element = $(element); + var options = Object.extend(Sortable.options(element), arguments[1] || {}); + var name = encodeURIComponent( + (arguments[1] && arguments[1].name) ? arguments[1].name : element.id); + + if (options.tree) { + return Sortable.tree(element, arguments[1]).children.map( function (item) { + return [name + Sortable._constructIndex(item) + "[id]=" + + encodeURIComponent(item.id)].concat(item.children.map(arguments.callee)); + }).flatten().join('&'); + } else { + return Sortable.sequence(element, arguments[1]).map( function(item) { + return name + "[]=" + encodeURIComponent(item); + }).join('&'); + } + } +} + +// Returns true if child is contained within element +Element.isParent = function(child, element) { + if (!child.parentNode || child == element) return false; + if (child.parentNode == element) return true; + return Element.isParent(child.parentNode, element); +} + +Element.findChildren = function(element, only, recursive, tagName) { + if(!element.hasChildNodes()) return null; + tagName = tagName.toUpperCase(); + if(only) only = [only].flatten(); + var elements = []; + $A(element.childNodes).each( function(e) { + if(e.tagName && e.tagName.toUpperCase()==tagName && + (!only || (Element.classNames(e).detect(function(v) { return only.include(v) })))) + elements.push(e); + if(recursive) { + var grandchildren = Element.findChildren(e, only, recursive, tagName); + 
if(grandchildren) elements.push(grandchildren); + } + }); + + return (elements.length>0 ? elements.flatten() : []); +} + +Element.offsetSize = function (element, type) { + return element['offset' + ((type=='vertical' || type=='height') ? 'Height' : 'Width')]; +} diff --git a/solr/client/ruby/flare/public/javascripts/effects.js b/solr/client/ruby/flare/public/javascripts/effects.js new file mode 100644 index 00000000000..3b02eda2b29 --- /dev/null +++ b/solr/client/ruby/flare/public/javascripts/effects.js @@ -0,0 +1,1088 @@ +// Copyright (c) 2005, 2006 Thomas Fuchs (http://script.aculo.us, http://mir.aculo.us) +// Contributors: +// Justin Palmer (http://encytemedia.com/) +// Mark Pilgrim (http://diveintomark.org/) +// Martin Bialasinki +// +// script.aculo.us is freely distributable under the terms of an MIT-style license. +// For details, see the script.aculo.us web site: http://script.aculo.us/ + +// converts rgb() and #xxx to #xxxxxx format, +// returns self (or first argument) if not convertable +String.prototype.parseColor = function() { + var color = '#'; + if(this.slice(0,4) == 'rgb(') { + var cols = this.slice(4,this.length-1).split(','); + var i=0; do { color += parseInt(cols[i]).toColorPart() } while (++i<3); + } else { + if(this.slice(0,1) == '#') { + if(this.length==4) for(var i=1;i<4;i++) color += (this.charAt(i) + this.charAt(i)).toLowerCase(); + if(this.length==7) color = this.toLowerCase(); + } + } + return(color.length==7 ? color : (arguments[0] || this)); +} + +/*--------------------------------------------------------------------------*/ + +Element.collectTextNodes = function(element) { + return $A($(element).childNodes).collect( function(node) { + return (node.nodeType==3 ? node.nodeValue : + (node.hasChildNodes() ? Element.collectTextNodes(node) : '')); + }).flatten().join(''); +} + +Element.collectTextNodesIgnoreClass = function(element, className) { + return $A($(element).childNodes).collect( function(node) { + return (node.nodeType==3 ? 
node.nodeValue : + ((node.hasChildNodes() && !Element.hasClassName(node,className)) ? + Element.collectTextNodesIgnoreClass(node, className) : '')); + }).flatten().join(''); +} + +Element.setContentZoom = function(element, percent) { + element = $(element); + element.setStyle({fontSize: (percent/100) + 'em'}); + if(navigator.appVersion.indexOf('AppleWebKit')>0) window.scrollBy(0,0); + return element; +} + +Element.getOpacity = function(element){ + element = $(element); + var opacity; + if (opacity = element.getStyle('opacity')) + return parseFloat(opacity); + if (opacity = (element.getStyle('filter') || '').match(/alpha\(opacity=(.*)\)/)) + if(opacity[1]) return parseFloat(opacity[1]) / 100; + return 1.0; +} + +Element.setOpacity = function(element, value){ + element= $(element); + if (value == 1){ + element.setStyle({ opacity: + (/Gecko/.test(navigator.userAgent) && !/Konqueror|Safari|KHTML/.test(navigator.userAgent)) ? + 0.999999 : 1.0 }); + if(/MSIE/.test(navigator.userAgent) && !window.opera) + element.setStyle({filter: Element.getStyle(element,'filter').replace(/alpha\([^\)]*\)/gi,'')}); + } else { + if(value < 0.00001) value = 0; + element.setStyle({opacity: value}); + if(/MSIE/.test(navigator.userAgent) && !window.opera) + element.setStyle( + { filter: element.getStyle('filter').replace(/alpha\([^\)]*\)/gi,'') + + 'alpha(opacity='+value*100+')' }); + } + return element; +} + +Element.getInlineOpacity = function(element){ + return $(element).style.opacity || ''; +} + +Element.forceRerendering = function(element) { + try { + element = $(element); + var n = document.createTextNode(' '); + element.appendChild(n); + element.removeChild(n); + } catch(e) { } +}; + +/*--------------------------------------------------------------------------*/ + +Array.prototype.call = function() { + var args = arguments; + this.each(function(f){ f.apply(this, args) }); +} + +/*--------------------------------------------------------------------------*/ + +var Effect = { + 
_elementDoesNotExistError: { + name: 'ElementDoesNotExistError', + message: 'The specified DOM element does not exist, but is required for this effect to operate' + }, + tagifyText: function(element) { + if(typeof Builder == 'undefined') + throw("Effect.tagifyText requires including script.aculo.us' builder.js library"); + + var tagifyStyle = 'position:relative'; + if(/MSIE/.test(navigator.userAgent) && !window.opera) tagifyStyle += ';zoom:1'; + + element = $(element); + $A(element.childNodes).each( function(child) { + if(child.nodeType==3) { + child.nodeValue.toArray().each( function(character) { + element.insertBefore( + Builder.node('span',{style: tagifyStyle}, + character == ' ' ? String.fromCharCode(160) : character), + child); + }); + Element.remove(child); + } + }); + }, + multiple: function(element, effect) { + var elements; + if(((typeof element == 'object') || + (typeof element == 'function')) && + (element.length)) + elements = element; + else + elements = $(element).childNodes; + + var options = Object.extend({ + speed: 0.1, + delay: 0.0 + }, arguments[2] || {}); + var masterDelay = options.delay; + + $A(elements).each( function(element, index) { + new effect(element, Object.extend(options, { delay: index * options.speed + masterDelay })); + }); + }, + PAIRS: { + 'slide': ['SlideDown','SlideUp'], + 'blind': ['BlindDown','BlindUp'], + 'appear': ['Appear','Fade'] + }, + toggle: function(element, effect) { + element = $(element); + effect = (effect || 'appear').toLowerCase(); + var options = Object.extend({ + queue: { position:'end', scope:(element.id || 'global'), limit: 1 } + }, arguments[2] || {}); + Effect[element.visible() ? 
+ Effect.PAIRS[effect][1] : Effect.PAIRS[effect][0]](element, options); + } +}; + +var Effect2 = Effect; // deprecated + +/* ------------- transitions ------------- */ + +Effect.Transitions = { + linear: Prototype.K, + sinoidal: function(pos) { + return (-Math.cos(pos*Math.PI)/2) + 0.5; + }, + reverse: function(pos) { + return 1-pos; + }, + flicker: function(pos) { + return ((-Math.cos(pos*Math.PI)/4) + 0.75) + Math.random()/4; + }, + wobble: function(pos) { + return (-Math.cos(pos*Math.PI*(9*pos))/2) + 0.5; + }, + pulse: function(pos, pulses) { + pulses = pulses || 5; + return ( + Math.round((pos % (1/pulses)) * pulses) == 0 ? + ((pos * pulses * 2) - Math.floor(pos * pulses * 2)) : + 1 - ((pos * pulses * 2) - Math.floor(pos * pulses * 2)) + ); + }, + none: function(pos) { + return 0; + }, + full: function(pos) { + return 1; + } +}; + +/* ------------- core effects ------------- */ + +Effect.ScopedQueue = Class.create(); +Object.extend(Object.extend(Effect.ScopedQueue.prototype, Enumerable), { + initialize: function() { + this.effects = []; + this.interval = null; + }, + _each: function(iterator) { + this.effects._each(iterator); + }, + add: function(effect) { + var timestamp = new Date().getTime(); + + var position = (typeof effect.options.queue == 'string') ? 
+ effect.options.queue : effect.options.queue.position; + + switch(position) { + case 'front': + // move unstarted effects after this effect + this.effects.findAll(function(e){ return e.state=='idle' }).each( function(e) { + e.startOn += effect.finishOn; + e.finishOn += effect.finishOn; + }); + break; + case 'with-last': + timestamp = this.effects.pluck('startOn').max() || timestamp; + break; + case 'end': + // start effect after last queued effect has finished + timestamp = this.effects.pluck('finishOn').max() || timestamp; + break; + } + + effect.startOn += timestamp; + effect.finishOn += timestamp; + + if(!effect.options.queue.limit || (this.effects.length < effect.options.queue.limit)) + this.effects.push(effect); + + if(!this.interval) + this.interval = setInterval(this.loop.bind(this), 40); + }, + remove: function(effect) { + this.effects = this.effects.reject(function(e) { return e==effect }); + if(this.effects.length == 0) { + clearInterval(this.interval); + this.interval = null; + } + }, + loop: function() { + var timePos = new Date().getTime(); + this.effects.invoke('loop', timePos); + } +}); + +Effect.Queues = { + instances: $H(), + get: function(queueName) { + if(typeof queueName != 'string') return queueName; + + if(!this.instances[queueName]) + this.instances[queueName] = new Effect.ScopedQueue(); + + return this.instances[queueName]; + } +} +Effect.Queue = Effect.Queues.get('global'); + +Effect.DefaultOptions = { + transition: Effect.Transitions.sinoidal, + duration: 1.0, // seconds + fps: 25.0, // max. 
25fps due to Effect.Queue implementation + sync: false, // true for combining + from: 0.0, + to: 1.0, + delay: 0.0, + queue: 'parallel' +} + +Effect.Base = function() {}; +Effect.Base.prototype = { + position: null, + start: function(options) { + this.options = Object.extend(Object.extend({},Effect.DefaultOptions), options || {}); + this.currentFrame = 0; + this.state = 'idle'; + this.startOn = this.options.delay*1000; + this.finishOn = this.startOn + (this.options.duration*1000); + this.event('beforeStart'); + if(!this.options.sync) + Effect.Queues.get(typeof this.options.queue == 'string' ? + 'global' : this.options.queue.scope).add(this); + }, + loop: function(timePos) { + if(timePos >= this.startOn) { + if(timePos >= this.finishOn) { + this.render(1.0); + this.cancel(); + this.event('beforeFinish'); + if(this.finish) this.finish(); + this.event('afterFinish'); + return; + } + var pos = (timePos - this.startOn) / (this.finishOn - this.startOn); + var frame = Math.round(pos * this.options.fps * this.options.duration); + if(frame > this.currentFrame) { + this.render(pos); + this.currentFrame = frame; + } + } + }, + render: function(pos) { + if(this.state == 'idle') { + this.state = 'running'; + this.event('beforeSetup'); + if(this.setup) this.setup(); + this.event('afterSetup'); + } + if(this.state == 'running') { + if(this.options.transition) pos = this.options.transition(pos); + pos *= (this.options.to-this.options.from); + pos += this.options.from; + this.position = pos; + this.event('beforeUpdate'); + if(this.update) this.update(pos); + this.event('afterUpdate'); + } + }, + cancel: function() { + if(!this.options.sync) + Effect.Queues.get(typeof this.options.queue == 'string' ? 
+ 'global' : this.options.queue.scope).remove(this); + this.state = 'finished'; + }, + event: function(eventName) { + if(this.options[eventName + 'Internal']) this.options[eventName + 'Internal'](this); + if(this.options[eventName]) this.options[eventName](this); + }, + inspect: function() { + return '#'; + } +} + +Effect.Parallel = Class.create(); +Object.extend(Object.extend(Effect.Parallel.prototype, Effect.Base.prototype), { + initialize: function(effects) { + this.effects = effects || []; + this.start(arguments[1]); + }, + update: function(position) { + this.effects.invoke('render', position); + }, + finish: function(position) { + this.effects.each( function(effect) { + effect.render(1.0); + effect.cancel(); + effect.event('beforeFinish'); + if(effect.finish) effect.finish(position); + effect.event('afterFinish'); + }); + } +}); + +Effect.Event = Class.create(); +Object.extend(Object.extend(Effect.Event.prototype, Effect.Base.prototype), { + initialize: function() { + var options = Object.extend({ + duration: 0 + }, arguments[0] || {}); + this.start(options); + }, + update: Prototype.emptyFunction +}); + +Effect.Opacity = Class.create(); +Object.extend(Object.extend(Effect.Opacity.prototype, Effect.Base.prototype), { + initialize: function(element) { + this.element = $(element); + if(!this.element) throw(Effect._elementDoesNotExistError); + // make this work on IE on elements without 'layout' + if(/MSIE/.test(navigator.userAgent) && !window.opera && (!this.element.currentStyle.hasLayout)) + this.element.setStyle({zoom: 1}); + var options = Object.extend({ + from: this.element.getOpacity() || 0.0, + to: 1.0 + }, arguments[1] || {}); + this.start(options); + }, + update: function(position) { + this.element.setOpacity(position); + } +}); + +Effect.Move = Class.create(); +Object.extend(Object.extend(Effect.Move.prototype, Effect.Base.prototype), { + initialize: function(element) { + this.element = $(element); + if(!this.element) 
throw(Effect._elementDoesNotExistError); + var options = Object.extend({ + x: 0, + y: 0, + mode: 'relative' + }, arguments[1] || {}); + this.start(options); + }, + setup: function() { + // Bug in Opera: Opera returns the "real" position of a static element or + // relative element that does not have top/left explicitly set. + // ==> Always set top and left for position relative elements in your stylesheets + // (to 0 if you do not need them) + this.element.makePositioned(); + this.originalLeft = parseFloat(this.element.getStyle('left') || '0'); + this.originalTop = parseFloat(this.element.getStyle('top') || '0'); + if(this.options.mode == 'absolute') { + // absolute movement, so we need to calc deltaX and deltaY + this.options.x = this.options.x - this.originalLeft; + this.options.y = this.options.y - this.originalTop; + } + }, + update: function(position) { + this.element.setStyle({ + left: Math.round(this.options.x * position + this.originalLeft) + 'px', + top: Math.round(this.options.y * position + this.originalTop) + 'px' + }); + } +}); + +// for backwards compatibility +Effect.MoveBy = function(element, toTop, toLeft) { + return new Effect.Move(element, + Object.extend({ x: toLeft, y: toTop }, arguments[3] || {})); +}; + +Effect.Scale = Class.create(); +Object.extend(Object.extend(Effect.Scale.prototype, Effect.Base.prototype), { + initialize: function(element, percent) { + this.element = $(element); + if(!this.element) throw(Effect._elementDoesNotExistError); + var options = Object.extend({ + scaleX: true, + scaleY: true, + scaleContent: true, + scaleFromCenter: false, + scaleMode: 'box', // 'box' or 'contents' or {} with provided values + scaleFrom: 100.0, + scaleTo: percent + }, arguments[2] || {}); + this.start(options); + }, + setup: function() { + this.restoreAfterFinish = this.options.restoreAfterFinish || false; + this.elementPositioning = this.element.getStyle('position'); + + this.originalStyle = {}; + ['top','left','width','height','fontSize'].each( 
function(k) { + this.originalStyle[k] = this.element.style[k]; + }.bind(this)); + + this.originalTop = this.element.offsetTop; + this.originalLeft = this.element.offsetLeft; + + var fontSize = this.element.getStyle('font-size') || '100%'; + ['em','px','%','pt'].each( function(fontSizeType) { + if(fontSize.indexOf(fontSizeType)>0) { + this.fontSize = parseFloat(fontSize); + this.fontSizeType = fontSizeType; + } + }.bind(this)); + + this.factor = (this.options.scaleTo - this.options.scaleFrom)/100; + + this.dims = null; + if(this.options.scaleMode=='box') + this.dims = [this.element.offsetHeight, this.element.offsetWidth]; + if(/^content/.test(this.options.scaleMode)) + this.dims = [this.element.scrollHeight, this.element.scrollWidth]; + if(!this.dims) + this.dims = [this.options.scaleMode.originalHeight, + this.options.scaleMode.originalWidth]; + }, + update: function(position) { + var currentScale = (this.options.scaleFrom/100.0) + (this.factor * position); + if(this.options.scaleContent && this.fontSize) + this.element.setStyle({fontSize: this.fontSize * currentScale + this.fontSizeType }); + this.setDimensions(this.dims[0] * currentScale, this.dims[1] * currentScale); + }, + finish: function(position) { + if(this.restoreAfterFinish) this.element.setStyle(this.originalStyle); + }, + setDimensions: function(height, width) { + var d = {}; + if(this.options.scaleX) d.width = Math.round(width) + 'px'; + if(this.options.scaleY) d.height = Math.round(height) + 'px'; + if(this.options.scaleFromCenter) { + var topd = (height - this.dims[0])/2; + var leftd = (width - this.dims[1])/2; + if(this.elementPositioning == 'absolute') { + if(this.options.scaleY) d.top = this.originalTop-topd + 'px'; + if(this.options.scaleX) d.left = this.originalLeft-leftd + 'px'; + } else { + if(this.options.scaleY) d.top = -topd + 'px'; + if(this.options.scaleX) d.left = -leftd + 'px'; + } + } + this.element.setStyle(d); + } +}); + +Effect.Highlight = Class.create(); 
+Object.extend(Object.extend(Effect.Highlight.prototype, Effect.Base.prototype), { + initialize: function(element) { + this.element = $(element); + if(!this.element) throw(Effect._elementDoesNotExistError); + var options = Object.extend({ startcolor: '#ffff99' }, arguments[1] || {}); + this.start(options); + }, + setup: function() { + // Prevent executing on elements not in the layout flow + if(this.element.getStyle('display')=='none') { this.cancel(); return; } + // Disable background image during the effect + this.oldStyle = { + backgroundImage: this.element.getStyle('background-image') }; + this.element.setStyle({backgroundImage: 'none'}); + if(!this.options.endcolor) + this.options.endcolor = this.element.getStyle('background-color').parseColor('#ffffff'); + if(!this.options.restorecolor) + this.options.restorecolor = this.element.getStyle('background-color'); + // init color calculations + this._base = $R(0,2).map(function(i){ return parseInt(this.options.startcolor.slice(i*2+1,i*2+3),16) }.bind(this)); + this._delta = $R(0,2).map(function(i){ return parseInt(this.options.endcolor.slice(i*2+1,i*2+3),16)-this._base[i] }.bind(this)); + }, + update: function(position) { + this.element.setStyle({backgroundColor: $R(0,2).inject('#',function(m,v,i){ + return m+(Math.round(this._base[i]+(this._delta[i]*position)).toColorPart()); }.bind(this)) }); + }, + finish: function() { + this.element.setStyle(Object.extend(this.oldStyle, { + backgroundColor: this.options.restorecolor + })); + } +}); + +Effect.ScrollTo = Class.create(); +Object.extend(Object.extend(Effect.ScrollTo.prototype, Effect.Base.prototype), { + initialize: function(element) { + this.element = $(element); + this.start(arguments[1] || {}); + }, + setup: function() { + Position.prepare(); + var offsets = Position.cumulativeOffset(this.element); + if(this.options.offset) offsets[1] += this.options.offset; + var max = window.innerHeight ? 
+ window.height - window.innerHeight : + document.body.scrollHeight - + (document.documentElement.clientHeight ? + document.documentElement.clientHeight : document.body.clientHeight); + this.scrollStart = Position.deltaY; + this.delta = (offsets[1] > max ? max : offsets[1]) - this.scrollStart; + }, + update: function(position) { + Position.prepare(); + window.scrollTo(Position.deltaX, + this.scrollStart + (position*this.delta)); + } +}); + +/* ------------- combination effects ------------- */ + +Effect.Fade = function(element) { + element = $(element); + var oldOpacity = element.getInlineOpacity(); + var options = Object.extend({ + from: element.getOpacity() || 1.0, + to: 0.0, + afterFinishInternal: function(effect) { + if(effect.options.to!=0) return; + effect.element.hide().setStyle({opacity: oldOpacity}); + }}, arguments[1] || {}); + return new Effect.Opacity(element,options); +} + +Effect.Appear = function(element) { + element = $(element); + var options = Object.extend({ + from: (element.getStyle('display') == 'none' ? 
0.0 : element.getOpacity() || 0.0), + to: 1.0, + // force Safari to render floated elements properly + afterFinishInternal: function(effect) { + effect.element.forceRerendering(); + }, + beforeSetup: function(effect) { + effect.element.setOpacity(effect.options.from).show(); + }}, arguments[1] || {}); + return new Effect.Opacity(element,options); +} + +Effect.Puff = function(element) { + element = $(element); + var oldStyle = { + opacity: element.getInlineOpacity(), + position: element.getStyle('position'), + top: element.style.top, + left: element.style.left, + width: element.style.width, + height: element.style.height + }; + return new Effect.Parallel( + [ new Effect.Scale(element, 200, + { sync: true, scaleFromCenter: true, scaleContent: true, restoreAfterFinish: true }), + new Effect.Opacity(element, { sync: true, to: 0.0 } ) ], + Object.extend({ duration: 1.0, + beforeSetupInternal: function(effect) { + Position.absolutize(effect.effects[0].element) + }, + afterFinishInternal: function(effect) { + effect.effects[0].element.hide().setStyle(oldStyle); } + }, arguments[1] || {}) + ); +} + +Effect.BlindUp = function(element) { + element = $(element); + element.makeClipping(); + return new Effect.Scale(element, 0, + Object.extend({ scaleContent: false, + scaleX: false, + restoreAfterFinish: true, + afterFinishInternal: function(effect) { + effect.element.hide().undoClipping(); + } + }, arguments[1] || {}) + ); +} + +Effect.BlindDown = function(element) { + element = $(element); + var elementDimensions = element.getDimensions(); + return new Effect.Scale(element, 100, Object.extend({ + scaleContent: false, + scaleX: false, + scaleFrom: 0, + scaleMode: {originalHeight: elementDimensions.height, originalWidth: elementDimensions.width}, + restoreAfterFinish: true, + afterSetup: function(effect) { + effect.element.makeClipping().setStyle({height: '0px'}).show(); + }, + afterFinishInternal: function(effect) { + effect.element.undoClipping(); + } + }, arguments[1] || 
{})); +} + +Effect.SwitchOff = function(element) { + element = $(element); + var oldOpacity = element.getInlineOpacity(); + return new Effect.Appear(element, Object.extend({ + duration: 0.4, + from: 0, + transition: Effect.Transitions.flicker, + afterFinishInternal: function(effect) { + new Effect.Scale(effect.element, 1, { + duration: 0.3, scaleFromCenter: true, + scaleX: false, scaleContent: false, restoreAfterFinish: true, + beforeSetup: function(effect) { + effect.element.makePositioned().makeClipping(); + }, + afterFinishInternal: function(effect) { + effect.element.hide().undoClipping().undoPositioned().setStyle({opacity: oldOpacity}); + } + }) + } + }, arguments[1] || {})); +} + +Effect.DropOut = function(element) { + element = $(element); + var oldStyle = { + top: element.getStyle('top'), + left: element.getStyle('left'), + opacity: element.getInlineOpacity() }; + return new Effect.Parallel( + [ new Effect.Move(element, {x: 0, y: 100, sync: true }), + new Effect.Opacity(element, { sync: true, to: 0.0 }) ], + Object.extend( + { duration: 0.5, + beforeSetup: function(effect) { + effect.effects[0].element.makePositioned(); + }, + afterFinishInternal: function(effect) { + effect.effects[0].element.hide().undoPositioned().setStyle(oldStyle); + } + }, arguments[1] || {})); +} + +Effect.Shake = function(element) { + element = $(element); + var oldStyle = { + top: element.getStyle('top'), + left: element.getStyle('left') }; + return new Effect.Move(element, + { x: 20, y: 0, duration: 0.05, afterFinishInternal: function(effect) { + new Effect.Move(effect.element, + { x: -40, y: 0, duration: 0.1, afterFinishInternal: function(effect) { + new Effect.Move(effect.element, + { x: 40, y: 0, duration: 0.1, afterFinishInternal: function(effect) { + new Effect.Move(effect.element, + { x: -40, y: 0, duration: 0.1, afterFinishInternal: function(effect) { + new Effect.Move(effect.element, + { x: 40, y: 0, duration: 0.1, afterFinishInternal: function(effect) { + new 
Effect.Move(effect.element, + { x: -20, y: 0, duration: 0.05, afterFinishInternal: function(effect) { + effect.element.undoPositioned().setStyle(oldStyle); + }}) }}) }}) }}) }}) }}); +} + +Effect.SlideDown = function(element) { + element = $(element).cleanWhitespace(); + // SlideDown need to have the content of the element wrapped in a container element with fixed height! + var oldInnerBottom = element.down().getStyle('bottom'); + var elementDimensions = element.getDimensions(); + return new Effect.Scale(element, 100, Object.extend({ + scaleContent: false, + scaleX: false, + scaleFrom: window.opera ? 0 : 1, + scaleMode: {originalHeight: elementDimensions.height, originalWidth: elementDimensions.width}, + restoreAfterFinish: true, + afterSetup: function(effect) { + effect.element.makePositioned(); + effect.element.down().makePositioned(); + if(window.opera) effect.element.setStyle({top: ''}); + effect.element.makeClipping().setStyle({height: '0px'}).show(); + }, + afterUpdateInternal: function(effect) { + effect.element.down().setStyle({bottom: + (effect.dims[0] - effect.element.clientHeight) + 'px' }); + }, + afterFinishInternal: function(effect) { + effect.element.undoClipping().undoPositioned(); + effect.element.down().undoPositioned().setStyle({bottom: oldInnerBottom}); } + }, arguments[1] || {}) + ); +} + +Effect.SlideUp = function(element) { + element = $(element).cleanWhitespace(); + var oldInnerBottom = element.down().getStyle('bottom'); + return new Effect.Scale(element, window.opera ? 
0 : 1, + Object.extend({ scaleContent: false, + scaleX: false, + scaleMode: 'box', + scaleFrom: 100, + restoreAfterFinish: true, + beforeStartInternal: function(effect) { + effect.element.makePositioned(); + effect.element.down().makePositioned(); + if(window.opera) effect.element.setStyle({top: ''}); + effect.element.makeClipping().show(); + }, + afterUpdateInternal: function(effect) { + effect.element.down().setStyle({bottom: + (effect.dims[0] - effect.element.clientHeight) + 'px' }); + }, + afterFinishInternal: function(effect) { + effect.element.hide().undoClipping().undoPositioned().setStyle({bottom: oldInnerBottom}); + effect.element.down().undoPositioned(); + } + }, arguments[1] || {}) + ); +} + +// Bug in opera makes the TD containing this element expand for a instance after finish +Effect.Squish = function(element) { + return new Effect.Scale(element, window.opera ? 1 : 0, { + restoreAfterFinish: true, + beforeSetup: function(effect) { + effect.element.makeClipping(); + }, + afterFinishInternal: function(effect) { + effect.element.hide().undoClipping(); + } + }); +} + +Effect.Grow = function(element) { + element = $(element); + var options = Object.extend({ + direction: 'center', + moveTransition: Effect.Transitions.sinoidal, + scaleTransition: Effect.Transitions.sinoidal, + opacityTransition: Effect.Transitions.full + }, arguments[1] || {}); + var oldStyle = { + top: element.style.top, + left: element.style.left, + height: element.style.height, + width: element.style.width, + opacity: element.getInlineOpacity() }; + + var dims = element.getDimensions(); + var initialMoveX, initialMoveY; + var moveX, moveY; + + switch (options.direction) { + case 'top-left': + initialMoveX = initialMoveY = moveX = moveY = 0; + break; + case 'top-right': + initialMoveX = dims.width; + initialMoveY = moveY = 0; + moveX = -dims.width; + break; + case 'bottom-left': + initialMoveX = moveX = 0; + initialMoveY = dims.height; + moveY = -dims.height; + break; + case 
'bottom-right': + initialMoveX = dims.width; + initialMoveY = dims.height; + moveX = -dims.width; + moveY = -dims.height; + break; + case 'center': + initialMoveX = dims.width / 2; + initialMoveY = dims.height / 2; + moveX = -dims.width / 2; + moveY = -dims.height / 2; + break; + } + + return new Effect.Move(element, { + x: initialMoveX, + y: initialMoveY, + duration: 0.01, + beforeSetup: function(effect) { + effect.element.hide().makeClipping().makePositioned(); + }, + afterFinishInternal: function(effect) { + new Effect.Parallel( + [ new Effect.Opacity(effect.element, { sync: true, to: 1.0, from: 0.0, transition: options.opacityTransition }), + new Effect.Move(effect.element, { x: moveX, y: moveY, sync: true, transition: options.moveTransition }), + new Effect.Scale(effect.element, 100, { + scaleMode: { originalHeight: dims.height, originalWidth: dims.width }, + sync: true, scaleFrom: window.opera ? 1 : 0, transition: options.scaleTransition, restoreAfterFinish: true}) + ], Object.extend({ + beforeSetup: function(effect) { + effect.effects[0].element.setStyle({height: '0px'}).show(); + }, + afterFinishInternal: function(effect) { + effect.effects[0].element.undoClipping().undoPositioned().setStyle(oldStyle); + } + }, options) + ) + } + }); +} + +Effect.Shrink = function(element) { + element = $(element); + var options = Object.extend({ + direction: 'center', + moveTransition: Effect.Transitions.sinoidal, + scaleTransition: Effect.Transitions.sinoidal, + opacityTransition: Effect.Transitions.none + }, arguments[1] || {}); + var oldStyle = { + top: element.style.top, + left: element.style.left, + height: element.style.height, + width: element.style.width, + opacity: element.getInlineOpacity() }; + + var dims = element.getDimensions(); + var moveX, moveY; + + switch (options.direction) { + case 'top-left': + moveX = moveY = 0; + break; + case 'top-right': + moveX = dims.width; + moveY = 0; + break; + case 'bottom-left': + moveX = 0; + moveY = dims.height; + break; + 
case 'bottom-right': + moveX = dims.width; + moveY = dims.height; + break; + case 'center': + moveX = dims.width / 2; + moveY = dims.height / 2; + break; + } + + return new Effect.Parallel( + [ new Effect.Opacity(element, { sync: true, to: 0.0, from: 1.0, transition: options.opacityTransition }), + new Effect.Scale(element, window.opera ? 1 : 0, { sync: true, transition: options.scaleTransition, restoreAfterFinish: true}), + new Effect.Move(element, { x: moveX, y: moveY, sync: true, transition: options.moveTransition }) + ], Object.extend({ + beforeStartInternal: function(effect) { + effect.effects[0].element.makePositioned().makeClipping(); + }, + afterFinishInternal: function(effect) { + effect.effects[0].element.hide().undoClipping().undoPositioned().setStyle(oldStyle); } + }, options) + ); +} + +Effect.Pulsate = function(element) { + element = $(element); + var options = arguments[1] || {}; + var oldOpacity = element.getInlineOpacity(); + var transition = options.transition || Effect.Transitions.sinoidal; + var reverser = function(pos){ return transition(1-Effect.Transitions.pulse(pos, options.pulses)) }; + reverser.bind(transition); + return new Effect.Opacity(element, + Object.extend(Object.extend({ duration: 2.0, from: 0, + afterFinishInternal: function(effect) { effect.element.setStyle({opacity: oldOpacity}); } + }, options), {transition: reverser})); +} + +Effect.Fold = function(element) { + element = $(element); + var oldStyle = { + top: element.style.top, + left: element.style.left, + width: element.style.width, + height: element.style.height }; + element.makeClipping(); + return new Effect.Scale(element, 5, Object.extend({ + scaleContent: false, + scaleX: false, + afterFinishInternal: function(effect) { + new Effect.Scale(element, 1, { + scaleContent: false, + scaleY: false, + afterFinishInternal: function(effect) { + effect.element.hide().undoClipping().setStyle(oldStyle); + } }); + }}, arguments[1] || {})); +}; + +Effect.Morph = Class.create(); 
+Object.extend(Object.extend(Effect.Morph.prototype, Effect.Base.prototype), { + initialize: function(element) { + this.element = $(element); + if(!this.element) throw(Effect._elementDoesNotExistError); + var options = Object.extend({ + style: '' + }, arguments[1] || {}); + this.start(options); + }, + setup: function(){ + function parseColor(color){ + if(!color || ['rgba(0, 0, 0, 0)','transparent'].include(color)) color = '#ffffff'; + color = color.parseColor(); + return $R(0,2).map(function(i){ + return parseInt( color.slice(i*2+1,i*2+3), 16 ) + }); + } + this.transforms = this.options.style.parseStyle().map(function(property){ + var originalValue = this.element.getStyle(property[0]); + return $H({ + style: property[0], + originalValue: property[1].unit=='color' ? + parseColor(originalValue) : parseFloat(originalValue || 0), + targetValue: property[1].unit=='color' ? + parseColor(property[1].value) : property[1].value, + unit: property[1].unit + }); + }.bind(this)).reject(function(transform){ + return ( + (transform.originalValue == transform.targetValue) || + ( + transform.unit != 'color' && + (isNaN(transform.originalValue) || isNaN(transform.targetValue)) + ) + ) + }); + }, + update: function(position) { + var style = $H(), value = null; + this.transforms.each(function(transform){ + value = transform.unit=='color' ? 
+ $R(0,2).inject('#',function(m,v,i){ + return m+(Math.round(transform.originalValue[i]+ + (transform.targetValue[i] - transform.originalValue[i])*position)).toColorPart() }) : + transform.originalValue + Math.round( + ((transform.targetValue - transform.originalValue) * position) * 1000)/1000 + transform.unit; + style[transform.style] = value; + }); + this.element.setStyle(style); + } +}); + +Effect.Transform = Class.create(); +Object.extend(Effect.Transform.prototype, { + initialize: function(tracks){ + this.tracks = []; + this.options = arguments[1] || {}; + this.addTracks(tracks); + }, + addTracks: function(tracks){ + tracks.each(function(track){ + var data = $H(track).values().first(); + this.tracks.push($H({ + ids: $H(track).keys().first(), + effect: Effect.Morph, + options: { style: data } + })); + }.bind(this)); + return this; + }, + play: function(){ + return new Effect.Parallel( + this.tracks.map(function(track){ + var elements = [$(track.ids) || $$(track.ids)].flatten(); + return elements.map(function(e){ return new track.effect(e, Object.extend({ sync:true }, track.options)) }); + }).flatten(), + this.options + ); + } +}); + +Element.CSS_PROPERTIES = ['azimuth', 'backgroundAttachment', 'backgroundColor', 'backgroundImage', + 'backgroundPosition', 'backgroundRepeat', 'borderBottomColor', 'borderBottomStyle', + 'borderBottomWidth', 'borderCollapse', 'borderLeftColor', 'borderLeftStyle', 'borderLeftWidth', + 'borderRightColor', 'borderRightStyle', 'borderRightWidth', 'borderSpacing', 'borderTopColor', + 'borderTopStyle', 'borderTopWidth', 'bottom', 'captionSide', 'clear', 'clip', 'color', 'content', + 'counterIncrement', 'counterReset', 'cssFloat', 'cueAfter', 'cueBefore', 'cursor', 'direction', + 'display', 'elevation', 'emptyCells', 'fontFamily', 'fontSize', 'fontSizeAdjust', 'fontStretch', + 'fontStyle', 'fontVariant', 'fontWeight', 'height', 'left', 'letterSpacing', 'lineHeight', + 'listStyleImage', 'listStylePosition', 'listStyleType', 'marginBottom', 
'marginLeft', 'marginRight', + 'marginTop', 'markerOffset', 'marks', 'maxHeight', 'maxWidth', 'minHeight', 'minWidth', 'opacity', + 'orphans', 'outlineColor', 'outlineOffset', 'outlineStyle', 'outlineWidth', 'overflowX', 'overflowY', + 'paddingBottom', 'paddingLeft', 'paddingRight', 'paddingTop', 'page', 'pageBreakAfter', 'pageBreakBefore', + 'pageBreakInside', 'pauseAfter', 'pauseBefore', 'pitch', 'pitchRange', 'position', 'quotes', + 'richness', 'right', 'size', 'speakHeader', 'speakNumeral', 'speakPunctuation', 'speechRate', 'stress', + 'tableLayout', 'textAlign', 'textDecoration', 'textIndent', 'textShadow', 'textTransform', 'top', + 'unicodeBidi', 'verticalAlign', 'visibility', 'voiceFamily', 'volume', 'whiteSpace', 'widows', + 'width', 'wordSpacing', 'zIndex']; + +Element.CSS_LENGTH = /^(([\+\-]?[0-9\.]+)(em|ex|px|in|cm|mm|pt|pc|\%))|0$/; + +String.prototype.parseStyle = function(){ + var element = Element.extend(document.createElement('div')); + element.innerHTML = '

    '; + var style = element.down().style, styleRules = $H(); + + Element.CSS_PROPERTIES.each(function(property){ + if(style[property]) styleRules[property] = style[property]; + }); + + var result = $H(); + + styleRules.each(function(pair){ + var property = pair[0], value = pair[1], unit = null; + + if(value.parseColor('#zzzzzz') != '#zzzzzz') { + value = value.parseColor(); + unit = 'color'; + } else if(Element.CSS_LENGTH.test(value)) + var components = value.match(/^([\+\-]?[0-9\.]+)(.*)$/), + value = parseFloat(components[1]), unit = (components.length == 3) ? components[2] : null; + + result[property.underscore().dasherize()] = $H({ value:value, unit:unit }); + }.bind(this)); + + return result; +}; + +Element.morph = function(element, style) { + new Effect.Morph(element, Object.extend({ style: style }, arguments[2] || {})); + return element; +}; + +['setOpacity','getOpacity','getInlineOpacity','forceRerendering','setContentZoom', + 'collectTextNodes','collectTextNodesIgnoreClass','morph'].each( + function(f) { Element.Methods[f] = Element[f]; } +); + +Element.Methods.visualEffect = function(element, effect, options) { + s = effect.gsub(/_/, '-').camelize(); + effect_class = s.charAt(0).toUpperCase() + s.substring(1); + new Effect[effect_class](element, options); + return $(element); +}; + +Element.addMethods(); \ No newline at end of file diff --git a/solr/client/ruby/flare/public/javascripts/prototype.js b/solr/client/ruby/flare/public/javascripts/prototype.js new file mode 100644 index 00000000000..505822177b8 --- /dev/null +++ b/solr/client/ruby/flare/public/javascripts/prototype.js @@ -0,0 +1,2515 @@ +/* Prototype JavaScript framework, version 1.5.0 + * (c) 2005-2007 Sam Stephenson + * + * Prototype is freely distributable under the terms of an MIT-style license. 
+ * For details, see the Prototype web site: http://prototype.conio.net/ + * +/*--------------------------------------------------------------------------*/ + +var Prototype = { + Version: '1.5.0', + BrowserFeatures: { + XPath: !!document.evaluate + }, + + ScriptFragment: '(?:)((\n|\r|.)*?)(?:<\/script>)', + emptyFunction: function() {}, + K: function(x) { return x } +} + +var Class = { + create: function() { + return function() { + this.initialize.apply(this, arguments); + } + } +} + +var Abstract = new Object(); + +Object.extend = function(destination, source) { + for (var property in source) { + destination[property] = source[property]; + } + return destination; +} + +Object.extend(Object, { + inspect: function(object) { + try { + if (object === undefined) return 'undefined'; + if (object === null) return 'null'; + return object.inspect ? object.inspect() : object.toString(); + } catch (e) { + if (e instanceof RangeError) return '...'; + throw e; + } + }, + + keys: function(object) { + var keys = []; + for (var property in object) + keys.push(property); + return keys; + }, + + values: function(object) { + var values = []; + for (var property in object) + values.push(object[property]); + return values; + }, + + clone: function(object) { + return Object.extend({}, object); + } +}); + +Function.prototype.bind = function() { + var __method = this, args = $A(arguments), object = args.shift(); + return function() { + return __method.apply(object, args.concat($A(arguments))); + } +} + +Function.prototype.bindAsEventListener = function(object) { + var __method = this, args = $A(arguments), object = args.shift(); + return function(event) { + return __method.apply(object, [( event || window.event)].concat(args).concat($A(arguments))); + } +} + +Object.extend(Number.prototype, { + toColorPart: function() { + var digits = this.toString(16); + if (this < 16) return '0' + digits; + return digits; + }, + + succ: function() { + return this + 1; + }, + + times: 
function(iterator) { + $R(0, this, true).each(iterator); + return this; + } +}); + +var Try = { + these: function() { + var returnValue; + + for (var i = 0, length = arguments.length; i < length; i++) { + var lambda = arguments[i]; + try { + returnValue = lambda(); + break; + } catch (e) {} + } + + return returnValue; + } +} + +/*--------------------------------------------------------------------------*/ + +var PeriodicalExecuter = Class.create(); +PeriodicalExecuter.prototype = { + initialize: function(callback, frequency) { + this.callback = callback; + this.frequency = frequency; + this.currentlyExecuting = false; + + this.registerCallback(); + }, + + registerCallback: function() { + this.timer = setInterval(this.onTimerEvent.bind(this), this.frequency * 1000); + }, + + stop: function() { + if (!this.timer) return; + clearInterval(this.timer); + this.timer = null; + }, + + onTimerEvent: function() { + if (!this.currentlyExecuting) { + try { + this.currentlyExecuting = true; + this.callback(this); + } finally { + this.currentlyExecuting = false; + } + } + } +} +String.interpret = function(value){ + return value == null ? '' : String(value); +} + +Object.extend(String.prototype, { + gsub: function(pattern, replacement) { + var result = '', source = this, match; + replacement = arguments.callee.prepareReplacement(replacement); + + while (source.length > 0) { + if (match = source.match(pattern)) { + result += source.slice(0, match.index); + result += String.interpret(replacement(match)); + source = source.slice(match.index + match[0].length); + } else { + result += source, source = ''; + } + } + return result; + }, + + sub: function(pattern, replacement, count) { + replacement = this.gsub.prepareReplacement(replacement); + count = count === undefined ? 
1 : count; + + return this.gsub(pattern, function(match) { + if (--count < 0) return match[0]; + return replacement(match); + }); + }, + + scan: function(pattern, iterator) { + this.gsub(pattern, iterator); + return this; + }, + + truncate: function(length, truncation) { + length = length || 30; + truncation = truncation === undefined ? '...' : truncation; + return this.length > length ? + this.slice(0, length - truncation.length) + truncation : this; + }, + + strip: function() { + return this.replace(/^\s+/, '').replace(/\s+$/, ''); + }, + + stripTags: function() { + return this.replace(/<\/?[^>]+>/gi, ''); + }, + + stripScripts: function() { + return this.replace(new RegExp(Prototype.ScriptFragment, 'img'), ''); + }, + + extractScripts: function() { + var matchAll = new RegExp(Prototype.ScriptFragment, 'img'); + var matchOne = new RegExp(Prototype.ScriptFragment, 'im'); + return (this.match(matchAll) || []).map(function(scriptTag) { + return (scriptTag.match(matchOne) || ['', ''])[1]; + }); + }, + + evalScripts: function() { + return this.extractScripts().map(function(script) { return eval(script) }); + }, + + escapeHTML: function() { + var div = document.createElement('div'); + var text = document.createTextNode(this); + div.appendChild(text); + return div.innerHTML; + }, + + unescapeHTML: function() { + var div = document.createElement('div'); + div.innerHTML = this.stripTags(); + return div.childNodes[0] ? (div.childNodes.length > 1 ? + $A(div.childNodes).inject('',function(memo,node){ return memo+node.nodeValue }) : + div.childNodes[0].nodeValue) : ''; + }, + + toQueryParams: function(separator) { + var match = this.strip().match(/([^?#]*)(#.*)?$/); + if (!match) return {}; + + return match[1].split(separator || '&').inject({}, function(hash, pair) { + if ((pair = pair.split('='))[0]) { + var name = decodeURIComponent(pair[0]); + var value = pair[1] ? 
decodeURIComponent(pair[1]) : undefined; + + if (hash[name] !== undefined) { + if (hash[name].constructor != Array) + hash[name] = [hash[name]]; + if (value) hash[name].push(value); + } + else hash[name] = value; + } + return hash; + }); + }, + + toArray: function() { + return this.split(''); + }, + + succ: function() { + return this.slice(0, this.length - 1) + + String.fromCharCode(this.charCodeAt(this.length - 1) + 1); + }, + + camelize: function() { + var parts = this.split('-'), len = parts.length; + if (len == 1) return parts[0]; + + var camelized = this.charAt(0) == '-' + ? parts[0].charAt(0).toUpperCase() + parts[0].substring(1) + : parts[0]; + + for (var i = 1; i < len; i++) + camelized += parts[i].charAt(0).toUpperCase() + parts[i].substring(1); + + return camelized; + }, + + capitalize: function(){ + return this.charAt(0).toUpperCase() + this.substring(1).toLowerCase(); + }, + + underscore: function() { + return this.gsub(/::/, '/').gsub(/([A-Z]+)([A-Z][a-z])/,'#{1}_#{2}').gsub(/([a-z\d])([A-Z])/,'#{1}_#{2}').gsub(/-/,'_').toLowerCase(); + }, + + dasherize: function() { + return this.gsub(/_/,'-'); + }, + + inspect: function(useDoubleQuotes) { + var escapedString = this.replace(/\\/g, '\\\\'); + if (useDoubleQuotes) + return '"' + escapedString.replace(/"/g, '\\"') + '"'; + else + return "'" + escapedString.replace(/'/g, '\\\'') + "'"; + } +}); + +String.prototype.gsub.prepareReplacement = function(replacement) { + if (typeof replacement == 'function') return replacement; + var template = new Template(replacement); + return function(match) { return template.evaluate(match) }; +} + +String.prototype.parseQuery = String.prototype.toQueryParams; + +var Template = Class.create(); +Template.Pattern = /(^|.|\r|\n)(#\{(.*?)\})/; +Template.prototype = { + initialize: function(template, pattern) { + this.template = template.toString(); + this.pattern = pattern || Template.Pattern; + }, + + evaluate: function(object) { + return this.template.gsub(this.pattern, 
function(match) { + var before = match[1]; + if (before == '\\') return match[2]; + return before + String.interpret(object[match[3]]); + }); + } +} + +var $break = new Object(); +var $continue = new Object(); + +var Enumerable = { + each: function(iterator) { + var index = 0; + try { + this._each(function(value) { + try { + iterator(value, index++); + } catch (e) { + if (e != $continue) throw e; + } + }); + } catch (e) { + if (e != $break) throw e; + } + return this; + }, + + eachSlice: function(number, iterator) { + var index = -number, slices = [], array = this.toArray(); + while ((index += number) < array.length) + slices.push(array.slice(index, index+number)); + return slices.map(iterator); + }, + + all: function(iterator) { + var result = true; + this.each(function(value, index) { + result = result && !!(iterator || Prototype.K)(value, index); + if (!result) throw $break; + }); + return result; + }, + + any: function(iterator) { + var result = false; + this.each(function(value, index) { + if (result = !!(iterator || Prototype.K)(value, index)) + throw $break; + }); + return result; + }, + + collect: function(iterator) { + var results = []; + this.each(function(value, index) { + results.push((iterator || Prototype.K)(value, index)); + }); + return results; + }, + + detect: function(iterator) { + var result; + this.each(function(value, index) { + if (iterator(value, index)) { + result = value; + throw $break; + } + }); + return result; + }, + + findAll: function(iterator) { + var results = []; + this.each(function(value, index) { + if (iterator(value, index)) + results.push(value); + }); + return results; + }, + + grep: function(pattern, iterator) { + var results = []; + this.each(function(value, index) { + var stringValue = value.toString(); + if (stringValue.match(pattern)) + results.push((iterator || Prototype.K)(value, index)); + }) + return results; + }, + + include: function(object) { + var found = false; + this.each(function(value) { + if (value == 
object) { + found = true; + throw $break; + } + }); + return found; + }, + + inGroupsOf: function(number, fillWith) { + fillWith = fillWith === undefined ? null : fillWith; + return this.eachSlice(number, function(slice) { + while(slice.length < number) slice.push(fillWith); + return slice; + }); + }, + + inject: function(memo, iterator) { + this.each(function(value, index) { + memo = iterator(memo, value, index); + }); + return memo; + }, + + invoke: function(method) { + var args = $A(arguments).slice(1); + return this.map(function(value) { + return value[method].apply(value, args); + }); + }, + + max: function(iterator) { + var result; + this.each(function(value, index) { + value = (iterator || Prototype.K)(value, index); + if (result == undefined || value >= result) + result = value; + }); + return result; + }, + + min: function(iterator) { + var result; + this.each(function(value, index) { + value = (iterator || Prototype.K)(value, index); + if (result == undefined || value < result) + result = value; + }); + return result; + }, + + partition: function(iterator) { + var trues = [], falses = []; + this.each(function(value, index) { + ((iterator || Prototype.K)(value, index) ? + trues : falses).push(value); + }); + return [trues, falses]; + }, + + pluck: function(property) { + var results = []; + this.each(function(value, index) { + results.push(value[property]); + }); + return results; + }, + + reject: function(iterator) { + var results = []; + this.each(function(value, index) { + if (!iterator(value, index)) + results.push(value); + }); + return results; + }, + + sortBy: function(iterator) { + return this.map(function(value, index) { + return {value: value, criteria: iterator(value, index)}; + }).sort(function(left, right) { + var a = left.criteria, b = right.criteria; + return a < b ? -1 : a > b ? 
1 : 0; + }).pluck('value'); + }, + + toArray: function() { + return this.map(); + }, + + zip: function() { + var iterator = Prototype.K, args = $A(arguments); + if (typeof args.last() == 'function') + iterator = args.pop(); + + var collections = [this].concat(args).map($A); + return this.map(function(value, index) { + return iterator(collections.pluck(index)); + }); + }, + + size: function() { + return this.toArray().length; + }, + + inspect: function() { + return '#'; + } +} + +Object.extend(Enumerable, { + map: Enumerable.collect, + find: Enumerable.detect, + select: Enumerable.findAll, + member: Enumerable.include, + entries: Enumerable.toArray +}); +var $A = Array.from = function(iterable) { + if (!iterable) return []; + if (iterable.toArray) { + return iterable.toArray(); + } else { + var results = []; + for (var i = 0, length = iterable.length; i < length; i++) + results.push(iterable[i]); + return results; + } +} + +Object.extend(Array.prototype, Enumerable); + +if (!Array.prototype._reverse) + Array.prototype._reverse = Array.prototype.reverse; + +Object.extend(Array.prototype, { + _each: function(iterator) { + for (var i = 0, length = this.length; i < length; i++) + iterator(this[i]); + }, + + clear: function() { + this.length = 0; + return this; + }, + + first: function() { + return this[0]; + }, + + last: function() { + return this[this.length - 1]; + }, + + compact: function() { + return this.select(function(value) { + return value != null; + }); + }, + + flatten: function() { + return this.inject([], function(array, value) { + return array.concat(value && value.constructor == Array ? 
+ value.flatten() : [value]); + }); + }, + + without: function() { + var values = $A(arguments); + return this.select(function(value) { + return !values.include(value); + }); + }, + + indexOf: function(object) { + for (var i = 0, length = this.length; i < length; i++) + if (this[i] == object) return i; + return -1; + }, + + reverse: function(inline) { + return (inline !== false ? this : this.toArray())._reverse(); + }, + + reduce: function() { + return this.length > 1 ? this : this[0]; + }, + + uniq: function() { + return this.inject([], function(array, value) { + return array.include(value) ? array : array.concat([value]); + }); + }, + + clone: function() { + return [].concat(this); + }, + + size: function() { + return this.length; + }, + + inspect: function() { + return '[' + this.map(Object.inspect).join(', ') + ']'; + } +}); + +Array.prototype.toArray = Array.prototype.clone; + +function $w(string){ + string = string.strip(); + return string ? string.split(/\s+/) : []; +} + +if(window.opera){ + Array.prototype.concat = function(){ + var array = []; + for(var i = 0, length = this.length; i < length; i++) array.push(this[i]); + for(var i = 0, length = arguments.length; i < length; i++) { + if(arguments[i].constructor == Array) { + for(var j = 0, arrayLength = arguments[i].length; j < arrayLength; j++) + array.push(arguments[i][j]); + } else { + array.push(arguments[i]); + } + } + return array; + } +} +var Hash = function(obj) { + Object.extend(this, obj || {}); +}; + +Object.extend(Hash, { + toQueryString: function(obj) { + var parts = []; + + this.prototype._each.call(obj, function(pair) { + if (!pair.key) return; + + if (pair.value && pair.value.constructor == Array) { + var values = pair.value.compact(); + if (values.length < 2) pair.value = values.reduce(); + else { + key = encodeURIComponent(pair.key); + values.each(function(value) { + value = value != undefined ? 
encodeURIComponent(value) : ''; + parts.push(key + '=' + encodeURIComponent(value)); + }); + return; + } + } + if (pair.value == undefined) pair[1] = ''; + parts.push(pair.map(encodeURIComponent).join('=')); + }); + + return parts.join('&'); + } +}); + +Object.extend(Hash.prototype, Enumerable); +Object.extend(Hash.prototype, { + _each: function(iterator) { + for (var key in this) { + var value = this[key]; + if (value && value == Hash.prototype[key]) continue; + + var pair = [key, value]; + pair.key = key; + pair.value = value; + iterator(pair); + } + }, + + keys: function() { + return this.pluck('key'); + }, + + values: function() { + return this.pluck('value'); + }, + + merge: function(hash) { + return $H(hash).inject(this, function(mergedHash, pair) { + mergedHash[pair.key] = pair.value; + return mergedHash; + }); + }, + + remove: function() { + var result; + for(var i = 0, length = arguments.length; i < length; i++) { + var value = this[arguments[i]]; + if (value !== undefined){ + if (result === undefined) result = value; + else { + if (result.constructor != Array) result = [result]; + result.push(value) + } + } + delete this[arguments[i]]; + } + return result; + }, + + toQueryString: function() { + return Hash.toQueryString(this); + }, + + inspect: function() { + return '#'; + } +}); + +function $H(object) { + if (object && object.constructor == Hash) return object; + return new Hash(object); +}; +ObjectRange = Class.create(); +Object.extend(ObjectRange.prototype, Enumerable); +Object.extend(ObjectRange.prototype, { + initialize: function(start, end, exclusive) { + this.start = start; + this.end = end; + this.exclusive = exclusive; + }, + + _each: function(iterator) { + var value = this.start; + while (this.include(value)) { + iterator(value); + value = value.succ(); + } + }, + + include: function(value) { + if (value < this.start) + return false; + if (this.exclusive) + return value < this.end; + return value <= this.end; + } +}); + +var $R = function(start, 
end, exclusive) { + return new ObjectRange(start, end, exclusive); +} + +var Ajax = { + getTransport: function() { + return Try.these( + function() {return new XMLHttpRequest()}, + function() {return new ActiveXObject('Msxml2.XMLHTTP')}, + function() {return new ActiveXObject('Microsoft.XMLHTTP')} + ) || false; + }, + + activeRequestCount: 0 +} + +Ajax.Responders = { + responders: [], + + _each: function(iterator) { + this.responders._each(iterator); + }, + + register: function(responder) { + if (!this.include(responder)) + this.responders.push(responder); + }, + + unregister: function(responder) { + this.responders = this.responders.without(responder); + }, + + dispatch: function(callback, request, transport, json) { + this.each(function(responder) { + if (typeof responder[callback] == 'function') { + try { + responder[callback].apply(responder, [request, transport, json]); + } catch (e) {} + } + }); + } +}; + +Object.extend(Ajax.Responders, Enumerable); + +Ajax.Responders.register({ + onCreate: function() { + Ajax.activeRequestCount++; + }, + onComplete: function() { + Ajax.activeRequestCount--; + } +}); + +Ajax.Base = function() {}; +Ajax.Base.prototype = { + setOptions: function(options) { + this.options = { + method: 'post', + asynchronous: true, + contentType: 'application/x-www-form-urlencoded', + encoding: 'UTF-8', + parameters: '' + } + Object.extend(this.options, options || {}); + + this.options.method = this.options.method.toLowerCase(); + if (typeof this.options.parameters == 'string') + this.options.parameters = this.options.parameters.toQueryParams(); + } +} + +Ajax.Request = Class.create(); +Ajax.Request.Events = + ['Uninitialized', 'Loading', 'Loaded', 'Interactive', 'Complete']; + +Ajax.Request.prototype = Object.extend(new Ajax.Base(), { + _complete: false, + + initialize: function(url, options) { + this.transport = Ajax.getTransport(); + this.setOptions(options); + this.request(url); + }, + + request: function(url) { + this.url = url; + 
this.method = this.options.method; + var params = this.options.parameters; + + if (!['get', 'post'].include(this.method)) { + // simulate other verbs over post + params['_method'] = this.method; + this.method = 'post'; + } + + params = Hash.toQueryString(params); + if (params && /Konqueror|Safari|KHTML/.test(navigator.userAgent)) params += '&_=' + + // when GET, append parameters to URL + if (this.method == 'get' && params) + this.url += (this.url.indexOf('?') > -1 ? '&' : '?') + params; + + try { + Ajax.Responders.dispatch('onCreate', this, this.transport); + + this.transport.open(this.method.toUpperCase(), this.url, + this.options.asynchronous); + + if (this.options.asynchronous) + setTimeout(function() { this.respondToReadyState(1) }.bind(this), 10); + + this.transport.onreadystatechange = this.onStateChange.bind(this); + this.setRequestHeaders(); + + var body = this.method == 'post' ? (this.options.postBody || params) : null; + + this.transport.send(body); + + /* Force Firefox to handle ready state 4 for synchronous requests */ + if (!this.options.asynchronous && this.transport.overrideMimeType) + this.onStateChange(); + + } + catch (e) { + this.dispatchException(e); + } + }, + + onStateChange: function() { + var readyState = this.transport.readyState; + if (readyState > 1 && !((readyState == 4) && this._complete)) + this.respondToReadyState(this.transport.readyState); + }, + + setRequestHeaders: function() { + var headers = { + 'X-Requested-With': 'XMLHttpRequest', + 'X-Prototype-Version': Prototype.Version, + 'Accept': 'text/javascript, text/html, application/xml, text/xml, */*' + }; + + if (this.method == 'post') { + headers['Content-type'] = this.options.contentType + + (this.options.encoding ? '; charset=' + this.options.encoding : ''); + + /* Force "Connection: close" for older Mozilla browsers to work + * around a bug where XMLHttpRequest sends an incorrect + * Content-length header. See Mozilla Bugzilla #246651. 
+ */ + if (this.transport.overrideMimeType && + (navigator.userAgent.match(/Gecko\/(\d{4})/) || [0,2005])[1] < 2005) + headers['Connection'] = 'close'; + } + + // user-defined headers + if (typeof this.options.requestHeaders == 'object') { + var extras = this.options.requestHeaders; + + if (typeof extras.push == 'function') + for (var i = 0, length = extras.length; i < length; i += 2) + headers[extras[i]] = extras[i+1]; + else + $H(extras).each(function(pair) { headers[pair.key] = pair.value }); + } + + for (var name in headers) + this.transport.setRequestHeader(name, headers[name]); + }, + + success: function() { + return !this.transport.status + || (this.transport.status >= 200 && this.transport.status < 300); + }, + + respondToReadyState: function(readyState) { + var state = Ajax.Request.Events[readyState]; + var transport = this.transport, json = this.evalJSON(); + + if (state == 'Complete') { + try { + this._complete = true; + (this.options['on' + this.transport.status] + || this.options['on' + (this.success() ? 'Success' : 'Failure')] + || Prototype.emptyFunction)(transport, json); + } catch (e) { + this.dispatchException(e); + } + + if ((this.getHeader('Content-type') || 'text/javascript').strip(). + match(/^(text|application)\/(x-)?(java|ecma)script(;.*)?$/i)) + this.evalResponse(); + } + + try { + (this.options['on' + state] || Prototype.emptyFunction)(transport, json); + Ajax.Responders.dispatch('on' + state, this, transport, json); + } catch (e) { + this.dispatchException(e); + } + + if (state == 'Complete') { + // avoid memory leak in MSIE: clean up + this.transport.onreadystatechange = Prototype.emptyFunction; + } + }, + + getHeader: function(name) { + try { + return this.transport.getResponseHeader(name); + } catch (e) { return null } + }, + + evalJSON: function() { + try { + var json = this.getHeader('X-JSON'); + return json ? 
eval('(' + json + ')') : null; + } catch (e) { return null } + }, + + evalResponse: function() { + try { + return eval(this.transport.responseText); + } catch (e) { + this.dispatchException(e); + } + }, + + dispatchException: function(exception) { + (this.options.onException || Prototype.emptyFunction)(this, exception); + Ajax.Responders.dispatch('onException', this, exception); + } +}); + +Ajax.Updater = Class.create(); + +Object.extend(Object.extend(Ajax.Updater.prototype, Ajax.Request.prototype), { + initialize: function(container, url, options) { + this.container = { + success: (container.success || container), + failure: (container.failure || (container.success ? null : container)) + } + + this.transport = Ajax.getTransport(); + this.setOptions(options); + + var onComplete = this.options.onComplete || Prototype.emptyFunction; + this.options.onComplete = (function(transport, param) { + this.updateContent(); + onComplete(transport, param); + }).bind(this); + + this.request(url); + }, + + updateContent: function() { + var receiver = this.container[this.success() ? 
'success' : 'failure']; + var response = this.transport.responseText; + + if (!this.options.evalScripts) response = response.stripScripts(); + + if (receiver = $(receiver)) { + if (this.options.insertion) + new this.options.insertion(receiver, response); + else + receiver.update(response); + } + + if (this.success()) { + if (this.onComplete) + setTimeout(this.onComplete.bind(this), 10); + } + } +}); + +Ajax.PeriodicalUpdater = Class.create(); +Ajax.PeriodicalUpdater.prototype = Object.extend(new Ajax.Base(), { + initialize: function(container, url, options) { + this.setOptions(options); + this.onComplete = this.options.onComplete; + + this.frequency = (this.options.frequency || 2); + this.decay = (this.options.decay || 1); + + this.updater = {}; + this.container = container; + this.url = url; + + this.start(); + }, + + start: function() { + this.options.onComplete = this.updateComplete.bind(this); + this.onTimerEvent(); + }, + + stop: function() { + this.updater.options.onComplete = undefined; + clearTimeout(this.timer); + (this.onComplete || Prototype.emptyFunction).apply(this, arguments); + }, + + updateComplete: function(request) { + if (this.options.decay) { + this.decay = (request.responseText == this.lastText ? 
+ this.decay * this.options.decay : 1); + + this.lastText = request.responseText; + } + this.timer = setTimeout(this.onTimerEvent.bind(this), + this.decay * this.frequency * 1000); + }, + + onTimerEvent: function() { + this.updater = new Ajax.Updater(this.container, this.url, this.options); + } +}); +function $(element) { + if (arguments.length > 1) { + for (var i = 0, elements = [], length = arguments.length; i < length; i++) + elements.push($(arguments[i])); + return elements; + } + if (typeof element == 'string') + element = document.getElementById(element); + return Element.extend(element); +} + +if (Prototype.BrowserFeatures.XPath) { + document._getElementsByXPath = function(expression, parentElement) { + var results = []; + var query = document.evaluate(expression, $(parentElement) || document, + null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE, null); + for (var i = 0, length = query.snapshotLength; i < length; i++) + results.push(query.snapshotItem(i)); + return results; + }; +} + +document.getElementsByClassName = function(className, parentElement) { + if (Prototype.BrowserFeatures.XPath) { + var q = ".//*[contains(concat(' ', @class, ' '), ' " + className + " ')]"; + return document._getElementsByXPath(q, parentElement); + } else { + var children = ($(parentElement) || document.body).getElementsByTagName('*'); + var elements = [], child; + for (var i = 0, length = children.length; i < length; i++) { + child = children[i]; + if (Element.hasClassName(child, className)) + elements.push(Element.extend(child)); + } + return elements; + } +}; + +/*--------------------------------------------------------------------------*/ + +if (!window.Element) + var Element = new Object(); + +Element.extend = function(element) { + if (!element || _nativeExtensions || element.nodeType == 3) return element; + + if (!element._extended && element.tagName && element != window) { + var methods = Object.clone(Element.Methods), cache = Element.extend.cache; + + if (element.tagName == 
'FORM') + Object.extend(methods, Form.Methods); + if (['INPUT', 'TEXTAREA', 'SELECT'].include(element.tagName)) + Object.extend(methods, Form.Element.Methods); + + Object.extend(methods, Element.Methods.Simulated); + + for (var property in methods) { + var value = methods[property]; + if (typeof value == 'function' && !(property in element)) + element[property] = cache.findOrStore(value); + } + } + + element._extended = true; + return element; +}; + +Element.extend.cache = { + findOrStore: function(value) { + return this[value] = this[value] || function() { + return value.apply(null, [this].concat($A(arguments))); + } + } +}; + +Element.Methods = { + visible: function(element) { + return $(element).style.display != 'none'; + }, + + toggle: function(element) { + element = $(element); + Element[Element.visible(element) ? 'hide' : 'show'](element); + return element; + }, + + hide: function(element) { + $(element).style.display = 'none'; + return element; + }, + + show: function(element) { + $(element).style.display = ''; + return element; + }, + + remove: function(element) { + element = $(element); + element.parentNode.removeChild(element); + return element; + }, + + update: function(element, html) { + html = typeof html == 'undefined' ? '' : html.toString(); + $(element).innerHTML = html.stripScripts(); + setTimeout(function() {html.evalScripts()}, 10); + return element; + }, + + replace: function(element, html) { + element = $(element); + html = typeof html == 'undefined' ? 
'' : html.toString(); + if (element.outerHTML) { + element.outerHTML = html.stripScripts(); + } else { + var range = element.ownerDocument.createRange(); + range.selectNodeContents(element); + element.parentNode.replaceChild( + range.createContextualFragment(html.stripScripts()), element); + } + setTimeout(function() {html.evalScripts()}, 10); + return element; + }, + + inspect: function(element) { + element = $(element); + var result = '<' + element.tagName.toLowerCase(); + $H({'id': 'id', 'className': 'class'}).each(function(pair) { + var property = pair.first(), attribute = pair.last(); + var value = (element[property] || '').toString(); + if (value) result += ' ' + attribute + '=' + value.inspect(true); + }); + return result + '>'; + }, + + recursivelyCollect: function(element, property) { + element = $(element); + var elements = []; + while (element = element[property]) + if (element.nodeType == 1) + elements.push(Element.extend(element)); + return elements; + }, + + ancestors: function(element) { + return $(element).recursivelyCollect('parentNode'); + }, + + descendants: function(element) { + return $A($(element).getElementsByTagName('*')); + }, + + immediateDescendants: function(element) { + if (!(element = $(element).firstChild)) return []; + while (element && element.nodeType != 1) element = element.nextSibling; + if (element) return [element].concat($(element).nextSiblings()); + return []; + }, + + previousSiblings: function(element) { + return $(element).recursivelyCollect('previousSibling'); + }, + + nextSiblings: function(element) { + return $(element).recursivelyCollect('nextSibling'); + }, + + siblings: function(element) { + element = $(element); + return element.previousSiblings().reverse().concat(element.nextSiblings()); + }, + + match: function(element, selector) { + if (typeof selector == 'string') + selector = new Selector(selector); + return selector.match($(element)); + }, + + up: function(element, expression, index) { + return 
Selector.findElement($(element).ancestors(), expression, index); + }, + + down: function(element, expression, index) { + return Selector.findElement($(element).descendants(), expression, index); + }, + + previous: function(element, expression, index) { + return Selector.findElement($(element).previousSiblings(), expression, index); + }, + + next: function(element, expression, index) { + return Selector.findElement($(element).nextSiblings(), expression, index); + }, + + getElementsBySelector: function() { + var args = $A(arguments), element = $(args.shift()); + return Selector.findChildElements(element, args); + }, + + getElementsByClassName: function(element, className) { + return document.getElementsByClassName(className, element); + }, + + readAttribute: function(element, name) { + element = $(element); + if (document.all && !window.opera) { + var t = Element._attributeTranslations; + if (t.values[name]) return t.values[name](element, name); + if (t.names[name]) name = t.names[name]; + var attribute = element.attributes[name]; + if(attribute) return attribute.nodeValue; + } + return element.getAttribute(name); + }, + + getHeight: function(element) { + return $(element).getDimensions().height; + }, + + getWidth: function(element) { + return $(element).getDimensions().width; + }, + + classNames: function(element) { + return new Element.ClassNames(element); + }, + + hasClassName: function(element, className) { + if (!(element = $(element))) return; + var elementClassName = element.className; + if (elementClassName.length == 0) return false; + if (elementClassName == className || + elementClassName.match(new RegExp("(^|\\s)" + className + "(\\s|$)"))) + return true; + return false; + }, + + addClassName: function(element, className) { + if (!(element = $(element))) return; + Element.classNames(element).add(className); + return element; + }, + + removeClassName: function(element, className) { + if (!(element = $(element))) return; + 
Element.classNames(element).remove(className); + return element; + }, + + toggleClassName: function(element, className) { + if (!(element = $(element))) return; + Element.classNames(element)[element.hasClassName(className) ? 'remove' : 'add'](className); + return element; + }, + + observe: function() { + Event.observe.apply(Event, arguments); + return $A(arguments).first(); + }, + + stopObserving: function() { + Event.stopObserving.apply(Event, arguments); + return $A(arguments).first(); + }, + + // removes whitespace-only text node children + cleanWhitespace: function(element) { + element = $(element); + var node = element.firstChild; + while (node) { + var nextNode = node.nextSibling; + if (node.nodeType == 3 && !/\S/.test(node.nodeValue)) + element.removeChild(node); + node = nextNode; + } + return element; + }, + + empty: function(element) { + return $(element).innerHTML.match(/^\s*$/); + }, + + descendantOf: function(element, ancestor) { + element = $(element), ancestor = $(ancestor); + while (element = element.parentNode) + if (element == ancestor) return true; + return false; + }, + + scrollTo: function(element) { + element = $(element); + var pos = Position.cumulativeOffset(element); + window.scrollTo(pos[0], pos[1]); + return element; + }, + + getStyle: function(element, style) { + element = $(element); + if (['float','cssFloat'].include(style)) + style = (typeof element.style.styleFloat != 'undefined' ? 'styleFloat' : 'cssFloat'); + style = style.camelize(); + var value = element.style[style]; + if (!value) { + if (document.defaultView && document.defaultView.getComputedStyle) { + var css = document.defaultView.getComputedStyle(element, null); + value = css ? 
css[style] : null; + } else if (element.currentStyle) { + value = element.currentStyle[style]; + } + } + + if((value == 'auto') && ['width','height'].include(style) && (element.getStyle('display') != 'none')) + value = element['offset'+style.capitalize()] + 'px'; + + if (window.opera && ['left', 'top', 'right', 'bottom'].include(style)) + if (Element.getStyle(element, 'position') == 'static') value = 'auto'; + if(style == 'opacity') { + if(value) return parseFloat(value); + if(value = (element.getStyle('filter') || '').match(/alpha\(opacity=(.*)\)/)) + if(value[1]) return parseFloat(value[1]) / 100; + return 1.0; + } + return value == 'auto' ? null : value; + }, + + setStyle: function(element, style) { + element = $(element); + for (var name in style) { + var value = style[name]; + if(name == 'opacity') { + if (value == 1) { + value = (/Gecko/.test(navigator.userAgent) && + !/Konqueror|Safari|KHTML/.test(navigator.userAgent)) ? 0.999999 : 1.0; + if(/MSIE/.test(navigator.userAgent) && !window.opera) + element.style.filter = element.getStyle('filter').replace(/alpha\([^\)]*\)/gi,''); + } else if(value == '') { + if(/MSIE/.test(navigator.userAgent) && !window.opera) + element.style.filter = element.getStyle('filter').replace(/alpha\([^\)]*\)/gi,''); + } else { + if(value < 0.00001) value = 0; + if(/MSIE/.test(navigator.userAgent) && !window.opera) + element.style.filter = element.getStyle('filter').replace(/alpha\([^\)]*\)/gi,'') + + 'alpha(opacity='+value*100+')'; + } + } else if(['float','cssFloat'].include(name)) name = (typeof element.style.styleFloat != 'undefined') ? 
'styleFloat' : 'cssFloat'; + element.style[name.camelize()] = value; + } + return element; + }, + + getDimensions: function(element) { + element = $(element); + var display = $(element).getStyle('display'); + if (display != 'none' && display != null) // Safari bug + return {width: element.offsetWidth, height: element.offsetHeight}; + + // All *Width and *Height properties give 0 on elements with display none, + // so enable the element temporarily + var els = element.style; + var originalVisibility = els.visibility; + var originalPosition = els.position; + var originalDisplay = els.display; + els.visibility = 'hidden'; + els.position = 'absolute'; + els.display = 'block'; + var originalWidth = element.clientWidth; + var originalHeight = element.clientHeight; + els.display = originalDisplay; + els.position = originalPosition; + els.visibility = originalVisibility; + return {width: originalWidth, height: originalHeight}; + }, + + makePositioned: function(element) { + element = $(element); + var pos = Element.getStyle(element, 'position'); + if (pos == 'static' || !pos) { + element._madePositioned = true; + element.style.position = 'relative'; + // Opera returns the offset relative to the positioning context, when an + // element is position relative but top and left have not been defined + if (window.opera) { + element.style.top = 0; + element.style.left = 0; + } + } + return element; + }, + + undoPositioned: function(element) { + element = $(element); + if (element._madePositioned) { + element._madePositioned = undefined; + element.style.position = + element.style.top = + element.style.left = + element.style.bottom = + element.style.right = ''; + } + return element; + }, + + makeClipping: function(element) { + element = $(element); + if (element._overflow) return element; + element._overflow = element.style.overflow || 'auto'; + if ((Element.getStyle(element, 'overflow') || 'visible') != 'hidden') + element.style.overflow = 'hidden'; + return element; + }, + + 
undoClipping: function(element) { + element = $(element); + if (!element._overflow) return element; + element.style.overflow = element._overflow == 'auto' ? '' : element._overflow; + element._overflow = null; + return element; + } +}; + +Object.extend(Element.Methods, {childOf: Element.Methods.descendantOf}); + +Element._attributeTranslations = {}; + +Element._attributeTranslations.names = { + colspan: "colSpan", + rowspan: "rowSpan", + valign: "vAlign", + datetime: "dateTime", + accesskey: "accessKey", + tabindex: "tabIndex", + enctype: "encType", + maxlength: "maxLength", + readonly: "readOnly", + longdesc: "longDesc" +}; + +Element._attributeTranslations.values = { + _getAttr: function(element, attribute) { + return element.getAttribute(attribute, 2); + }, + + _flag: function(element, attribute) { + return $(element).hasAttribute(attribute) ? attribute : null; + }, + + style: function(element) { + return element.style.cssText.toLowerCase(); + }, + + title: function(element) { + var node = element.getAttributeNode('title'); + return node.specified ? node.nodeValue : null; + } +}; + +Object.extend(Element._attributeTranslations.values, { + href: Element._attributeTranslations.values._getAttr, + src: Element._attributeTranslations.values._getAttr, + disabled: Element._attributeTranslations.values._flag, + checked: Element._attributeTranslations.values._flag, + readonly: Element._attributeTranslations.values._flag, + multiple: Element._attributeTranslations.values._flag +}); + +Element.Methods.Simulated = { + hasAttribute: function(element, attribute) { + var t = Element._attributeTranslations; + attribute = t.names[attribute] || attribute; + return $(element).getAttributeNode(attribute).specified; + } +}; + +// IE is missing .innerHTML support for TABLE-related elements +if (document.all && !window.opera){ + Element.Methods.update = function(element, html) { + element = $(element); + html = typeof html == 'undefined' ? 
'' : html.toString(); + var tagName = element.tagName.toUpperCase(); + if (['THEAD','TBODY','TR','TD'].include(tagName)) { + var div = document.createElement('div'); + switch (tagName) { + case 'THEAD': + case 'TBODY': + div.innerHTML = '' + html.stripScripts() + '
    '; + depth = 2; + break; + case 'TR': + div.innerHTML = '' + html.stripScripts() + '
    '; + depth = 3; + break; + case 'TD': + div.innerHTML = '
    ' + html.stripScripts() + '
    '; + depth = 4; + } + $A(element.childNodes).each(function(node){ + element.removeChild(node) + }); + depth.times(function(){ div = div.firstChild }); + + $A(div.childNodes).each( + function(node){ element.appendChild(node) }); + } else { + element.innerHTML = html.stripScripts(); + } + setTimeout(function() {html.evalScripts()}, 10); + return element; + } +}; + +Object.extend(Element, Element.Methods); + +var _nativeExtensions = false; + +if(/Konqueror|Safari|KHTML/.test(navigator.userAgent)) + ['', 'Form', 'Input', 'TextArea', 'Select'].each(function(tag) { + var className = 'HTML' + tag + 'Element'; + if(window[className]) return; + var klass = window[className] = {}; + klass.prototype = document.createElement(tag ? tag.toLowerCase() : 'div').__proto__; + }); + +Element.addMethods = function(methods) { + Object.extend(Element.Methods, methods || {}); + + function copy(methods, destination, onlyIfAbsent) { + onlyIfAbsent = onlyIfAbsent || false; + var cache = Element.extend.cache; + for (var property in methods) { + var value = methods[property]; + if (!onlyIfAbsent || !(property in destination)) + destination[property] = cache.findOrStore(value); + } + } + + if (typeof HTMLElement != 'undefined') { + copy(Element.Methods, HTMLElement.prototype); + copy(Element.Methods.Simulated, HTMLElement.prototype, true); + copy(Form.Methods, HTMLFormElement.prototype); + [HTMLInputElement, HTMLTextAreaElement, HTMLSelectElement].each(function(klass) { + copy(Form.Element.Methods, klass.prototype); + }); + _nativeExtensions = true; + } +} + +var Toggle = new Object(); +Toggle.display = Element.toggle; + +/*--------------------------------------------------------------------------*/ + +Abstract.Insertion = function(adjacency) { + this.adjacency = adjacency; +} + +Abstract.Insertion.prototype = { + initialize: function(element, content) { + this.element = $(element); + this.content = content.stripScripts(); + + if (this.adjacency && this.element.insertAdjacentHTML) { + try 
{ + this.element.insertAdjacentHTML(this.adjacency, this.content); + } catch (e) { + var tagName = this.element.tagName.toUpperCase(); + if (['TBODY', 'TR'].include(tagName)) { + this.insertContent(this.contentFromAnonymousTable()); + } else { + throw e; + } + } + } else { + this.range = this.element.ownerDocument.createRange(); + if (this.initializeRange) this.initializeRange(); + this.insertContent([this.range.createContextualFragment(this.content)]); + } + + setTimeout(function() {content.evalScripts()}, 10); + }, + + contentFromAnonymousTable: function() { + var div = document.createElement('div'); + div.innerHTML = '' + this.content + '
    '; + return $A(div.childNodes[0].childNodes[0].childNodes); + } +} + +var Insertion = new Object(); + +Insertion.Before = Class.create(); +Insertion.Before.prototype = Object.extend(new Abstract.Insertion('beforeBegin'), { + initializeRange: function() { + this.range.setStartBefore(this.element); + }, + + insertContent: function(fragments) { + fragments.each((function(fragment) { + this.element.parentNode.insertBefore(fragment, this.element); + }).bind(this)); + } +}); + +Insertion.Top = Class.create(); +Insertion.Top.prototype = Object.extend(new Abstract.Insertion('afterBegin'), { + initializeRange: function() { + this.range.selectNodeContents(this.element); + this.range.collapse(true); + }, + + insertContent: function(fragments) { + fragments.reverse(false).each((function(fragment) { + this.element.insertBefore(fragment, this.element.firstChild); + }).bind(this)); + } +}); + +Insertion.Bottom = Class.create(); +Insertion.Bottom.prototype = Object.extend(new Abstract.Insertion('beforeEnd'), { + initializeRange: function() { + this.range.selectNodeContents(this.element); + this.range.collapse(this.element); + }, + + insertContent: function(fragments) { + fragments.each((function(fragment) { + this.element.appendChild(fragment); + }).bind(this)); + } +}); + +Insertion.After = Class.create(); +Insertion.After.prototype = Object.extend(new Abstract.Insertion('afterEnd'), { + initializeRange: function() { + this.range.setStartAfter(this.element); + }, + + insertContent: function(fragments) { + fragments.each((function(fragment) { + this.element.parentNode.insertBefore(fragment, + this.element.nextSibling); + }).bind(this)); + } +}); + +/*--------------------------------------------------------------------------*/ + +Element.ClassNames = Class.create(); +Element.ClassNames.prototype = { + initialize: function(element) { + this.element = $(element); + }, + + _each: function(iterator) { + this.element.className.split(/\s+/).select(function(name) { + return 
name.length > 0; + })._each(iterator); + }, + + set: function(className) { + this.element.className = className; + }, + + add: function(classNameToAdd) { + if (this.include(classNameToAdd)) return; + this.set($A(this).concat(classNameToAdd).join(' ')); + }, + + remove: function(classNameToRemove) { + if (!this.include(classNameToRemove)) return; + this.set($A(this).without(classNameToRemove).join(' ')); + }, + + toString: function() { + return $A(this).join(' '); + } +}; + +Object.extend(Element.ClassNames.prototype, Enumerable); +var Selector = Class.create(); +Selector.prototype = { + initialize: function(expression) { + this.params = {classNames: []}; + this.expression = expression.toString().strip(); + this.parseExpression(); + this.compileMatcher(); + }, + + parseExpression: function() { + function abort(message) { throw 'Parse error in selector: ' + message; } + + if (this.expression == '') abort('empty expression'); + + var params = this.params, expr = this.expression, match, modifier, clause, rest; + while (match = expr.match(/^(.*)\[([a-z0-9_:-]+?)(?:([~\|!]?=)(?:"([^"]*)"|([^\]\s]*)))?\]$/i)) { + params.attributes = params.attributes || []; + params.attributes.push({name: match[2], operator: match[3], value: match[4] || match[5] || ''}); + expr = match[1]; + } + + if (expr == '*') return this.params.wildcard = true; + + while (match = expr.match(/^([^a-z0-9_-])?([a-z0-9_-]+)(.*)/i)) { + modifier = match[1], clause = match[2], rest = match[3]; + switch (modifier) { + case '#': params.id = clause; break; + case '.': params.classNames.push(clause); break; + case '': + case undefined: params.tagName = clause.toUpperCase(); break; + default: abort(expr.inspect()); + } + expr = rest; + } + + if (expr.length > 0) abort(expr.inspect()); + }, + + buildMatchExpression: function() { + var params = this.params, conditions = [], clause; + + if (params.wildcard) + conditions.push('true'); + if (clause = params.id) + conditions.push('element.readAttribute("id") == ' + 
clause.inspect()); + if (clause = params.tagName) + conditions.push('element.tagName.toUpperCase() == ' + clause.inspect()); + if ((clause = params.classNames).length > 0) + for (var i = 0, length = clause.length; i < length; i++) + conditions.push('element.hasClassName(' + clause[i].inspect() + ')'); + if (clause = params.attributes) { + clause.each(function(attribute) { + var value = 'element.readAttribute(' + attribute.name.inspect() + ')'; + var splitValueBy = function(delimiter) { + return value + ' && ' + value + '.split(' + delimiter.inspect() + ')'; + } + + switch (attribute.operator) { + case '=': conditions.push(value + ' == ' + attribute.value.inspect()); break; + case '~=': conditions.push(splitValueBy(' ') + '.include(' + attribute.value.inspect() + ')'); break; + case '|=': conditions.push( + splitValueBy('-') + '.first().toUpperCase() == ' + attribute.value.toUpperCase().inspect() + ); break; + case '!=': conditions.push(value + ' != ' + attribute.value.inspect()); break; + case '': + case undefined: conditions.push('element.hasAttribute(' + attribute.name.inspect() + ')'); break; + default: throw 'Unknown operator ' + attribute.operator + ' in selector'; + } + }); + } + + return conditions.join(' && '); + }, + + compileMatcher: function() { + this.match = new Function('element', 'if (!element.tagName) return false; \ + element = $(element); \ + return ' + this.buildMatchExpression()); + }, + + findElements: function(scope) { + var element; + + if (element = $(this.params.id)) + if (this.match(element)) + if (!scope || Element.childOf(element, scope)) + return [element]; + + scope = (scope || document).getElementsByTagName(this.params.tagName || '*'); + + var results = []; + for (var i = 0, length = scope.length; i < length; i++) + if (this.match(element = scope[i])) + results.push(Element.extend(element)); + + return results; + }, + + toString: function() { + return this.expression; + } +} + +Object.extend(Selector, { + matchElements: 
function(elements, expression) { + var selector = new Selector(expression); + return elements.select(selector.match.bind(selector)).map(Element.extend); + }, + + findElement: function(elements, expression, index) { + if (typeof expression == 'number') index = expression, expression = false; + return Selector.matchElements(elements, expression || '*')[index || 0]; + }, + + findChildElements: function(element, expressions) { + return expressions.map(function(expression) { + return expression.match(/[^\s"]+(?:"[^"]*"[^\s"]+)*/g).inject([null], function(results, expr) { + var selector = new Selector(expr); + return results.inject([], function(elements, result) { + return elements.concat(selector.findElements(result || element)); + }); + }); + }).flatten(); + } +}); + +function $$() { + return Selector.findChildElements(document, $A(arguments)); +} +var Form = { + reset: function(form) { + $(form).reset(); + return form; + }, + + serializeElements: function(elements, getHash) { + var data = elements.inject({}, function(result, element) { + if (!element.disabled && element.name) { + var key = element.name, value = $(element).getValue(); + if (value != undefined) { + if (result[key]) { + if (result[key].constructor != Array) result[key] = [result[key]]; + result[key].push(value); + } + else result[key] = value; + } + } + return result; + }); + + return getHash ? 
data : Hash.toQueryString(data); + } +}; + +Form.Methods = { + serialize: function(form, getHash) { + return Form.serializeElements(Form.getElements(form), getHash); + }, + + getElements: function(form) { + return $A($(form).getElementsByTagName('*')).inject([], + function(elements, child) { + if (Form.Element.Serializers[child.tagName.toLowerCase()]) + elements.push(Element.extend(child)); + return elements; + } + ); + }, + + getInputs: function(form, typeName, name) { + form = $(form); + var inputs = form.getElementsByTagName('input'); + + if (!typeName && !name) return $A(inputs).map(Element.extend); + + for (var i = 0, matchingInputs = [], length = inputs.length; i < length; i++) { + var input = inputs[i]; + if ((typeName && input.type != typeName) || (name && input.name != name)) + continue; + matchingInputs.push(Element.extend(input)); + } + + return matchingInputs; + }, + + disable: function(form) { + form = $(form); + form.getElements().each(function(element) { + element.blur(); + element.disabled = 'true'; + }); + return form; + }, + + enable: function(form) { + form = $(form); + form.getElements().each(function(element) { + element.disabled = ''; + }); + return form; + }, + + findFirstElement: function(form) { + return $(form).getElements().find(function(element) { + return element.type != 'hidden' && !element.disabled && + ['input', 'select', 'textarea'].include(element.tagName.toLowerCase()); + }); + }, + + focusFirstElement: function(form) { + form = $(form); + form.findFirstElement().activate(); + return form; + } +} + +Object.extend(Form, Form.Methods); + +/*--------------------------------------------------------------------------*/ + +Form.Element = { + focus: function(element) { + $(element).focus(); + return element; + }, + + select: function(element) { + $(element).select(); + return element; + } +} + +Form.Element.Methods = { + serialize: function(element) { + element = $(element); + if (!element.disabled && element.name) { + var value = 
element.getValue(); + if (value != undefined) { + var pair = {}; + pair[element.name] = value; + return Hash.toQueryString(pair); + } + } + return ''; + }, + + getValue: function(element) { + element = $(element); + var method = element.tagName.toLowerCase(); + return Form.Element.Serializers[method](element); + }, + + clear: function(element) { + $(element).value = ''; + return element; + }, + + present: function(element) { + return $(element).value != ''; + }, + + activate: function(element) { + element = $(element); + element.focus(); + if (element.select && ( element.tagName.toLowerCase() != 'input' || + !['button', 'reset', 'submit'].include(element.type) ) ) + element.select(); + return element; + }, + + disable: function(element) { + element = $(element); + element.disabled = true; + return element; + }, + + enable: function(element) { + element = $(element); + element.blur(); + element.disabled = false; + return element; + } +} + +Object.extend(Form.Element, Form.Element.Methods); +var Field = Form.Element; +var $F = Form.Element.getValue; + +/*--------------------------------------------------------------------------*/ + +Form.Element.Serializers = { + input: function(element) { + switch (element.type.toLowerCase()) { + case 'checkbox': + case 'radio': + return Form.Element.Serializers.inputSelector(element); + default: + return Form.Element.Serializers.textarea(element); + } + }, + + inputSelector: function(element) { + return element.checked ? element.value : null; + }, + + textarea: function(element) { + return element.value; + }, + + select: function(element) { + return this[element.type == 'select-one' ? + 'selectOne' : 'selectMany'](element); + }, + + selectOne: function(element) { + var index = element.selectedIndex; + return index >= 0 ? 
this.optionValue(element.options[index]) : null; + }, + + selectMany: function(element) { + var values, length = element.length; + if (!length) return null; + + for (var i = 0, values = []; i < length; i++) { + var opt = element.options[i]; + if (opt.selected) values.push(this.optionValue(opt)); + } + return values; + }, + + optionValue: function(opt) { + // extend element because hasAttribute may not be native + return Element.extend(opt).hasAttribute('value') ? opt.value : opt.text; + } +} + +/*--------------------------------------------------------------------------*/ + +Abstract.TimedObserver = function() {} +Abstract.TimedObserver.prototype = { + initialize: function(element, frequency, callback) { + this.frequency = frequency; + this.element = $(element); + this.callback = callback; + + this.lastValue = this.getValue(); + this.registerCallback(); + }, + + registerCallback: function() { + setInterval(this.onTimerEvent.bind(this), this.frequency * 1000); + }, + + onTimerEvent: function() { + var value = this.getValue(); + var changed = ('string' == typeof this.lastValue && 'string' == typeof value + ? 
this.lastValue != value : String(this.lastValue) != String(value)); + if (changed) { + this.callback(this.element, value); + this.lastValue = value; + } + } +} + +Form.Element.Observer = Class.create(); +Form.Element.Observer.prototype = Object.extend(new Abstract.TimedObserver(), { + getValue: function() { + return Form.Element.getValue(this.element); + } +}); + +Form.Observer = Class.create(); +Form.Observer.prototype = Object.extend(new Abstract.TimedObserver(), { + getValue: function() { + return Form.serialize(this.element); + } +}); + +/*--------------------------------------------------------------------------*/ + +Abstract.EventObserver = function() {} +Abstract.EventObserver.prototype = { + initialize: function(element, callback) { + this.element = $(element); + this.callback = callback; + + this.lastValue = this.getValue(); + if (this.element.tagName.toLowerCase() == 'form') + this.registerFormCallbacks(); + else + this.registerCallback(this.element); + }, + + onElementEvent: function() { + var value = this.getValue(); + if (this.lastValue != value) { + this.callback(this.element, value); + this.lastValue = value; + } + }, + + registerFormCallbacks: function() { + Form.getElements(this.element).each(this.registerCallback.bind(this)); + }, + + registerCallback: function(element) { + if (element.type) { + switch (element.type.toLowerCase()) { + case 'checkbox': + case 'radio': + Event.observe(element, 'click', this.onElementEvent.bind(this)); + break; + default: + Event.observe(element, 'change', this.onElementEvent.bind(this)); + break; + } + } + } +} + +Form.Element.EventObserver = Class.create(); +Form.Element.EventObserver.prototype = Object.extend(new Abstract.EventObserver(), { + getValue: function() { + return Form.Element.getValue(this.element); + } +}); + +Form.EventObserver = Class.create(); +Form.EventObserver.prototype = Object.extend(new Abstract.EventObserver(), { + getValue: function() { + return Form.serialize(this.element); + } +}); +if 
(!window.Event) { + var Event = new Object(); +} + +Object.extend(Event, { + KEY_BACKSPACE: 8, + KEY_TAB: 9, + KEY_RETURN: 13, + KEY_ESC: 27, + KEY_LEFT: 37, + KEY_UP: 38, + KEY_RIGHT: 39, + KEY_DOWN: 40, + KEY_DELETE: 46, + KEY_HOME: 36, + KEY_END: 35, + KEY_PAGEUP: 33, + KEY_PAGEDOWN: 34, + + element: function(event) { + return event.target || event.srcElement; + }, + + isLeftClick: function(event) { + return (((event.which) && (event.which == 1)) || + ((event.button) && (event.button == 1))); + }, + + pointerX: function(event) { + return event.pageX || (event.clientX + + (document.documentElement.scrollLeft || document.body.scrollLeft)); + }, + + pointerY: function(event) { + return event.pageY || (event.clientY + + (document.documentElement.scrollTop || document.body.scrollTop)); + }, + + stop: function(event) { + if (event.preventDefault) { + event.preventDefault(); + event.stopPropagation(); + } else { + event.returnValue = false; + event.cancelBubble = true; + } + }, + + // find the first node with the given tagName, starting from the + // node the event was triggered on; traverses the DOM upwards + findElement: function(event, tagName) { + var element = Event.element(event); + while (element.parentNode && (!element.tagName || + (element.tagName.toUpperCase() != tagName.toUpperCase()))) + element = element.parentNode; + return element; + }, + + observers: false, + + _observeAndCache: function(element, name, observer, useCapture) { + if (!this.observers) this.observers = []; + if (element.addEventListener) { + this.observers.push([element, name, observer, useCapture]); + element.addEventListener(name, observer, useCapture); + } else if (element.attachEvent) { + this.observers.push([element, name, observer, useCapture]); + element.attachEvent('on' + name, observer); + } + }, + + unloadCache: function() { + if (!Event.observers) return; + for (var i = 0, length = Event.observers.length; i < length; i++) { + Event.stopObserving.apply(this, Event.observers[i]); + 
Event.observers[i][0] = null; + } + Event.observers = false; + }, + + observe: function(element, name, observer, useCapture) { + element = $(element); + useCapture = useCapture || false; + + if (name == 'keypress' && + (navigator.appVersion.match(/Konqueror|Safari|KHTML/) + || element.attachEvent)) + name = 'keydown'; + + Event._observeAndCache(element, name, observer, useCapture); + }, + + stopObserving: function(element, name, observer, useCapture) { + element = $(element); + useCapture = useCapture || false; + + if (name == 'keypress' && + (navigator.appVersion.match(/Konqueror|Safari|KHTML/) + || element.detachEvent)) + name = 'keydown'; + + if (element.removeEventListener) { + element.removeEventListener(name, observer, useCapture); + } else if (element.detachEvent) { + try { + element.detachEvent('on' + name, observer); + } catch (e) {} + } + } +}); + +/* prevent memory leaks in IE */ +if (navigator.appVersion.match(/\bMSIE\b/)) + Event.observe(window, 'unload', Event.unloadCache, false); +var Position = { + // set to true if needed, warning: firefox performance problems + // NOT neeeded for page scrolling, only if draggable contained in + // scrollable elements + includeScrollOffsets: false, + + // must be called before calling withinIncludingScrolloffset, every time the + // page is scrolled + prepare: function() { + this.deltaX = window.pageXOffset + || document.documentElement.scrollLeft + || document.body.scrollLeft + || 0; + this.deltaY = window.pageYOffset + || document.documentElement.scrollTop + || document.body.scrollTop + || 0; + }, + + realOffset: function(element) { + var valueT = 0, valueL = 0; + do { + valueT += element.scrollTop || 0; + valueL += element.scrollLeft || 0; + element = element.parentNode; + } while (element); + return [valueL, valueT]; + }, + + cumulativeOffset: function(element) { + var valueT = 0, valueL = 0; + do { + valueT += element.offsetTop || 0; + valueL += element.offsetLeft || 0; + element = element.offsetParent; + } 
while (element); + return [valueL, valueT]; + }, + + positionedOffset: function(element) { + var valueT = 0, valueL = 0; + do { + valueT += element.offsetTop || 0; + valueL += element.offsetLeft || 0; + element = element.offsetParent; + if (element) { + if(element.tagName=='BODY') break; + var p = Element.getStyle(element, 'position'); + if (p == 'relative' || p == 'absolute') break; + } + } while (element); + return [valueL, valueT]; + }, + + offsetParent: function(element) { + if (element.offsetParent) return element.offsetParent; + if (element == document.body) return element; + + while ((element = element.parentNode) && element != document.body) + if (Element.getStyle(element, 'position') != 'static') + return element; + + return document.body; + }, + + // caches x/y coordinate pair to use with overlap + within: function(element, x, y) { + if (this.includeScrollOffsets) + return this.withinIncludingScrolloffsets(element, x, y); + this.xcomp = x; + this.ycomp = y; + this.offset = this.cumulativeOffset(element); + + return (y >= this.offset[1] && + y < this.offset[1] + element.offsetHeight && + x >= this.offset[0] && + x < this.offset[0] + element.offsetWidth); + }, + + withinIncludingScrolloffsets: function(element, x, y) { + var offsetcache = this.realOffset(element); + + this.xcomp = x + offsetcache[0] - this.deltaX; + this.ycomp = y + offsetcache[1] - this.deltaY; + this.offset = this.cumulativeOffset(element); + + return (this.ycomp >= this.offset[1] && + this.ycomp < this.offset[1] + element.offsetHeight && + this.xcomp >= this.offset[0] && + this.xcomp < this.offset[0] + element.offsetWidth); + }, + + // within must be called directly before + overlap: function(mode, element) { + if (!mode) return 0; + if (mode == 'vertical') + return ((this.offset[1] + element.offsetHeight) - this.ycomp) / + element.offsetHeight; + if (mode == 'horizontal') + return ((this.offset[0] + element.offsetWidth) - this.xcomp) / + element.offsetWidth; + }, + + page: 
function(forElement) { + var valueT = 0, valueL = 0; + + var element = forElement; + do { + valueT += element.offsetTop || 0; + valueL += element.offsetLeft || 0; + + // Safari fix + if (element.offsetParent==document.body) + if (Element.getStyle(element,'position')=='absolute') break; + + } while (element = element.offsetParent); + + element = forElement; + do { + if (!window.opera || element.tagName=='BODY') { + valueT -= element.scrollTop || 0; + valueL -= element.scrollLeft || 0; + } + } while (element = element.parentNode); + + return [valueL, valueT]; + }, + + clone: function(source, target) { + var options = Object.extend({ + setLeft: true, + setTop: true, + setWidth: true, + setHeight: true, + offsetTop: 0, + offsetLeft: 0 + }, arguments[2] || {}) + + // find page position of source + source = $(source); + var p = Position.page(source); + + // find coordinate system to use + target = $(target); + var delta = [0, 0]; + var parent = null; + // delta [0,0] will do fine with position: fixed elements, + // position:absolute needs offsetParent deltas + if (Element.getStyle(target,'position') == 'absolute') { + parent = Position.offsetParent(target); + delta = Position.page(parent); + } + + // correct by body offsets (fixes Safari) + if (parent == document.body) { + delta[0] -= document.body.offsetLeft; + delta[1] -= document.body.offsetTop; + } + + // set position + if(options.setLeft) target.style.left = (p[0] - delta[0] + options.offsetLeft) + 'px'; + if(options.setTop) target.style.top = (p[1] - delta[1] + options.offsetTop) + 'px'; + if(options.setWidth) target.style.width = source.offsetWidth + 'px'; + if(options.setHeight) target.style.height = source.offsetHeight + 'px'; + }, + + absolutize: function(element) { + element = $(element); + if (element.style.position == 'absolute') return; + Position.prepare(); + + var offsets = Position.positionedOffset(element); + var top = offsets[1]; + var left = offsets[0]; + var width = element.clientWidth; + var height 
= element.clientHeight; + + element._originalLeft = left - parseFloat(element.style.left || 0); + element._originalTop = top - parseFloat(element.style.top || 0); + element._originalWidth = element.style.width; + element._originalHeight = element.style.height; + + element.style.position = 'absolute'; + element.style.top = top + 'px'; + element.style.left = left + 'px'; + element.style.width = width + 'px'; + element.style.height = height + 'px'; + }, + + relativize: function(element) { + element = $(element); + if (element.style.position == 'relative') return; + Position.prepare(); + + element.style.position = 'relative'; + var top = parseFloat(element.style.top || 0) - (element._originalTop || 0); + var left = parseFloat(element.style.left || 0) - (element._originalLeft || 0); + + element.style.top = top + 'px'; + element.style.left = left + 'px'; + element.style.height = element._originalHeight; + element.style.width = element._originalWidth; + } +} + +// Safari returns margins on body which is incorrect if the child is absolutely +// positioned. For performance reasons, redefine Position.cumulativeOffset for +// KHTML/WebKit only. 
+if (/Konqueror|Safari|KHTML/.test(navigator.userAgent)) { + Position.cumulativeOffset = function(element) { + var valueT = 0, valueL = 0; + do { + valueT += element.offsetTop || 0; + valueL += element.offsetLeft || 0; + if (element.offsetParent == document.body) + if (Element.getStyle(element, 'position') == 'absolute') break; + + element = element.offsetParent; + } while (element); + + return [valueL, valueT]; + } +} + +Element.addMethods(); \ No newline at end of file diff --git a/solr/client/ruby/flare/public/plugin_assets/README b/solr/client/ruby/flare/public/plugin_assets/README new file mode 100644 index 00000000000..6576ab55bca --- /dev/null +++ b/solr/client/ruby/flare/public/plugin_assets/README @@ -0,0 +1,17 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +Files in this directory are automatically generated from your Rails Engines. +They are copied from the 'public' directories of each engine into this directory +each time Rails starts (server, console... any time 'start_engine' is called). +Any edits you make will NOT persist across the next server restart; instead you +should edit the files within the /assets/ directory itself. 
diff --git a/solr/client/ruby/flare/public/stylesheets/flare.css b/solr/client/ruby/flare/public/stylesheets/flare.css new file mode 100644 index 00000000000..17150ab1f37 --- /dev/null +++ b/solr/client/ruby/flare/public/stylesheets/flare.css @@ -0,0 +1,180 @@ +/* +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +*/ + +body { + font-family: Verdana, Geneva, Arial, helvetica, sans-serif; + font-size: 12px; + margin: 15px; +} + +#header { + font-size: 22px; + margin-bottom: -10px; + width: 200px; + border-top: 4px solid #D43D1A; + border-left: 4px solid #EE4000; + background:url(../images/flare.jpg) no-repeat left top; +} + +#header a { + color: #EE4000; +} + +#header a:hover { + color: #CD3700; +} + +a img { + border: 0; +} + +td { + font-family: Verdana, Geneva, Arial, helvetica, sans-serif; + font-size: 11px; +} +a, a:visited { + color: #cc6633; + text-decoration: none; +} +a:hover { + color: #ff9933; + text-decoration: none; +} + +h2 { + font-size: 16px; + font-weight: normal; + } + +h4 { + font-size: 13px; + font-weight: normal; + letter-spacing: .2em; + } + +h4 a { + font-size: 11px; + letter-spacing: normal; + } + +h4 a:hover { + text-decoration: none; + } + +em { + color: #000; +} + +#sidebar { + float: right; + width: 339px; + font-size: 11px; + line-height: 20px; + margin-bottom: 20px; + padding: 10px; +} + +.searchbar { + line-height: 12px; +} + +#sidebar ul { + list-style: none; + margin-left: -20px; + margin-top: -10px; +} + +#sidebar 
img { + vertical-align: middle; +} + +#sidebar .exhibit { + float: right; +} + +#variables { + padding: 20px; +} + +.varheader { + font-size: 14px; + font-weight: normal; + letter-spacing: .2em; +} + +#queries img { + vertical-align: baseline; + width:14px; +} + +#queries { + margin-left: 10px; + color: #666; +} + +#filters img { + vertical-align: baseline; + width:14px; +} + +#filters { + margin-left: 10px; + color: #666; +} + +.clear { + padding: 10px 0 10px 0px; + font-size: 11px; +} + +.resultsheader { + font-size: 14px; + font-weight: normal; + letter-spacing: .2em; + margin-bottom: -20px; +} + +#results { + padding: 20px; + margin-right: 380px; +} + +.resultsPager { + padding: 4px; + margin-top: 30px; + margin-bottom: 20px; + border: 1px solid #A2C9EF; + background-color: #E4F1FD; +} + +.title { + font-size: 13px; + color: #000; + letter-spacing: .1em; +} + +.entry { + color: #666; +} + +.field { + color: #999; + font-style: italic; +} + +div.auto_complete { + padding-left: 20px; + padding-top: 10px; + background: #fff; +} diff --git a/solr/client/ruby/flare/script/about b/solr/client/ruby/flare/script/about new file mode 100755 index 00000000000..f30f07e13c4 --- /dev/null +++ b/solr/client/ruby/flare/script/about @@ -0,0 +1,15 @@ +#!/usr/bin/env ruby +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +require File.dirname(__FILE__) + '/../config/boot' +require 'commands/about' \ No newline at end of file diff --git a/solr/client/ruby/flare/script/breakpointer b/solr/client/ruby/flare/script/breakpointer new file mode 100755 index 00000000000..ce02815cbd9 --- /dev/null +++ b/solr/client/ruby/flare/script/breakpointer @@ -0,0 +1,15 @@ +#!/usr/bin/env ruby +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require File.dirname(__FILE__) + '/../config/boot' +require 'commands/breakpointer' \ No newline at end of file diff --git a/solr/client/ruby/flare/script/console b/solr/client/ruby/flare/script/console new file mode 100755 index 00000000000..7c2f7a4cb01 --- /dev/null +++ b/solr/client/ruby/flare/script/console @@ -0,0 +1,15 @@ +#!/usr/bin/env ruby +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +require File.dirname(__FILE__) + '/../config/boot' +require 'commands/console' \ No newline at end of file diff --git a/solr/client/ruby/flare/script/destroy b/solr/client/ruby/flare/script/destroy new file mode 100755 index 00000000000..96ca14a0457 --- /dev/null +++ b/solr/client/ruby/flare/script/destroy @@ -0,0 +1,15 @@ +#!/usr/bin/env ruby +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require File.dirname(__FILE__) + '/../config/boot' +require 'commands/destroy' \ No newline at end of file diff --git a/solr/client/ruby/flare/script/generate b/solr/client/ruby/flare/script/generate new file mode 100755 index 00000000000..6c6fcb0d942 --- /dev/null +++ b/solr/client/ruby/flare/script/generate @@ -0,0 +1,15 @@ +#!/usr/bin/env ruby +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +require File.dirname(__FILE__) + '/../config/boot' +require 'commands/generate' \ No newline at end of file diff --git a/solr/client/ruby/flare/script/performance/benchmarker b/solr/client/ruby/flare/script/performance/benchmarker new file mode 100755 index 00000000000..5bedc810749 --- /dev/null +++ b/solr/client/ruby/flare/script/performance/benchmarker @@ -0,0 +1,15 @@ +#!/usr/bin/env ruby +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require File.dirname(__FILE__) + '/../../config/boot' +require 'commands/performance/benchmarker' diff --git a/solr/client/ruby/flare/script/performance/profiler b/solr/client/ruby/flare/script/performance/profiler new file mode 100755 index 00000000000..3daeaabdfdd --- /dev/null +++ b/solr/client/ruby/flare/script/performance/profiler @@ -0,0 +1,15 @@ +#!/usr/bin/env ruby +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +require File.dirname(__FILE__) + '/../../config/boot' +require 'commands/performance/profiler' diff --git a/solr/client/ruby/flare/script/pie.rb b/solr/client/ruby/flare/script/pie.rb new file mode 100755 index 00000000000..7f9d94403cd --- /dev/null +++ b/solr/client/ruby/flare/script/pie.rb @@ -0,0 +1,26 @@ +#!/usr/bin/env ruby +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'rubygems' +require 'sparklines' + +0.upto(100) do |i| + Sparklines.plot_to_file("public/images/pie_#{i}.png", + [i], :type => 'pie', + :share_color => "#D43D1A", + :remain_color => "#dcdcdc" +# :background_color => "#ededed" + ) +end + + diff --git a/solr/client/ruby/flare/script/plugin b/solr/client/ruby/flare/script/plugin new file mode 100755 index 00000000000..3ba193491f1 --- /dev/null +++ b/solr/client/ruby/flare/script/plugin @@ -0,0 +1,15 @@ +#!/usr/bin/env ruby +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +require File.dirname(__FILE__) + '/../config/boot' +require 'commands/plugin' \ No newline at end of file diff --git a/solr/client/ruby/flare/script/process/inspector b/solr/client/ruby/flare/script/process/inspector new file mode 100755 index 00000000000..5653e9199f1 --- /dev/null +++ b/solr/client/ruby/flare/script/process/inspector @@ -0,0 +1,15 @@ +#!/usr/bin/env ruby +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require File.dirname(__FILE__) + '/../../config/boot' +require 'commands/process/inspector' diff --git a/solr/client/ruby/flare/script/process/reaper b/solr/client/ruby/flare/script/process/reaper new file mode 100755 index 00000000000..7dd181a1cad --- /dev/null +++ b/solr/client/ruby/flare/script/process/reaper @@ -0,0 +1,15 @@ +#!/usr/bin/env ruby +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +require File.dirname(__FILE__) + '/../../config/boot' +require 'commands/process/reaper' diff --git a/solr/client/ruby/flare/script/process/spawner b/solr/client/ruby/flare/script/process/spawner new file mode 100755 index 00000000000..2e9b27d85ca --- /dev/null +++ b/solr/client/ruby/flare/script/process/spawner @@ -0,0 +1,15 @@ +#!/usr/bin/env ruby +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require File.dirname(__FILE__) + '/../../config/boot' +require 'commands/process/spawner' diff --git a/solr/client/ruby/flare/script/runner b/solr/client/ruby/flare/script/runner new file mode 100755 index 00000000000..05ba1575781 --- /dev/null +++ b/solr/client/ruby/flare/script/runner @@ -0,0 +1,15 @@ +#!/usr/bin/env ruby +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +require File.dirname(__FILE__) + '/../config/boot' +require 'commands/runner' \ No newline at end of file diff --git a/solr/client/ruby/flare/script/server b/solr/client/ruby/flare/script/server new file mode 100755 index 00000000000..5ddadff82b7 --- /dev/null +++ b/solr/client/ruby/flare/script/server @@ -0,0 +1,15 @@ +#!/usr/bin/env ruby +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require File.dirname(__FILE__) + '/../config/boot' +require 'commands/server' \ No newline at end of file diff --git a/solr/client/ruby/flare/test/functional/browse_controller_test.rb b/solr/client/ruby/flare/test/functional/browse_controller_test.rb new file mode 100644 index 00000000000..d206f16121b --- /dev/null +++ b/solr/client/ruby/flare/test/functional/browse_controller_test.rb @@ -0,0 +1,30 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +require File.dirname(__FILE__) + '/../test_helper' +require 'browse_controller' + +# Re-raise errors caught by the controller. +class BrowseController; def rescue_action(e) raise e end; end + +class BrowseControllerTest < Test::Unit::TestCase + def setup + @controller = BrowseController.new + @request = ActionController::TestRequest.new + @response = ActionController::TestResponse.new + end + + # Replace this with your real tests. + def test_truth + assert true + end +end diff --git a/solr/client/ruby/flare/test/test_helper.rb b/solr/client/ruby/flare/test/test_helper.rb new file mode 100644 index 00000000000..cc17dfff1a0 --- /dev/null +++ b/solr/client/ruby/flare/test/test_helper.rb @@ -0,0 +1,47 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +ENV["RAILS_ENV"] = "test" +require File.expand_path(File.dirname(__FILE__) + "/../config/environment") + +# Added the following requires, and commented out test_help and the self.* calls +# such that tests can run without a database. (Based on Fowler's Rails Recipes) +require 'application' +require 'test/unit' +require 'action_controller/test_process' +require 'breakpoint' +#require 'test_help' + +class Test::Unit::TestCase + # Transactional fixtures accelerate your tests by wrapping each test method + # in a transaction that's rolled back on completion. This ensures that the + # test database remains unchanged so your fixtures don't have to be reloaded + # between every test method. 
Fewer database queries means faster tests. + # + # Read Mike Clark's excellent walkthrough at + # http://clarkware.com/cgi/blosxom/2005/10/24#Rails10FastTesting + # + # Every Active Record database supports transactions except MyISAM tables + # in MySQL. Turn off transactional fixtures in this case; however, if you + # don't care one way or the other, switching from MyISAM to InnoDB tables + # is recommended. + #self.use_transactional_fixtures = true + + # Instantiated fixtures are slow, but give you @david where otherwise you + # would need people(:david). If you don't want to migrate your existing + # test cases which use the @david style and don't mind the speed hit (each + # instantiated fixtures translates to a database query per test method), + # then set this back to true. + #self.use_instantiated_fixtures = false + + # Add more helper methods to be used by all tests here... +end diff --git a/solr/client/ruby/flare/vendor/plugins/engines/CHANGELOG b/solr/client/ruby/flare/vendor/plugins/engines/CHANGELOG new file mode 100644 index 00000000000..05a0e1a015e --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/CHANGELOG @@ -0,0 +1,241 @@ += 1.2.0 - MASSIVE INTERNAL REFACTORING + + +* !!!Support for Rails < 1.2 has been dropped!!!; if you are using Rails =< 1.1.6, please use Engines 1.1.6, available from http://svn.rails-engines.org/engines/tags/rel_1.1.6 + +* Engines are dead! Long live plugins! There is now no meaningful notion of an engine - all plugins can take advantage of the more powerful features that the engines plugin provides by including app directories, etc. + +* Init_engine.rb is no longer used; please use the plugin-standard init.rb instead. + +* Engines.start is no longer required; please use the config.plugins array provided by Rails instead + +* To get the most benefit from Engines, set config.plugins to ["engines", "*"] to load the engines plugin first, and then all other plugins in their normal order after. 
+ +* Access all loaded plugins via the new Rails.plugins array, and by name using Rails.plugins[:plugin_name]. + +* Access plugin metadata loaded automatically from about.yml: Rails.plugins[:name].about. Plugin#version is provided directly, for easy access. + +* Module.config is has been removed - use mattr_accessor instead, and initialize your default values via the init.rb mechanism. + +* Public asset helpers have been rewritten; instead of engine_stylesheet, now use stylesheet_link_tag :name, :plugin => "plugin_name" + +* Plugin migrations have been reworked to integrate into the main migration stream. Please run script/generate plugin_migration to create plugin migrations in your main application. + +* The fixture method for loading fixtures against any class has been removed; instead, engines will now provide a mechanism for loading fixtures from all plugins, by mirroring fixtures into a common location. + +* All references to engines have been removed; For example, any rake tasks which applied to engines now apply to all plugins. The default Rails rake tasks for plugins are overridden where necessary. + +* Layouts can now be shared via plugins - inspiration gratefully taken from PluginAWeek's plugin_routing :) + +* Actual routing from plugins is now possible, by including routes.rb in your plugin directory and using the from_plugin method in config/routes.rb (Ticket #182) + +* Controllers are no longer loaded twice if they're not present in the normal app/ directory (Ticket #177) + +* The preferred location for javascripts/stylesheets/etc is now 'assets' rather than 'public' + +* Ensure that plugins started before routing have their controllers appropriately added to config.controller_paths (Ticket #258) + +* Removed Engines.version - it's not longer relevant, now we're loading version information from about.yml files. + +* Added a huge amount of documentation to all new modules. 
+ +* Added new warning message if installation of engines 1.2.x is attempted in a Rails 1.1.x application + +* Added details of the removal of the config method to UPGRADING + +* Removed the plugins:info rake task in favour of adding information to script/about via the Rails::Info module (Ticket #261) + +* Improved handling of testing and documentation tasks for plugins + + + += 1.1.4 + +* Fixed creation of multipart emails (Ticket #190) + +* Added a temporary fix to the code-mixing issue. In your engine's test/test_helper.rb, please add the following lines: + + # Ensure that the code mixing and view loading from the application is disabled + Engines.disable_app_views_loading = true + Engines.disable_app_code_mixing = true + + which will prevent code mixing for controllers and helpers, and loading views from the application. One thing to remember is to load any controllers/helpers using 'require_or_load' in your tests, to ensure that the engine behaviour is respected (Ticket #135) + +* Added tasks to easily test engines individually (Ticket #120) + +* Fixture extensions will now fail with an exception if the corresponding class cannot be loaded (Ticket #138) + +* Patch for new routing/controller loading in Rails 1.1.6. The routing code is now replaced with the contents of config.controller_paths, along with controller paths from any started engines (Ticket #196) + +* Rails' Configuration instance is now stored, and available from all engines and plugins. + + + += 1.1.3 + +* Fixed README to show 'models' rather than 'model' class (Ticket #167) +* Fixed dependency loading to work with Rails 1.1.4 (Ticket #180) + + + += 1.1.2 + +* Added better fix to version checking (Ticket #130, jdell@gbdev.com). 
+ +* Fixed generated init_engine.rb so that VERSION module doesn't cause probems (Ticket #131, japgolly@gmail.com) + +* Fixed error with Rails 1.0 when trying to ignore the engine_schema_info table (Ticket #132, snowblink@gmail.com) + +* Re-added old style rake tasks (Ticket #133) + +* No longer adding all subdirectories of /app or /lib, as this can cause issues when files are grouped in modules (Ticket #149, kasatani@gmail.com) + +* Fixed engine precidence ordering for Rails 1.1 (Ticket #146) + +* Added new Engines.each method to assist in processing the engines in the desired order (Ticket #146) + +* Fixed annoying error message at appears when starting the console in development mode (Ticket #134) + +* Engines is now super-careful about loading the correct version of Rails from vendor (Ticket #154) + + + += 1.1.1 + +* Fixed migration rake task failing when given a specific version (Ticket #115) + +* Added new rake task "test:engines" which will test engines (and other plugins) but ensure that the test database is cloned from development beforehand (Ticket #125) + +* Fixed issue where 'engine_schema_info' table was included in schema dumps (Ticket #87) + +* Fixed multi-part emails (Ticket #121) + +* Added an 'install.rb' file to new engines created by the bundled generator, which installs the engines plugin automatically if it doesn't already exist (Ticket #122) + +* Added a default VERSION module to generated engines (Ticket #123) + +* Refactored copying of engine's public files to a method of an Engine instance. 
You can now call Engines.get(:engine_name).copy_public_files (Ticket #108) + +* Changed engine generator templates from .rb files to .erb files (Ticket #106) + +* Fixed the test_helper.erb file to use the correct testing extensions and not load any schema - the schema will be cloned automatically via rake test:engines + +* Fixed problem when running with Rails 1.1.1 where version wasn't determined correctly (Ticket #129) + +* Fixed bug preventing engines from loading when both Rails 1.1.0 and 1.1.1 gems are installed and in use. + +* Updated version (d'oh!) + + + += 1.1.0 + +* Improved regexp matching for Rails 1.0 engines with peculiar paths + +* Engine instance objects can be accessed via Engines[:name], an alias for Engines.get(:name) (Ticket #99) + +* init_engine.rb is now processed as the final step in the Engine.start process, so it can access files within the lib directory, which is now in the $LOAD_PATH at that point. (Ticket #99) + +* Clarified MIT license (Ticket #98) + +* Updated Rake tasks to integrate smoothly with Rails 1.1 namespaces + +* Changed the version to "1.1.0 (svn)" + +* Added more information about using the plugin with Edge Rails to the README + +* moved extensions into lib/engines/ directory to enable use of Engines module in extension code. + +* Added conditional require_or_load method which attempts to detect the current Rails version. To use the Edge Rails version of the loading mechanism, add the line: + +* Engines.config :edge, true + +* to your environment.rb file. + +* Merged changes from /branches/edge and /branches/rb_1.0 into /trunk + +* engine_schema_info now respects the prefix/suffixes set for ActiveRecord::Base (Ticket #67) + +* added ActiveRecord::Base.wrapped_table_name(name) method to assist in determining the correct table name + + + += 1.0.6 + +* Added ability to determine version information for engines: rake engine_info + +* Added a custom logger for the Engines module, to stop pollution of the Rails logs. 
+ +* Added some more tests (in particular, see rails_engines/applications/engines_test). + +* Another attempt at solving Ticket #53 - controllers and helpers should now be loadable from modules, and if a full path (including RAILS_ROOT/ENGINES_ROOT) is given, it should be safely stripped from the require filename such that corresponding files can be located in any active engines. In other words, controller/helper overloading should now completely work, even if the controllers/helpers are in modules. + +* Added (finally) patch from Ticket #22 - ActionMailer helpers should now load + +* Removed support for Engines.start :engine, :engine_name => 'whatever'. It was pointless. + +* Fixed engine name referencing; engine_stylesheet/engine_javascript can now happily use shorthand engine names (i.e. :test == :test_engine) (Ticket #45) + +* Fixed minor documentation error ('Engine.start' ==> 'Engines.start') (Ticket #57) + +* Fixed double inclusion of RAILS_ROOT in engine_migrate rake task (Ticket #61) + +* Added ability to force config values even if given as a hash (Ticket #62) + + + += 1.0.5 + +* Fixed bug stopping fixtures from loading with PostgreSQL + + + += 1.0.4 + +* Another attempt at loading controllers within modules (Ticket #56) + + + += 1.0.3 + +* Fixed serious dependency bug stopping controllers being loaded (Ticket #56) + + + += 1.0.2 + +* Fixed bug with overloading controllers in modules from /app directory + +* Fixed exception thrown when public files couldn't be created; exception is now logged (Ticket #52) + +* Fixed problem with generated test_helper.rb file via File.expand_path (Ticket #50) + + + += 1.0.1 + +* Added engine generator for creation of new engines + +* Fixed 'Engine' typo in README + +* Fixed bug in fixtures extensions + +* Fixed /lib path management bug + +* Added method to determine public directory location from Engine object + +* Fixed bug in the error message in get_engine_dir() + +* Added proper component loading + +* Added preliminary 
tests for the config() methods module + + + += pre-v170 + +* Fixed copyright notices to point to DHH, rather than me. + +* Moved extension require statements into lib/engines.rb, so the will be loaded if another module/file calls require 'engines + +* Added a CHANGELOG file (this file) diff --git a/solr/client/ruby/flare/vendor/plugins/engines/MIT-LICENSE b/solr/client/ruby/flare/vendor/plugins/engines/MIT-LICENSE new file mode 100644 index 00000000000..17a3b22b61c --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/MIT-LICENSE @@ -0,0 +1,21 @@ +Copyright (c) 2007 James Adam + +The MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
\ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/engines/README b/solr/client/ruby/flare/vendor/plugins/engines/README new file mode 100644 index 00000000000..415e97fcd8c --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/README @@ -0,0 +1,64 @@ +The engines plugin enhances Rails' own plugin framework, making it simple to share controllers, helpers, models, public assets, routes and migrations in plugins. + +For more information, see http://rails-engines.org + += Using the plugin + +With engines 1.2, no extra configuration in environment.rb is required. Having the plugin installed will automatically enable sharing of code within plugin/app/ directories. Developers should be aware that the config.plugins parameter can be used to control plugin load order, if this is important for your application. + +=== config.plugins + +With Rails 1.2, the config.plugins list can be used to specify the order in which plugins are loaded. It's recommended (although not required) that you load the engines plugin first, and any other plugins later. The engines plugin enhances Rails' processing of config.plugins for occasions where you only care about the order of a small selection of your application's plugins. For instance, if you want to load +engines+ first, and anything else afterwards, then + + config.plugins = ["engines", "*"] + +will ensure that engines is loaded first, and everything else (in whatever order Rails chooses) afterwards. + +== Better plugins + +In addition to the regular set of plugin-supported files (lib, init.rb, tasks, generators, tests), plugins can carry the following when the engines plugin is also installed. + + +=== Controllers, Helpers, and Views + +Include these files in an app directory just like you would in a normal Rails application. If you need to override a method, view or partial, create the corresponding file in your main app directory and it will be used instead. 
+ +* Controllers & Helpers: See Engines::RailsExtensions::Dependencies for more information. +* Views: See Engines::RailsExtensions::Templates for more information. + +=== Models + +Model code can similarly be placed in an app/models/ directory. Unfortunately, it's not possible to automatically override methods within a model; if your application needs to change the way a model behaves, consider creating a subclass, or replacing the model entirely within your application's app/models/ directory. See Engines::RailsExtensions::Dependencies for more information. + +IMPORTANT NOTE: when you load code from within plugins, it is typically not handled well by Rails in terms of unloading and reloading changes. Look here for more information - http://rails-engines.org/development/common-issues-when-overloading-code-from-plugins/ + +=== Routes + +Include your route declarations in a routes.rb file at the root of your plugins, e.g.: + + connect "/my/url", :controller => "some_controller" + my_named_route "do_stuff", :controller => "blah", :action => "stuff" + # etc. + +You can then load these files into your application by declaring their inclusion in the application's config/routes.rb: + + map.from_plugin :plugin_name + +See Engines::RailsExtensions::Routing for more information. + +=== Migrations + +Migrations record the changes in your database as your application evolves. With engines 1.2, migrations from plugins can also join in this evolution as first-class entities. To add migrations to a plugin, include a db/migrate/ folder and add migrations there as normal. These migrations can then be integrated into the main flow of database evolution by running the plugin_migration generator: + + script/generate plugin_migration + +This will produce a migration in your application. Running this migration (via rake db:migrate, as normal) will migrate the database according to the latest migrations in each plugin. See Engines::RailsExtensions::Migrations for more information. 
+ + +=== More powerful Rake tasks + +The engines plugin enhances and adds to the suite of default rake tasks for working with plugins. The doc:plugins task now includes controllers, helpers and models under app, and anything other code found under the plugin's code_paths attribute. New testing tasks have been added to run unit, functional and integration tests from plugins, whilst making it easier to load fixtures from plugins. See Engines::Testing for more details about testing, and run + + rake -T + +to see the set of rake tasks available. \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/engines/Rakefile b/solr/client/ruby/flare/vendor/plugins/engines/Rakefile new file mode 100644 index 00000000000..3c346c860d4 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/Rakefile @@ -0,0 +1,32 @@ +require 'rake' +require 'rake/rdoctask' +require 'tmpdir' + +task :default => :doc + +desc 'Generate documentation for the engines plugin.' +Rake::RDocTask.new(:doc) do |doc| + doc.rdoc_dir = 'doc' + doc.title = 'Engines' + doc.main = "README" + doc.rdoc_files.include("README", "UPGRADING", "CHANGELOG", "MIT-LICENSE") + doc.rdoc_files.include('lib/**/*.rb') + doc.options << '--line-numbers' << '--inline-source' +end + +desc 'Run the engine plugin tests within their test harness' +task :cruise do + # checkout the project into a temporary directory + version = "rails_1.2" + test_dir = "#{Dir.tmpdir}/engines_plugin_#{version}_test" + puts "Checking out test harness for #{version} into #{test_dir}" + `svn co http://svn.rails-engines.org/test/engines/#{version} #{test_dir}` + + # run all the tests in this project + Dir.chdir(test_dir) + load 'Rakefile' + puts "Running all tests in test harness" + ['db:migrate', 'test', 'test:plugins'].each do |t| + Rake::Task[t].invoke + end +end \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/engines/UPGRADING b/solr/client/ruby/flare/vendor/plugins/engines/UPGRADING new file 
mode 100644 index 00000000000..af3debd29c1 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/UPGRADING @@ -0,0 +1,93 @@ += Upgrading from Engines 1.1.x to Engines 1.2.x + + +The 1.2 release of the engines plugin represents a significant rewrite of the internals. Now less intrusive into the core frameworks, it's even simpler for plugin developers to make more flexible plugins than Rails typically allows. + +However, as a consequence of change, you may need to make some adjustments to your application to get the most benefit. These are outlined below: + + +== In your application + +The following changes will affect code in applications which use the engines plugin + +=== Change the engine_schema table to plugin_schema + +Because the engines plugin now works universally for any Rails plugin, the table used to store migration information has been renamed appropriately. We can also play nicely with other plugin_migration implementations by supporting this convention. + + +=== Rename RAILS_ROOT/public/engine_files to RAILS_ROOT/public/plugin_assets + +Similarly, any plugin can now share stylesheets, images and so on via an assets folder, so the destination within the main public directory has been renamed to reflect this. You can simply delete the RAILS_ROOT/public/engine_files directory; the new one will be generated the next time your application starts. + + +=== The engine_image, engine_stylesheet and engine_javascript helpers have been replaced with enhanced versions of the normal Rails asset helpers + +Now, it's simple to include an image from any plugin, by adding a plugin key to the helper's options hash: + + image_tag "image.png", :plugin => "my_plugin" + +A similar convention exists for stylesheets and javascript files. Additionally, the old behaviour of automatically including assets which match the "engine"'s name has been dropped - only the files you explicitly request are included. 
+ + +=== Engines.start is replaced with config.plugins + +Rails' native configuration object now includes an array for specifying which plugins are loaded, and in what order. The engines plugin enhances this feature with a "*" wildcard, so that you can load any order-sensitive plugins, and then the rest in any order. + + config.plugins = ["engines", "some_plugin", "another_plugin", "*"] + +It's worth noting that the engines plugin no longer needs to be specifically loaded first, so most people can happily ignore the config.plugins array without any problems. + + +=== The "config" method for "engine"-style configuration is no longer loaded by default. + +See below. + + +=== Any "engines" type rake tasks are now "plugin" tasks. + +The following rake tasks are now provided by the engines plugin for operating on your application's plugins (whenever a task can operate on a single plugin, this can be controlled by specifying PLUGIN= on the commandline): + + db:fixtures:plugins:load # load fixtures from plugins. + + test:plugins:units # Run tests from within /test/units + test:plugins:functional # Run tests from within /test/functional + test:plugins:integration # Run tests from within /test/integration + test:plugins # Run all tests from within /test/* + + doc:plugins # Run full rdoc against all source files within all plugins + doc:plugins: # Run full rdoc against all source files within the given plugin + + + +== In your "engine"-style plugins + +If you have developed or are using plugins that leverage engines enhancements, you need to be aware of the following changes in engines 1.2. + +=== init_engine.rb is no longer required + +Please now create init.rb files, just as you'd find in "normal" plugins. Remember - there's no such thing as "an engine" anymore. 
+ + +=== The "fixture" method for loading tests from arbitrary files + +Supporting this was just too difficult, and it seems like the combination of Rails' own lack of enhancements regarding this feature, along with the mind-shift against static fixtures, meant that it simply was too much work to continue to support. + +The only reason this mechanism originally existed was to support plugins which couldn't predict the names of the tables that models might be stored in. This is now a discouraged behaviour. + + +=== Engines.current.version has changed its behaviour slightly. + +The engines plugin used to provide a simple way to store version information about plugins. This has now been superceded by including that information in an about.yml file, which any plugin can use. + + +=== The "config" method is not included, by default. + +Where previously developers could use the "config" method to define configuration, this is now deprecated in favour of using mattr_accessor directly in the Module. However, if you still *need* to use the config method, it is included but must be explicitly required, probably near the top of environment.rb: + + require File.join(RAILS_ROOT, "vendor", "plugins", "engines", + "lib", "engines", "deprecated_config_support") + + +=== Public assets should now be stored in a subdirectory of your plugin called "assets", rather than "public". + +This clarifies the nature of this directory; it is not made public itself, but rather is a container for files which should be made accessible from the web. 
diff --git a/solr/client/ruby/flare/vendor/plugins/engines/about.yml b/solr/client/ruby/flare/vendor/plugins/engines/about.yml new file mode 100644 index 00000000000..aee53e2c5e2 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/about.yml @@ -0,0 +1,7 @@ +author: James Adam +email: james.adam@gmail.com +homepage: http://www.rails-engines.org +summary: Enhances the plugin mechanism to perform more flexible sharing +description: The Rails Engines plugin allows the sharing of almost any type of code or asset that you could use in a Rails application, including controllers, models, stylesheets, and views. +license: MIT +version: 1.2.0 \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/engines/generators/plugin_migration/USAGE b/solr/client/ruby/flare/vendor/plugins/engines/generators/plugin_migration/USAGE new file mode 100644 index 00000000000..ac39b504abe --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/generators/plugin_migration/USAGE @@ -0,0 +1,45 @@ +Description: + The plugin migration generator assists in working with schema additions + required by plugins. Instead of running migrations from plugins directly, + the generator creates a regular Rails migration which will be responsible + for migrating the plugins from their current version to the latest version + installed. + + This is important because the set of application migrations remains an + accurate record of the state of the database, even as plugins are installed + and removed during the development process. + +Example: + ./script/generate plugin_migration [ ...] + + This will generate: + + RAILS_ROOT + |- db + |-migrate + |- xxx_plugin_migrations.rb + + which contains the migrations for the given plugin(s). 
+ + +Advanced Usage: + +There may be situations where you need *complete* control over the migrations +of plugins in your application, migrating a certainly plugin down to X, and +another plugin up to Y, where neither X or Y are the latest migrations for those +plugins. + +For those unfortunate few, I have two pieces of advice: + + 1. Why? This is a code smell [http://c2.com/xp/CodeSmell.html]. + + 2. Well, OK. Don't panic. You can completely control plugin migrations by + creating your own migrations. To manually migrate a plugin to a specific + version, simply use + + Rails.plugins[:your_plugin_name].migrate(version) + + where version is the integer of the migration this plugin should end + up at. + +With great power comes great responsibility. Use this wisely. \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/engines/generators/plugin_migration/plugin_migration_generator.rb b/solr/client/ruby/flare/vendor/plugins/engines/generators/plugin_migration/plugin_migration_generator.rb new file mode 100644 index 00000000000..906a9111e5b --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/generators/plugin_migration/plugin_migration_generator.rb @@ -0,0 +1,79 @@ +# Generates a migration which migrates all plugins to their latest versions +# within the database. +class PluginMigrationGenerator < Rails::Generator::Base + + def initialize(runtime_args, runtime_options={}) + super + @options = {:assigns => {}} + + ensure_plugin_schema_table_exists + get_plugins_to_migrate(runtime_args) + + if @plugins_to_migrate.empty? + puts "All plugins are migrated to their latest versions" + exit(0) + end + + @options[:migration_file_name] = build_migration_name + @options[:assigns][:class_name] = build_migration_name.classify + end + + def manifest + record do |m| + m.migration_template 'plugin_migration.erb', 'db/migrate', @options + end + end + + protected + + # Create the plugin schema table if it doesn't already exist. 
See + # Engines::RailsExtensions::Migrations#initialize_schema_information_with_engine_additions + def ensure_plugin_schema_table_exists + ActiveRecord::Base.connection.initialize_schema_information + end + + # Determine all the plugins which have migrations that aren't present + # according to the plugin schema information from the database. + def get_plugins_to_migrate(plugin_names) + + # First, grab all the plugins which exist and have migrations + @plugins_to_migrate = if plugin_names.empty? + Rails.plugins + else + plugin_names.map do |name| + Rails.plugins[name] ? Rails.plugins[name] : raise("Cannot find the plugin '#{name}'") + end + end + + @plugins_to_migrate.reject! { |p| p.latest_migration.nil? } + + # Then find the current versions from the database + @current_versions = {} + @plugins_to_migrate.each do |plugin| + @current_versions[plugin.name] = Engines::PluginMigrator.current_version(plugin) + end + + # Then find the latest versions from their migration directories + @new_versions = {} + @plugins_to_migrate.each do |plugin| + @new_versions[plugin.name] = plugin.latest_migration + end + + # Remove any plugins that don't need migration + @plugins_to_migrate.map { |p| p.name }.each do |name| + @plugins_to_migrate.delete(Rails.plugins[name]) if @current_versions[name] == @new_versions[name] + end + + @options[:assigns][:plugins] = @plugins_to_migrate + @options[:assigns][:new_versions] = @new_versions + @options[:assigns][:current_versions] = @current_versions + end + + # Construct a unique migration name based on the plugins involved and the + # versions they should reach after this migration is run. 
+ def build_migration_name + @plugins_to_migrate.map do |plugin| + "#{plugin.name}_to_version_#{@new_versions[plugin.name]}" + end.join("_and_") + end +end \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/engines/generators/plugin_migration/templates/plugin_migration.erb b/solr/client/ruby/flare/vendor/plugins/engines/generators/plugin_migration/templates/plugin_migration.erb new file mode 100644 index 00000000000..7a5f48a97af --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/generators/plugin_migration/templates/plugin_migration.erb @@ -0,0 +1,13 @@ +class <%= class_name %> < ActiveRecord::Migration + def self.up + <%- plugins.each do |plugin| -%> + Rails.plugins["<%= plugin.name %>"].migrate(<%= new_versions[plugin.name] %>) + <%- end -%> + end + + def self.down + <%- plugins.each do |plugin| -%> + Rails.plugins["<%= plugin.name %>"].migrate(<%= current_versions[plugin.name] %>) + <%- end -%> + end +end diff --git a/solr/client/ruby/flare/vendor/plugins/engines/init.rb b/solr/client/ruby/flare/vendor/plugins/engines/init.rb new file mode 100644 index 00000000000..63914ccca2d --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/init.rb @@ -0,0 +1,40 @@ +begin + silence_warnings { require 'rails/version' } # it may already be loaded + unless Rails::VERSION::MAJOR >= 1 && Rails::VERSION::MINOR >= 2 + raise "This version of the engines plugin requires Rails 1.2 or later!" + end +end + +# First, require the engines module & core methods +require "engines" + +# Load this before we get actually start engines +require "engines/rails_extensions/rails_initializer" + +# Start the engines mechanism. 
+Engines.init(config, self) + +# Now that we've defined the engines module, load up any extensions +[:rails, + :rails_initializer, + :dependencies, + :active_record, + :migrations, + :templates, + :public_asset_helpers, + :routing +].each do |f| + require "engines/rails_extensions/#{f}" +end + +# Load the testing extensions, if we are in the test environment. +require "engines/testing" if RAILS_ENV == "test" + +# Load the Rails::Info module so that plugins can insert information into it. +begin + require 'rails/info' +rescue Exception + # If this file can't be loaded, it's probably because we're running in an + # environment where Rails' builtins aren't yet in the load path. + # For the moment, just ignore this. See Ticket #261 +end \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/engines/install.rb b/solr/client/ruby/flare/vendor/plugins/engines/install.rb new file mode 100644 index 00000000000..7bff225a136 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/install.rb @@ -0,0 +1,32 @@ +$LOAD_PATH.unshift File.join(File.dirname(__FILE__), "..", "..", "rails", "railties", "lib") +silence_warnings { require 'rails/version' } # it may already be loaded. + +unless Rails::VERSION::MAJOR >= 1 && Rails::VERSION::MINOR >= 2 + puts <<-end_of_warning + + !!!=== IMPORTANT NOTE ===!!! + +Support for Rails < 1.2 has been dropped; if you are using +Rails =< 1.1.6, please use Engines 1.1.6, available from: + + >> http://svn.rails-engines.org/engines/tags/rel_1.1.6 + +For more details about changes in Engines 1.2, please see +the changelog or the website: + + >> http://www.rails-engines.org + +end_of_warning +else + puts <<-end_of_message + +The engines plugin is now installed. Feels good, right? Yeah. +You knew it would. + +Once the warm, fuzzy glow has subsided, be sure to read the contents +of the README and UPGRADING files if you're migrating this application +from Rails 1.1.x to 1.2.x. + +Have a great day! 
+end_of_message +end \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/engines/lib/engines.rb b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines.rb new file mode 100644 index 00000000000..8128716c579 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines.rb @@ -0,0 +1,323 @@ +# This file contains the Engines module, which holds most of the logic regarding +# the startup and management of plugins. See Engines for more details. +# +# The engines plugin adds to Rails' existing behaviour by producing the +Rails.plugins+ +# PluginList, a list of all loaded plugins in a form which can be easily queried +# and manipulated. Each instance of Plugin has methods which are used to enhance +# their behaviour, including mirroring public assets, add controllers, helpers +# and views and even migration. +# +# = init.rb +# +# When the engines plugin loads, it first includes the +# Engines::RailsExtensions::RailsInitializer module into Rails::Initializer, +# overriding a number of the methods used to load plugins. +# +# Once this is loaded, Engines.init is called to prepare the application +# and create the relevant new datastructures (including Rails.plugins). +# +# Finally, each of the extension modules from Engines::RailsExtensionsis +# loaded and included into the relevant Rails classes and modules, enhancing +# their behaviour to work better with files from plugins. + +require "engines/plugin_list" +require "engines/plugin" + +# TODO: define a better logger. +def logger + RAILS_DEFAULT_LOGGER +end + +# The Engines module contains most of the methods used during the enhanced +# startup of Rails plugins. +# +# When the Engines plugin loads (its init.rb file is evaluated), the +# Engines.init method is called. This kickstarts the plugins hooks into +# the initialization process. 
+# +# == Parameters +# +# The Engines module has a number of public configuration parameters: +# +# [+public_directory+] The directory into which plugin assets should be +# mirrored. Defaults to RAILS_ROOT/public/plugin_assets. +# [+schema_info_table+] The table to use when storing plugin migration +# version information. Defaults to +plugin_schema_info+. +# [+rails_initializer+] A reference of the Rails initializer instance that +# was used to startup Rails. This is often useful +# when working with the startup process; see +# Engines::RailsExtensions::RailsInitializer for more +# information +# +# Additionally, there are a few flags which control the behaviour of +# some of the features the engines plugin adds to Rails: +# +# [+disable_application_view_loading+] A boolean flag determining whether +# or not views should be loaded from +# the main app/views directory. +# Defaults to false; probably only +# useful when testing your plugin. +# [+disable_application_code_loading+] A boolean flag determining whether +# or not to load controllers/helpers +# from the main +app+ directory, +# if corresponding code exists within +# a plugin. Defaults to false; again, +# probably only useful when testing +# your plugin. +# [+disable_code_mixing+] A boolean flag indicating whether all plugin +# copies of a particular controller/helper should +# be loaded and allowed to override each other, +# or if the first matching file should be loaded +# instead. Defaults to false. +# +module Engines + # The name of the public directory to mirror public engine assets into. + # Defaults to RAILS_ROOT/public/plugin_assets. + mattr_accessor :public_directory + self.public_directory = File.join(RAILS_ROOT, 'public', 'plugin_assets') + + # The table in which to store plugin schema information. Defaults to + # "plugin_schema_info". 
+ mattr_accessor :schema_info_table + self.schema_info_table = "plugin_schema_info" + + # A reference to the current Rails::Initializer instance + mattr_accessor :rails_initializer + + + #-- + # These attributes control the behaviour of the engines extensions + #++ + + # Set this to true if views should *only* be loaded from plugins + mattr_accessor :disable_application_view_loading + self.disable_application_view_loading = false + + # Set this to true if controller/helper code shouldn't be loaded + # from the application + mattr_accessor :disable_application_code_loading + self.disable_application_code_loading = false + + # Set this ti true if code should not be mixed (i.e. it will be loaded + # from the first valid path on $LOAD_PATH) + mattr_accessor :disable_code_mixing + self.disable_code_mixing = false + + + private + + # A memo of the bottom of Rails' default load path + mattr_accessor :rails_final_load_path + # A memo of the bottom of Rails Dependencies load path + mattr_accessor :rails_final_dependency_load_path + + public + + # Initializes the engines plugin and prepares Rails to start loading + # plugins using engines extensions. Within this method: + # + # 1. Copies of the Rails configuration and initializer are stored; + # 2. The Rails.plugins PluginList instance is created; + # 3. Any plugins which were loaded before the engines plugin are given + # the engines treatment via #enhance_loaded_plugins. + # 4. The base public directory (into which plugin assets are mirrored) + # is created, if necessary - #initialize_base_public_directory + # 5. 
config.plugins is checked to see if a wildcard was present - + # #check_for_star_wildcard + # + def self.init(rails_configuration, rails_initializer) + # First, determine if we're running in legacy mode + @legacy_support = self.const_defined?(:LegacySupport) && LegacySupport + + # Store some information about the plugin subsystem + Rails.configuration = rails_configuration + + # We need a hook into this so we can get freaky with the plugin loading itself + self.rails_initializer = rails_initializer + + @load_all_plugins = false + + store_load_path_markers + + Rails.plugins ||= PluginList.new + enhance_loaded_plugins # including this one, as it happens. + + initialize_base_public_directory + + check_for_star_wildcard + + logger.debug "engines has started." + end + + # You can enable legacy support by defining the LegacySupport constant + # in the Engines module before Rails loads, i.e. at the *top* of environment.rb, + # add: + # + # module Engines + # LegacySupport = true + # end + # + # Legacy Support doesn't actually do anything at the moment. If necessary + # we may support older-style 'engines' using this flag. + def self.legacy_support? + @legacy_support + end + + # A reference to the currently-loading/loaded plugin. This is present to support + # legacy engines; it's preferred to use Rails.plugins[name] in your plugin's + # init.rb file in order to get your Plugin instance. + def self.current + Rails.plugins.last + end + + # This is set to true if a "*" widlcard is present at the end of + # the config.plugins array. + def self.load_all_plugins? 
+ @load_all_plugins + end + + # Stores a record of the last paths which Rails added to each of the load path + # attributes ($LOAD_PATH, Dependencies.load_paths and + # ActionController::Routing.controller_paths) that influence how code is loaded + # We need this to ensure that we place our additions to the load path *after* + # all Rails' defaults + def self.store_load_path_markers + self.rails_final_load_path = $LOAD_PATH.last + logger.debug "Rails final load path: #{self.rails_final_load_path}" + self.rails_final_dependency_load_path = ::Dependencies.load_paths.last + logger.debug "Rails final dependency load path: #{self.rails_final_dependency_load_path}" + end + + # Create Plugin instances for plugins loaded before the engines plugin was. + # Once a Plugin instance is created, the Plugin#load method is then called + # to fully load the plugin. See Plugin#load for more details about how a + # plugin is started once engines is involved. + def self.enhance_loaded_plugins + Engines.rails_initializer.loaded_plugins.each do |name| + plugin_path = File.join(self.find_plugin_path(name), name) + unless Rails.plugins[name] + plugin = Plugin.new(name, plugin_path) + logger.debug "enginizing plugin: #{plugin.name} from #{plugin_path}" + plugin.load # injects the extra directories into the load path, and mirrors public files + Rails.plugins << plugin + end + end + logger.debug "plugins is now: #{Rails.plugins.map { |p| p.name }.join(", ")}" + end + + # Ensure that the plugin asset subdirectory of RAILS_ROOT/public exists, and + # that we've added a little warning message to instruct developers not to mess with + # the files inside, since they're automatically generated. + def self.initialize_base_public_directory + if !File.exist?(self.public_directory) + # create the public/engines directory, with a warning message in it. 
+ logger.debug "Creating public engine files directory '#{self.public_directory}'" + FileUtils.mkdir(self.public_directory) + message = %{Files in this directory are automatically generated from your Rails Engines. +They are copied from the 'public' directories of each engine into this directory +each time Rails starts (server, console... any time 'start_engine' is called). +Any edits you make will NOT persist across the next server restart; instead you +should edit the files within the /assets/ directory itself.} + target = File.join(public_directory, "README") + File.open(target, 'w') { |f| f.puts(message) } unless File.exist?(target) + end + end + + # Check for a "*" at the end of the plugins list; if one is found, note that + # we should load all other plugins once Rails has finished initializing, and + # remove the "*". + def self.check_for_star_wildcard + if Rails.configuration.plugins && Rails.configuration.plugins.last == "*" + Rails.configuration.plugins.pop + @load_all_plugins = true + end + end + + + #- + # The following code is called once all plugins are loaded, and Rails is almost + # finished initialization + #+ + + # Once the Rails Initializer has finished, the engines plugin takes over + # and performs any post-processing tasks it may have, including: + # + # * Loading any remaining plugins if config.plugins ended with a '*'. + # * Updating Rails::Info with version information, if possible. + # + def self.after_initialize + if self.load_all_plugins? + logger.debug "loading remaining plugins from #{Rails.configuration.plugin_paths.inspect}" + # this will actually try to load ALL plugins again, but any that have already + # been loaded will be ignored. + rails_initializer.load_all_plugins + update_rails_info_with_loaded_plugins + end + end + + # Updates Rails::Info with the list of loaded plugins, and version information for + # each plugin. This information is then available via script/about, or through + # the builtin rails_info controller. 
+ def self.update_rails_info_with_loaded_plugins + if defined?(Rails::Info) # since it may not be available by default in some environments... + # don't do anything if it's not there. + Rails::Info.property("Loaded plugins") { Rails.plugins.map { |p| p.name }.join(", ") } + Rails.plugins.each do |plugin| + Rails::Info.property("#{plugin.name} version") { plugin.version.blank? ? "(unknown)" : plugin.version } + end + end + end + + #- + # helper methods to find and deal with plugin paths and names + #+ + + # Returns the path within +Rails.configuration.plugin_paths+ which includes + # a plugin with the given name. + def self.find_plugin_path(name) + Rails.configuration.plugin_paths.find do |path| + File.exist?(File.join(path, name.to_s)) + end + end + + # Returns the name for the plugin at the given path. + # (Note this method also appears in Rails::Initializer extensions) + def self.plugin_name(path) + File.basename(path) + end + + # A general purpose method to mirror a directory (+source+) into a destination + # directory, including all files and subdirectories. Files will not be mirrored + # if they are identical already (checked via FileUtils#identical?). + def self.mirror_files_from(source, destination) + return unless File.directory?(source) + + # TODO: use Rake::FileList#pathmap? 
+ + source_files = Dir[source + "/**/*"] + source_dirs = source_files.select { |d| File.directory?(d) } + source_files -= source_dirs + + source_dirs.each do |dir| + # strip down these paths so we have simple, relative paths we can + # add to the destination + target_dir = File.join(destination, dir.gsub(source, '')) + begin + FileUtils.mkdir_p(target_dir) + rescue Exception => e + raise "Could not create directory #{target_dir}: \n" + e + end + end + + source_files.each do |file| + begin + target = File.join(destination, file.gsub(source, '')) + unless File.exist?(target) && FileUtils.identical?(file, target) + FileUtils.cp(file, target) + end + rescue Exception => e + raise "Could not copy #{file} to #{target}: \n" + e + end + end + end +end \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/deprecated_config_support.rb b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/deprecated_config_support.rb new file mode 100644 index 00000000000..54e504ca676 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/deprecated_config_support.rb @@ -0,0 +1,135 @@ +# This file contains support for the now-deprecated +config+ method that the engines +# plugin provided before version 1.2. Instead of using this, plugin authors are +# now encouraged to create their own Module configuration mechanisms; the +# +mattr_accessor+ mechanism provided by ActiveSupport is ideal for this: +# +# module MyPlugin +# mattr_accessor :config_value +# self.config_value = "default" +# end +# +# == Using the deprecated config method +# +# If you require the config method to be present, change your environment.rb +# file such that the very top of the file looks like this: +# +# require File.join(File.dirname(__FILE__), 'boot') +# require File.join(RAILS_ROOT, "vendor", "plugins", "engines", +# "lib", "engines", "deprecated_config_support") +# + + +# Adds the +config+ and +default_constant+ methods to Module. 
+# +# *IMPORTANT NOTE* - these methods are deprecated. Only use them when you have no +# other choice. See link:files/lib/engines/deprecated_config_support_rb.html for more +# information. +class Module + # Defines a constant within a module/class ONLY if that constant does + # not already exist. + # + # This can be used to implement defaults in plugins/engines/libraries, e.g. + # if a plugin module exists: + # module MyPlugin + # default_constant :MyDefault, "the_default_value" + # end + # + # then developers can override this default by defining that constant at + # some point *before* the module/plugin gets loaded (such as environment.rb) + def default_constant(name, value) + if !(name.is_a?(String) or name.is_a?(Symbol)) + raise "Cannot use a #{name.class.name} ['#{name}'] object as a constant name" + end + if !self.const_defined?(name) + self.class_eval("#{name} = #{value.inspect}") + end + end + + # A mechanism for defining configuration of Modules. With this + # mechanism, default values for configuration can be provided within shareable + # code, and the end user can customise the configuration without having to + # provide all values. + # + # Example: + # + # module MyModule + # config :param_one, "some value" + # config :param_two, 12345 + # end + # + # Those values can now be accessed by the following method + # + # MyModule.config :param_one + # => "some value" + # MyModule.config :param_two + # => 12345 + # + # ... 
or, if you have overrriden the method 'config' + # + # MyModule::CONFIG[:param_one] + # => "some value" + # MyModule::CONFIG[:param_two] + # => 12345 + # + # Once a value is stored in the configuration, it will not be altered + # by subsequent assignments, unless a special flag is given: + # + # (later on in your code, most likely in another file) + # module MyModule + # config :param_one, "another value" + # config :param_two, 98765, :force + # end + # + # The configuration is now: + # + # MyModule.config :param_one + # => "some value" # not changed + # MyModule.config :param_two + # => 98765 + # + # Configuration values can also be given as a Hash: + # + # MyModule.config :param1 => 'value1', :param2 => 'value2' + # + # Setting of these values can also be forced: + # + # MyModule.config :param1 => 'value3', :param2 => 'value4', :force => true + # + # A value of anything other than false or nil given for the :force key will + # result in the new values *always* being set. + def config(*args) + + raise "config expects at least one argument" if args.empty? + + # extract the arguments + if args[0].is_a?(Hash) + override = args[0][:force] + args[0].delete(:force) + args[0].each { |key, value| _handle_config(key, value, override)} + else + _handle_config(*args) + end + end + + private + # Actually set the config values + def _handle_config(name, value=nil, override=false) + if !self.const_defined?("CONFIG") + self.class_eval("CONFIG = {}") + end + + if value != nil + if override or self::CONFIG[name] == nil + self::CONFIG[name] = value + end + else + # if we pass an array of config keys to config(), + # get the array of values back + if name.is_a? 
Array + name.map { |c| self::CONFIG[c] } + else + self::CONFIG[name] + end + end + end +end \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/plugin.rb b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/plugin.rb new file mode 100644 index 00000000000..63d1db2ce87 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/plugin.rb @@ -0,0 +1,214 @@ +# An instance of Plugin is created for each plugin loaded by Rails, and +# stored in the Rails.plugins PluginList +# (see Engines::RailsExtensions::RailsInitializer for more details). +# +# Once the engines plugin is loaded, other plugins can take advantage of +# their own instances by accessing either Engines.current, or the preferred mechanism +# +# Rails.plugins[:plugin_name] +# +# Useful properties of this object include Plugin#version, which plugin developers +# can set in their init.rb scripts: +# +# Rails.plugins[:my_plugin].version = "1.4.2" +# +# Plugin developers can also access the contents of their about.yml files +# via Plugin#about, which returns a Hash if the about.yml file exists for +# this plugin. Note that if about.yml contains a "version" key, it will +# automatically be loaded into the version attribute described above. +# +# If this plugin contains paths in directories other than app/controllers, +# app/helpers, app/models and components, authors can +# declare this by adding extra paths to #code_paths: +# +# Rails.plugin[:my_plugin].code_paths << "app/sweepers" << "vendor/my_lib" +# +# Other properties of the Plugin instance can also be set. 
+class Plugin + + # The name of this plugin + attr_accessor :name + + # The directory in which this plugin is located + attr_accessor :root + + # The version of this plugin + attr_accessor :version + + # The about.yml information as a Hash, if it exists + attr_accessor :about + + # Plugins can add code paths to this attribute in init.rb if they + # need plugin directories to be added to the load path, i.e. + # + # plugin.code_paths << 'app/other_classes' + # + # Defaults to ["app/controllers", "app/helpers", "app/models", "components"] + # (see #default_code_paths). NOTE: if you want to set this, you must + # ensure that the engines plugin is loaded before any plugins which + # reference this since it's not available before the engines plugin has worked + # its magic. + attr_accessor :code_paths + + # Plugins can add paths to this attribute in init.rb if they need + # controllers loaded from additional locations. See also #default_controller_paths, and + # the caveat surrounding the #code_paths accessor. + attr_accessor :controller_paths + + # The directory in this plugin to mirror into the shared directory + # under +public+. See Engines.initialize_base_public_directory + # for more information. + # + # Defaults to "assets" (see default_public_directory). + attr_accessor :public_directory + + protected + + # The default set of code paths which will be added to $LOAD_PATH + # and Dependencies.load_paths + def default_code_paths + # lib will actually be removed from the load paths when we call + # uniq! in #inject_into_load_paths, but it's important to keep it + # around (for the documentation tasks, for instance). + %w(app/controllers app/helpers app/models components lib) + end + + # The default set of code paths which will be added to the routing system + def default_controller_paths + %w(app/controllers components) + end + + # Attempts to detect the directory to use for public files. + # If +assets+ exists in the plugin, this will be used. 
If +assets+ is missing + # but +public+ is found, +public+ will be used. + def default_public_directory + %w(assets public).select { |dir| File.directory?(File.join(root, dir)) }.first || "assets" + end + + public + + # Creates a new Plugin instance, and loads any other data from about.yml + def initialize(name, path) + @name = name + @root = path + + @code_paths = default_code_paths + @controller_paths = default_controller_paths + @public_directory = default_public_directory + + load_about_information + end + + # Load the information from about.yml. This Hash is then accessible + # from #about. + # + # If about.yml includes a "version", this will be assigned + # automatically into #version. + def load_about_information + about_path = File.join(self.root, 'about.yml') + if File.exist?(about_path) + @about = YAML.load(File.open(about_path).read) + @about.stringify_keys! + @version = @about["version"] + end + end + + # Load the plugin. Since Rails takes care of evaluating init.rb and + # adding +lib+ to the $LOAD_PATH, we don't need to do that here (see + # Engines::RailsExtensions::RailsInitializer.load_plugins_with_engine_additions). + # + # Here we add controller/helper code to the appropriate load paths (see + # #inject_into_load_path) and mirror the plugin assets into the shared public + # directory (#mirror_public_assets). + def load + logger.debug "Plugin '#{name}': starting load." + + inject_into_load_path + mirror_public_assets + + logger.debug "Plugin '#{name}': loaded." 
+ end + + # Adds all directories in the +app+ and +lib+ directories within the engine + # to the three relevant load paths mechanism that Rails might use: + # + # * $LOAD_PATH + # * Dependencies.load_paths + # * ActionController::Routing.controller_paths + # + def inject_into_load_path + + load_path_index = $LOAD_PATH.index(Engines.rails_final_load_path) + dependency_index = ::Dependencies.load_paths.index(Engines.rails_final_dependency_load_path) + + # Add relevant paths under the engine root to the load path + code_paths.map { |p| File.join(root, p) }.each do |path| + if File.directory?(path) + # Add to the load paths + $LOAD_PATH.insert(load_path_index + 1, path) + # Add to the dependency system, for autoloading. + ::Dependencies.load_paths.insert(dependency_index + 1, path) + end + end + + # Add controllers to the Routing system specifically. We actually add our paths + # to the configuration too, since routing is started AFTER plugins are. Plugins + # which are loaded by engines specifically (i.e. because of the '*' in + # +config.plugins+) will need their paths added directly to the routing system, + # since at that point it has already been configured. + controller_paths.map { |p| File.join(root, p) }.each do |path| + if File.directory?(path) + ActionController::Routing.controller_paths << path + Rails.configuration.controller_paths << path + end + end + + $LOAD_PATH.uniq! + ::Dependencies.load_paths.uniq! + ActionController::Routing.controller_paths.uniq! + Rails.configuration.controller_paths.uniq! + end + + # Replicates the subdirectories under the plugins's +assets+ (or +public+) directory into + # the corresponding public directory. See also Plugin#public_directory for more. 
+ def mirror_public_assets + + begin + source = File.join(root, self.public_directory) + # if there is no public directory, just return after this file + return if !File.exist?(source) + + logger.debug "Attempting to copy plugin plugin asset files from '#{source}' to '#{Engines.public_directory}'" + + Engines.mirror_files_from(source, File.join(Engines.public_directory, name)) + + rescue Exception => e + logger.warn "WARNING: Couldn't create the public file structure for plugin '#{name}'; Error follows:" + logger.warn e + end + end + + # The path to this plugin's public files + def public_asset_directory + "#{File.basename(Engines.public_directory)}/#{name}" + end + + # The directory containing this plugin's migrations (plugin/db/migrate) + def migration_directory + File.join(self.root, 'db', 'migrate') + end + + # Returns the version number of the latest migration for this plugin. Returns + # nil if this plugin has no migrations. + def latest_migration + migrations = Dir[migration_directory+"/*.rb"] + return nil if migrations.empty? + migrations.map { |p| File.basename(p) }.sort.last.match(/0*(\d+)\_/)[1].to_i + end + + # Migrate this plugin to the given version. See Engines::PluginMigrator for more + # information. + def migrate(version = nil) + Engines::PluginMigrator.migrate_plugin(self, version) + end +end \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/plugin_list.rb b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/plugin_list.rb new file mode 100644 index 00000000000..a89d9899b40 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/plugin_list.rb @@ -0,0 +1,31 @@ +# The PluginList class is an array, enhanced to allow access to loaded plugins +# by name, and iteration over loaded plugins in order of priority. This array is used +# by Engines::RailsExtensions::RailsInitializer to create the Rails.plugins array. 
+# +# Each loaded plugin has a corresponding Plugin instance within this array, and +# the order the plugins were loaded is reflected in the entries in this array. +# +# For more information, see the Rails module. +class PluginList < Array + # Finds plugins with the set with the given name (accepts Strings or Symbols), or + # index. So, Rails.plugins[0] returns the first-loaded Plugin, and Rails.plugins[:engines] + # returns the Plugin instance for the engines plugin itself. + def [](name_or_index) + if name_or_index.is_a?(Fixnum) + super + else + self.find { |plugin| plugin.name.to_s == name_or_index.to_s } + end + end + + # Go through each plugin, highest priority first (last loaded first). Effectively, + # this is like Rails.plugins.reverse, except when given a block, when it behaves + # like Rails.plugins.reverse.each. + def by_precedence(&block) + if block_given? + reverse.each { |x| yield x } + else + reverse + end + end +end \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/plugin_migrator.rb b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/plugin_migrator.rb new file mode 100644 index 00000000000..795d36e715b --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/plugin_migrator.rb @@ -0,0 +1,60 @@ +# The PluginMigrator class contains the logic to run migrations from +# within plugin directories. The directory in which a plugin's migrations +# should be is determined by the Plugin#migration_directory method. +# +# To migrate a plugin, you can simple call the migrate method (Plugin#migrate) +# with the version number that plugin should be at. The plugin's migrations +# will then be used to migrate up (or down) to the given version. +# +# For more information, see Engines::RailsExtensions::Migrations +class Engines::PluginMigrator < ActiveRecord::Migrator + + # We need to be able to set the 'current' engine being migrated. 
+ cattr_accessor :current_plugin + + # Runs the migrations from a plugin, up (or down) to the version given + def self.migrate_plugin(plugin, version) + self.current_plugin = plugin + migrate(plugin.migration_directory, version) + end + + # Returns the name of the table used to store schema information about + # installed plugins. + # + # See Engines.schema_info_table for more details. + def self.schema_info_table_name + ActiveRecord::Base.wrapped_table_name Engines.schema_info_table + end + + # Returns the current version of the given plugin + def self.current_version(plugin=current_plugin) + result = ActiveRecord::Base.connection.select_one(<<-ESQL + SELECT version FROM #{schema_info_table_name} + WHERE plugin_name = '#{plugin.name}' + ESQL + ) + if result + result["version"].to_i + else + # There probably isn't an entry for this engine in the migration info table. + # We need to create that entry, and set the version to 0 + ActiveRecord::Base.connection.execute(<<-ESQL + INSERT INTO #{schema_info_table_name} (version, plugin_name) + VALUES (0,'#{plugin.name}') + ESQL + ) + 0 + end + end + + # Sets the version of the plugin in Engines::PluginMigrator.current_plugin to + # the given version. + def set_schema_version(version) + ActiveRecord::Base.connection.update(<<-ESQL + UPDATE #{self.class.schema_info_table_name} + SET version = #{down? ? version.to_i - 1 : version.to_i} + WHERE plugin_name = '#{self.current_plugin.name}' + ESQL + ) + end +end diff --git a/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions.rb b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions.rb new file mode 100644 index 00000000000..36e76a919c0 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions.rb @@ -0,0 +1,6 @@ +# This module contains all the extensions that the engines plugin makes to +# Rails core. 
Explanations of how each extension works are included as RDoc +# documentation for each of the modules listed. +module Engines::RailsExtensions + # let's not rely *entirely* on Rails' magic modules. Not just yet. +end \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/active_record.rb b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/active_record.rb new file mode 100644 index 00000000000..15bb045ac7c --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/active_record.rb @@ -0,0 +1,19 @@ +# Here we add a single helpful method to ActiveRecord::Base. This method may be deprecated +# in the future, since support for the Module#config mechanism which required it has +# also been dropped. +module Engines::RailsExtensions::ActiveRecord + # NOTE: Currently the Migrations system will ALWAYS wrap given table names + # in the prefix/suffix, so any table name set via ActiveRecord::Base#set_table_name, + # for instance will always get wrapped in the process of migration. For this + # reason, whatever value you give to the config will be wrapped when set_table_name + # is used in the model. + # + # This method is useful for determining the actual name (including prefix and + # suffix) that Rails will use for a model, given a particular set_table_name + # parameter. 
+ def wrapped_table_name(name) + table_name_prefix + name + table_name_suffix + end +end + +::ActiveRecord::Base.extend(Engines::RailsExtensions::ActiveRecord) \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/dependencies.rb b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/dependencies.rb new file mode 100644 index 00000000000..07da01fdc2e --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/dependencies.rb @@ -0,0 +1,143 @@ +# One of the magic features that that engines plugin provides is the ability to +# override selected methods in controllers and helpers from your application. +# This is achieved by trapping requests to load those files, and then mixing in +# code from plugins (in the order the plugins were loaded) before finally loading +# any versions from the main +app+ directory. +# +# The behaviour of this extension is output to the log file for help when +# debugging. +# +# == Example +# +# A plugin contains the following controller in plugin/app/controllers/my_controller.rb: +# +# class MyController < ApplicationController +# def index +# @name = "HAL 9000" +# end +# def list +# @robots = Robot.find(:all) +# end +# end +# +# In one application that uses this plugin, we decide that the name used in the +# index action should be "Robbie", not "HAL 9000". To override this single method, +# we create the corresponding controller in our application +# (RAILS_ROOT/app/controllers/my_controller.rb), and redefine the method: +# +# class MyController < ApplicationController +# def index +# @name = "Robbie" +# end +# end +# +# The list method remains as it was defined in the plugin controller. +# +# The same basic principle applies to helpers, and also views and partials (although +# view overriding is performed in Engines::RailsExtensions::Templates; see that +# module for more information). +# +# === What about models? 
+# +# Unfortunately, it's not possible to provide this kind of magic for models. +# The only reason why it's possible for controllers and helpers is because +# they can be recognised by their filenames ("whatever_controller", "jazz_helper"), +# whereas models appear the same as any other typical Ruby library ("node", +# "user", "image", etc.). +# +# If mixing were allowed in models, it would mean code mixing for *every* +# file that was loaded via +require_or_load+, and this could result in +# problems where, for example, a Node model might start to include +# functionality from another file called "node" somewhere else in the +# $LOAD_PATH. +# +# One way to overcome this is to provide model functionality as a module in +# a plugin, which developers can then include into their own model +# implementations. +# +# Another option is to provide an abstract model (see the ActiveRecord::Base +# documentation) and have developers subclass this model in their own +# application if they must. +# +# --- +# +# The Engines::RailsExtensions::Dependencies module includes a method to +# override Dependencies.require_or_load, which is called to load code needed +# by Rails as it encounters constants that aren't defined. +# +# This method is enhanced with the code-mixing features described above. +# +module Engines::RailsExtensions::Dependencies + def self.included(base) #:nodoc: + base.class_eval { alias_method_chain :require_or_load, :engine_additions } + end + + # Attempt to load the given file from any plugins, as well as the application. + # This performs the 'code mixing' magic, allowing application controllers and + # helpers to override single methods from those in plugins. + # If the file can be found in any plugins, it will be loaded first from those + # locations. Finally, the application version is loaded, using Ruby's behaviour + # to replace existing methods with their new definitions. 
+ # + # If Engines.disable_code_mixing == true, the first controller/helper on the + # $LOAD_PATH will be used (plugins' +app+ directories are always lower on the + # $LOAD_PATH than the main +app+ directory). + # + # If Engines.disable_application_code_loading == true, controllers will + # not be loaded from the main +app+ directory *if* they are present in any + # plugins. + # + # Returns true if the file could be loaded (from anywhere); false otherwise - + # mirroring the behaviour of +require_or_load+ from Rails (which mirrors + # that of Ruby's own +require+, I believe). + def require_or_load_with_engine_additions(file_name, const_path=nil) + return require_or_load_without_engine_additions(file_name, const_path) if Engines.disable_code_mixing + + file_loaded = false + + # try and load the plugin code first + # can't use model, as there's nothing in the name to indicate that the file is a 'model' file + # rather than a library or anything else. + ['controller', 'helper'].each do |file_type| + # if we recognise this type + # (this regexp splits out the module/filename from any instances of app/#{type}, so that + # modules are still respected.) + if file_name =~ /^(.*app\/#{file_type}s\/)?(.*_#{file_type})(\.rb)?$/ + base_name = $2 + # ... 
go through the plugins from first started to last, so that + # code with a high precedence (started later) will override lower precedence + # implementations + Rails.plugins.each do |plugin| + plugin_file_name = File.expand_path(File.join(plugin.root, 'app', "#{file_type}s", base_name)) + logger.debug("checking plugin '#{plugin.name}' for '#{base_name}'") + if File.file?("#{plugin_file_name}.rb") + logger.debug("==> loading from plugin '#{plugin.name}'") + file_loaded = true if require_or_load_without_engine_additions(plugin_file_name, const_path) + end + end + + # finally, load any application-specific controller classes using the 'proper' + # rails load mechanism, EXCEPT when we're testing engines and could load this file + # from an engine + if Engines.disable_application_code_loading + logger.debug("loading from application disabled.") + else + # Ensure we are only loading from the /app directory at this point + app_file_name = File.join(RAILS_ROOT, 'app', "#{file_type}s", "#{base_name}") + if File.file?("#{app_file_name}.rb") + logger.debug("loading from application: #{base_name}") + file_loaded = true if require_or_load_without_engine_additions(app_file_name, const_path) + else + logger.debug("(file not found in application)") + end + end + end + end + + # if we managed to load a file, return true. If not, default to the original method. + # Note that this relies on the RHS of a boolean || not to be evaluated if the LHS is true. 
+ file_loaded || require_or_load_without_engine_additions(file_name, const_path) + end +end + +::Dependencies.send(:include, Engines::RailsExtensions::Dependencies) \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/migrations.rb b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/migrations.rb new file mode 100644 index 00000000000..2c4d93ab7a5 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/migrations.rb @@ -0,0 +1,155 @@ +# Contains the enhancements to Rails' migrations system to support the +# Engines::PluginMigrator. See Engines::RailsExtensions::Migrations for more +# information. + +require "engines/plugin_migrator" + +# = Plugins and Migrations: Background +# +# Rails uses migrations to describe changes to the databases as your application +# evolves. Each change to your application - adding and removing models, most +# commonly - might require tweaks to your schema in the form of new tables, or new +# columns on existing tables, or possibly the removal of tables or columns. Migrations +# can even include arbitrary code to *transform* data as the underlying schema +# changes. +# +# The point is that at any particular stage in your application's development, +# migrations serve to transform the database into a state where it is compatible +# and appropriate at that time. +# +# == What about plugins? +# +# If you want to share models using plugins, chances are that you might also +# want to include the corresponding migrations to create tables for those models. +# With the engines plugin installed, plugins can carry migration data easily: +# +# vendor/ +# | +# plugins/ +# | +# my_plugin/ +# |- init.rb +# |- lib/ +# |- db/ +# |-migrate/ +# |- 001_do_something.rb +# |- 002_and_something_else.rb +# |- ... 
+# +# When you install a plugin which contains migrations, you are undertaking a +# further step in the development of your application, the same as the addition +# of any other code. With this in mind, you may want to 'roll back' the +# installation of this plugin at some point, and the database should be able +# to migrate back to the point without this plugin in it too. +# +# == An example +# +# For example, our current application is at version 14 (according to the +# +schema_info+ table), when we decide that we want to add a tagging plugin. The +# tagging plugin chosen includes migrations to create the tables it requires +# (say, _tags_ and _taggings_, for instance), along with the models and helpers +# one might expect. +# +# After installing this plugin, these tables should be created in our database. +# Rather than running the migrations directly from the plugin, they should be +# integrated into our main migration stream in order to accurately reflect the +# state of our application's database *at this moment in time*. +# +# $ script/generate plugin_migration +# exists db/migrate +# create db/migrate/015_migrate_tagging_plugin_to_version_3.rb +# +# This migration will take our application to version 15, and contains the following, +# typical migration code: +# +# class MigrateTaggingPluginToVersion3 < ActiveRecord::Migration +# def self.up +# Rails.plugins[:tagging].migrate(3) +# end +# def self.down +# Rails.plugins[:tagging].migrate(0) +# end +# end +# +# When we migrate our application up, using rake db:migrate as normal, +# the plugin will be migrated up to its latest version (3 in this example). If we +# ever decide to migrate the application back to the state it was in at version 14, +# the plugin migrations will be taken back down to version 0 (which, typically, +# would remove all tables the plugin migrations define). 
+# +# == Upgrading plugins +# +# It might happen that later in an application's life, we update to a new version of +# the tagging plugin which requires some changes to our database. The tagging plugin +# provides these changes in the form of its own migrations. +# +# In this case, we just need to re-run the plugin_migration generator to create a +# new migration from the current revision to the newest one: +# +# $ script/generate plugin_migration +# exists db/migrate +# create db/migrate/023_migrate_tagging_plugin_to_version_5.rb +# +# The contents of this migration are: +# +# class MigrateTaggingPluginToVersion3 < ActiveRecord::Migration +# def self.up +# Rails.plugins[:tagging].migrate(5) +# end +# def self.down +# Rails.plugins[:tagging].migrate(3) +# end +# end +# +# Notice that if we were to migrate down to revision 22 or lower, the tagging plugin +# will be migrated back down to version 3 - the version we were previously at. +# +# +# = Creating migrations in plugins +# +# In order to use the plugin migration functionality that engines provides, a plugin +# only needs to provide regular migrations in a db/migrate folder within it. +# +# = Explicitly migrating plugins +# +# It's possible to migrate plugins within your own migrations, or any other code. +# Simply get the Plugin instance, and its Plugin#migrate method with the version +# you wish to end up at: +# +# Rails.plugins[:whatever].migrate(version) +# +# --- +# +# The Engines::RailsExtensions::Migrations module defines extensions for Rails' +# migration systems. Specifically: +# +# * Adding a hook to initialize_schema_information to create the plugin schema +# info table. +# +module Engines::RailsExtensions::Migrations + def self.included(base) # :nodoc: + base.class_eval { alias_method_chain :initialize_schema_information, :engine_additions } + end + + # Create the schema tables, and ensure that the plugin schema table + # is also initialized. 
The plugin schema info table is defined by + # Engines::PluginMigrator.schema_info_table_name. + def initialize_schema_information_with_engine_additions + initialize_schema_information_without_engine_additions + + # create the plugin schema stuff. + begin + execute <<-ESQL + CREATE TABLE #{Engines::PluginMigrator.schema_info_table_name} + (plugin_name #{type_to_sql(:string)}, version #{type_to_sql(:integer)}) + ESQL + rescue ActiveRecord::StatementInvalid + # Schema has been initialized + end + end +end + +::ActiveRecord::ConnectionAdapters::SchemaStatements.send(:include, Engines::RailsExtensions::Migrations) + +# Set ActiveRecord to ignore the plugin schema table by default +::ActiveRecord::SchemaDumper.ignore_tables << Engines.schema_info_table \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/public_asset_helpers.rb b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/public_asset_helpers.rb new file mode 100644 index 00000000000..7f7a02e3de8 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/public_asset_helpers.rb @@ -0,0 +1,116 @@ +# The engines plugin makes it trivial to share public assets using plugins. +# To do this, include an assets directory within your plugin, and put +# your javascripts, stylesheets and images in subdirectories of that folder: +# +# my_plugin +# |- init.rb +# |- lib/ +# |- assets/ +# |- javascripts/ +# | |- my_functions.js +# | +# |- stylesheets/ +# | |- my_styles.css +# | +# |- images/ +# |- my_face.jpg +# +# Files within the asset structure are automatically mirrored into +# a publicly-accessible folder each time your application starts (see +# Plugin#mirror_public_assets). +# +# +# == Using plugin assets in views +# +# It's also simple to use Rails' helpers in your views to use plugin assets. 
+# The default helper methods have been enhanced by the engines plugin to accept +# a :plugin option, indicating the plugin containing the desired asset. +# +# For example, it's easy to use plugin assets in your layouts: +# +# <%= stylesheet_link_tag "my_styles", :plugin => "my_plugin", :media => "screen" %> +# <%= javascript_include_tag "my_functions", :plugin => "my_plugin" %> +# +# ... and similarly in views and partials, it's easy to use plugin images: +# +# <%= image_tag "my_face", :plugin => "my_plugin" %> +# +# <%= image_path "my_face", :plugin => "my_plugin" %> +# +# Where the default helpers allow the specification of more than one file (i.e. the +# javascript and stylesheet helpers), you can do similarly for multiple assets from +# within a single plugin. +# +# --- +# +# This module enhances four of the methods from ActionView::Helpers::AssetTagHelper: +# +# * stylesheet_link_tag +# * javascript_include_tag +# * image_path +# * image_tag +# +# Each one of these methods now accepts the key/value pair :plugin => "plugin_name", +# which can be used to specify the originating plugin for any assets. +# +module Engines::RailsExtensions::PublicAssetHelpers + def self.included(base) #:nodoc: + base.class_eval do + [:stylesheet_link_tag, :javascript_include_tag, :image_path, :image_tag].each do |m| + alias_method_chain m, :engine_additions + end + end + end + + # Adds plugin functionality to Rails' default stylesheet_link_tag method. + def stylesheet_link_tag_with_engine_additions(*sources) + stylesheet_link_tag_without_engine_additions(*Engines::RailsExtensions::PublicAssetHelpers.pluginify_sources("stylesheets", *sources)) + end + + # Adds plugin functionality to Rails' default javascript_include_tag method. 
+ def javascript_include_tag_with_engine_additions(*sources) + javascript_include_tag_without_engine_additions(*Engines::RailsExtensions::PublicAssetHelpers.pluginify_sources("javascripts", *sources)) + end + + #-- + # Our modified image_path now takes a 'plugin' option, though it doesn't require it + #++ + + # Adds plugin functionality to Rails' default image_path method. + def image_path_with_engine_additions(source, options={}) + options.stringify_keys! + source = Engines::RailsExtensions::PublicAssetHelpers.plugin_asset_path(options["plugin"], "images", source) if options["plugin"] + image_path_without_engine_additions(source) + end + + # Adds plugin functionality to Rails' default image_tag method. + def image_tag_with_engine_additions(source, options={}) + options.stringify_keys! + if options["plugin"] + source = Engines::RailsExtensions::PublicAssetHelpers.plugin_asset_path(options["plugin"], "images", source) + options.delete("plugin") + end + image_tag_without_engine_additions(source, options) + end + + #-- + # The following are methods on this module directly because of the weird-freaky way + # Rails creates the helper instance that views actually get + #++ + + # Convert sources to the paths for the given plugin, if any plugin option is given + def self.pluginify_sources(type, *sources) + options = sources.last.is_a?(Hash) ? sources.pop.stringify_keys : { } + sources.map! { |s| plugin_asset_path(options["plugin"], type, s) } if options["plugin"] + options.delete("plugin") # we don't want it appearing in the HTML + sources << options # re-add options + end + + # Returns the publicly-addressable relative URI for the given asset, type and plugin + def self.plugin_asset_path(plugin_name, type, asset) + raise "No plugin called '#{plugin_name}' - please use the full name of a loaded plugin." if Rails.plugins[plugin_name].nil? 
+ "/#{Rails.plugins[plugin_name].public_asset_directory}/#{type}/#{asset}" + end +end + +::ActionView::Helpers::AssetTagHelper.send(:include, Engines::RailsExtensions::PublicAssetHelpers) \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/rails.rb b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/rails.rb new file mode 100644 index 00000000000..5ae0a3ac13f --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/rails.rb @@ -0,0 +1,20 @@ +# In order to give a richer infrastructure for dealing with plugins, the engines +# plugin adds two new attributes to the Rails module: +# +# [+plugins+] A PluginList instance which holds the currently loaded plugins +# [+configuration+] The current Rails::Configuration instance, so that we can +# query any parameters that might be set *after* Rails has +# loaded, as well as during plugin initialization +# +#-- +# Here we just re-open the Rails module and add our custom accessors; it +# may be cleaner to seperate them into a module, but in this case that seems +# like overkill. +#++ +module Rails + # The set of all loaded plugins + mattr_accessor :plugins + + # The Rails::Initializer::Configuration object + mattr_accessor :configuration +end diff --git a/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/rails_initializer.rb b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/rails_initializer.rb new file mode 100644 index 00000000000..5686b526e98 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/rails_initializer.rb @@ -0,0 +1,86 @@ +# Enhances the Rails::Initializer class to be a bit smarter about +# plugins. See Engines::RailsExtensions::RailsInitializer for more +# details. 
+ +require "engines/rails_extensions/rails" +require 'engines/plugin_list' + +# The engines plugin changes the way that Rails actually loads other plugins. +# It creates instances of the Plugin class to represent each plugin, stored +# in the Rails.plugins PluginList. +# +# --- +# +# Three methods from the original Rails::Initializer module are overridden +# by Engines::RailsExtensions::RailsInitializer: +# +# [+load_plugin+] which now creates Plugin instances and calls Plugin#load +# [+after_initialize+] which now performs Engines.after_initialize in addition +# to the given config block +# [plugin_enabled?] which now respects the result of +# Engines.load_all_plugins? +# +module Engines::RailsExtensions::RailsInitializer + def self.included(base) #:nodoc: + base.class_eval do + alias_method_chain :load_plugin, :engine_additions + alias_method_chain :after_initialize, :engine_additions + alias_method_chain :plugin_enabled?, :engine_additions + end + end + + # Loads all plugins in configuration.plugin_paths, regardless of the contents + # of configuration.plugins + def load_all_plugins + # a nil value implies we don't care about plugins; load 'em all in a reliable order + find_plugins(configuration.plugin_paths).sort.each { |path| load_plugin path } + end + + # Loads a plugin, performing the extra load path/public file magic of + # engines by calling Plugin#load. + def load_plugin_with_engine_additions(directory) + name = plugin_name(directory) + return false if loaded_plugins.include?(name) + + logger.debug "loading plugin from #{directory} with engine additions" + + # add the Plugin object + plugin = Plugin.new(plugin_name(directory), directory) + Rails.plugins << plugin + + # do the other stuff that load_plugin used to do. 
This includes + # allowing the plugin's init.rb to set configuration options on + # it's instance, which can then be used in it's initialization + load_plugin_without_engine_additions(directory) + + # perform additional loading tasks like mirroring public assets + # and adding app directories to the appropriate load paths + plugin.load + + true + end + + # Allow the engines plugin to do whatever it needs to do after Rails has + # loaded, and then call the actual after_initialize block. Currently, this + # is call Engines.after_initialize. + def after_initialize_with_engine_additions + Engines.after_initialize + after_initialize_without_engine_additions + end + + protected + + # Returns true if the plugin at the given path should be loaded; false + # otherwise. If Engines.load_all_plugins? is true, this method will return + # true regardless of the path given. + def plugin_enabled_with_engine_additions?(path) + Engines.load_all_plugins? || plugin_enabled_without_engine_additions?(path) + end + + # Returns the name of the plugin at the given path. + def plugin_name(path) + File.basename(path) + end +end + +::Rails::Initializer.send(:include, Engines::RailsExtensions::RailsInitializer) \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/routing.rb b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/routing.rb new file mode 100644 index 00000000000..f21d61ec216 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/routing.rb @@ -0,0 +1,77 @@ +# Effective use of Rails' routes can help create a tidy and elegant set of URLs, +# and is a significant part of creating an external API for your web application. +# +# When developing plugins which contain controllers, it seems obvious that including +# the corresponding routes would be extremely useful. 
This is particularly true +# when exposing RESTful resources using the new REST-ian features of Rails 1.2. +# +# == Including routes in your plugin +# +# The engines plugin makes it possible to include a set of routes within your plugin +# very simply, as it turns out. In your plugin, you simply include a routes.rb +# file like the one below at the root of your plugin: +# +# connect "/login", :controller => "my_plugin/account", :action => "login" +# +# # add a named route +# logout "/logout", :controller => "my_plugin/account", :action => "logout" +# +# # some restful stuff +# resources :things do |t| +# t.resources :other_things +# end +# +# Everywhere in a normal RAILS_ROOT/config/routes.rb file +# where you might have map.connect, you just use connect in your +# plugin's routes.rb. +# +# === Hooking it up in your application +# +# While it would be possible to have each plugin's routes automagically included into +# the application's route set, to do so would actually be a stunningly bad idea. Route +# priority is the key issue here. You, the application developer, needs to be in complete +# control when it comes to specifying the priority of routes in your application, since +# the ordering of your routes directly affects how Rails will interpret incoming requests. +# +# To add plugin routes into your application's routes.rb file, you need to explicitly +# map them in using the Engines::RailsExtensions::Routing#from_plugin method: +# +# ApplicationController::Routing::Routes.draw do |map| +# +# map.connect "/app_stuff", :controller => "application_thing" # etc... +# +# # This line includes the routes from the given plugin at this point, giving you +# # control over the priority of your application routes +# map.from_plugin :your_plugin +# +# map.connect ":controller/:action/:id" +# end +# +# By including routes in plugins which have controllers, you can now share in a simple way +# a compact and elegant URL scheme which corresponds to those controllers. 
+# +# --- +# +# The Engines::RailsExtensions::Routing module defines extensions to Rails' +# routing (ActionController::Routing) mechanism such that routes can be loaded +# from a given plugin. +# +# The key method is Engines::RailsExtensions::Routing#from_plugin, which can be called +# within your application's config/routes.rb file to load plugin routes at that point. +# +module Engines::RailsExtensions::Routing + # Loads the set of routes from within a plugin and evaluates them at this + # point within an application's main routes.rb file. + # + # Plugin routes are loaded from /routes.rb. + def from_plugin(name) + # At the point in which routing is loaded, we cannot guarantee that all + # plugins are in Rails.plugins, so instead we need to use find_plugin_path + path = Engines.find_plugin_path(name) + routes_path = File.join(path, name.to_s, "routes.rb") + logger.debug "loading routes from #{routes_path}" + eval(IO.read(routes_path), binding, routes_path) if File.file?(routes_path) + end +end + +::ActionController::Routing::RouteSet::Mapper.send(:include, Engines::RailsExtensions::Routing) \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/templates.rb b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/templates.rb new file mode 100644 index 00000000000..6cad6e261e9 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/rails_extensions/templates.rb @@ -0,0 +1,140 @@ +# As well as providing code overloading for controllers and helpers +# (see Engines::RailsExtensions::Dependencies), the engines plugin also allows +# developers to selectively override views and partials within their application. +# +# == An example +# +# This is achieved in much the same way as controller overriding. Our plugin contains +# a view to be rendered at the URL /test/hello, in +# our_plugin/app/views/test/hello.rhtml: +# +#
    Hi, <%= @dude.name %>, what's up?
    +# +# If in a particular application we're not happy with this message, we can override +# it by replacing the partial in our own application - +# RAILS_ROOT/app/views/test/hello.rhtml: +# +#
    Wassup <%= @dude.name %>. +# Waaaaassaaaaaaaaup
    +# +# This view will then be rendered in favour of that in the plugin. +# +module Engines::RailsExtensions::Templates + + # Override the finding of partials and views. This is achieved by wrapping + # the (private) method #full_template_path_with_engine_additions, that checks + # for the given template within plugins as well as the application. + module ActionView + def self.included(base) #:nodoc: + base.class_eval { alias_method_chain :full_template_path, :engine_additions } + end + + private + def full_template_path_with_engine_additions(template_path, extension) + path_in_app_directory = full_template_path_from_application(template_path, extension) + + # First check for this template in the application. If it exists, the user has + # overridden anything from the plugin, so use it (unless we're testing plugins; + # see full_template_path_from_application()) + return path_in_app_directory if path_in_app_directory && File.exist?(path_in_app_directory) + + # Otherwise, check in the plugins to see if the template can be found there. + # Load this in order so that more recently started plugins will take priority. + Rails.plugins.by_precedence do |plugin| + plugin_specific_path = File.join(plugin.root, 'app', 'views', + template_path.to_s + '.' + extension.to_s) + return plugin_specific_path if File.exist?(plugin_specific_path) + end + + # If it cannot be found anywhere, return the default path. + return full_template_path_without_engine_additions(template_path, extension) + end + + # Return a path to this template within our default app/views directory. + # In some circumstances, we may wish to prevent users from overloading views, + # such as when we are testing plugins with views. In this case, return "". 
+ def full_template_path_from_application(template_path, extension) + if Engines.disable_application_view_loading + nil + else + full_template_path_without_engine_additions(template_path, extension) + end + end + end + + + # The Layout module overrides a single (private) method in ActionController::Layout::ClassMethods, + # called #layout_list. This method now returns an array of layouts, including those in plugins. + module Layout + def self.included(base) #:nodoc: + base.class_eval { alias_method_chain :layout_list, :engine_additions } + end + + private + # Return the list of layouts, including any in the app/views/layouts + # directories of loaded plugins. + def layout_list_with_engine_additions + plugin_layouts = Rails.plugins.by_precedence.map do |p| + File.join(p.root, "app", "views", "layouts") + end + layout_list_without_engine_additions + Dir["{#{plugin_layouts.join(",")}}/**/*"] + end + end + + + # The way ActionMailer is coded in terms of finding templates is very restrictive, to the point + # where all templates for rendering must exist under the single base path. This is difficult to + # work around without re-coding significant parts of the action mailer code. + # + # --- + # + # The MailTemplates module overrides two (private) methods from ActionMailer to enable mail + # templates within plugins: + # + # [+template_path+] which now produces the contents of #template_paths + # [+render+] which now find the first matching template and creates an ActionVew::Base + # instance with the correct @base_path for that template + module MailTemplates + def self.included(base) #:nodoc: + base.class_eval do + alias_method_chain :template_path, :engine_additions + alias_method_chain :render, :engine_additions + end + end + + private + # Returns all possible template paths for the current mailer, including those + # within the loaded plugins. 
+ def template_paths + paths = Rails.plugins.by_precedence.map { |p| "#{p.root}/app/views/#{mailer_name}" } + paths.unshift(template_path_without_engine_additions) unless Engines.disable_application_view_loading + paths + end + + # Return something that Dir[] can glob against. This method is called in + # ActionMailer::Base#create! and used as part of an argument to Dir. We can + # take advantage of this by using some of the features of Dir.glob to search + # multiple paths for matching files. + def template_path_with_engine_additions + "{#{template_paths.join(",")}}" + end + + # We've broken this up so that we can dynamically alter the base_path that ActionView + # is rendering from so that templates can be located from plugins. + def render_with_engine_additions(opts) + template_path_for_method = Dir["#{template_path}/#{opts[:file]}*"].first + body = opts.delete(:body) + i = initialize_template_class(body) + i.base_path = File.dirname(template_path_for_method) + i.render(opts) + end + end +end + + +::ActionView::Base.send(:include, Engines::RailsExtensions::Templates::ActionView) +::ActionController::Layout::ClassMethods.send(:include, Engines::RailsExtensions::Templates::Layout) + +if Object.const_defined?(:ActionMailer) # We don't need to do this if ActionMailer hasn't been loaded. + ::ActionMailer::Base.send(:include, Engines::RailsExtensions::Templates::MailTemplates) +end \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/testing.rb b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/testing.rb new file mode 100644 index 00000000000..364f992ec66 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/lib/engines/testing.rb @@ -0,0 +1,87 @@ +# Contains the enhancements to assist in testing plugins. See Engines::Testing +# for more details. + +require 'test/unit' + +require 'tmpdir' +require 'fileutils' + +# In most cases, Rails' own plugin testing mechanisms are sufficient. 
However, there +# are cases where plugins can be given a helping hand in the testing arena. This module +# contains some methods to assist when testing plugins that contain fixtures. +# +# == Fixtures and plugins +# +# Since Rails' own fixtures method is fairly strict about where files can be loaded from, +# the simplest approach when running plugin tests with fixtures is to simply copy all +# fixtures into a single temporary location and inform the standard Rails mechanism to +# use this directory, rather than RAILS_ROOT/test/fixtures. +# +# The Engines::Testing#setup_plugin_fixtures method does this, copying all plugin fixtures +# into the temporary location before and tests are performed. This behaviour is invoked +# the the rake tasks provided by the Engines plugin, in the "test:plugins" namespace. If +# necessary, you can invoke the task manually. +# +# If you wish to take advantage of this, add a call to the Engines::Testing.set_fixture_path +# method somewhere before your tests (in a test_helper file, or above the TestCase itself). +# +# = Testing plugins +# +# Normally testing a plugin will require that Rails is loaded, unless you are including +# a skeleton Rails environment or set of mocks within your plugin tests. If you require +# the Rails environment to be started, you must ensure that this actually happens; while +# it's not obvious, your tests do not automatically run with Rails loaded. +# +# The simplest way to setup plugin tests is to include a test helper with the following +# contents: +# +# # Load the normal Rails helper. This ensures the environment is loaded +# require File.expand_path(File.dirname(__FILE__) + '/../../../../test/test_helper') +# # Ensure that we are using the temporary fixture path +# Engines::Testing.set_fixture_path +# +# Then run tests using the provided tasks (test:plugins, or the tasks that the engines +# plugin provides - test:plugins:units, etc.). 
+# +# Alternatively, you can explicitly load the environment by adpating the contents of the +# default test_helper: +# +# ENV["RAILS_ENV"] = "test" +# # Note that we are requiring config/environment from the root of the enclosing application. +# require File.expand_path(File.dirname(__FILE__) + "/../../../../config/environment") +# require 'test_help' +# +module Engines::Testing + mattr_accessor :temporary_fixtures_directory + self.temporary_fixtures_directory = FileUtils.mkdir_p(File.join(Dir.tmpdir, "rails_fixtures")) + + # Copies fixtures from plugins and the application into a temporary directory + # (Engines::Testing.temporary_fixtures_directory). + # + # If a set of plugins is not given, fixtures are copied from all plugins in order + # of precedence, meaning that plugins can 'overwrite' the fixtures of others if they are + # loaded later; the application's fixtures are copied last, allowing any custom fixtures + # to override those in the plugins. If no argument is given, plugins are loaded via + # PluginList#by_precedence. + # + # This method is called by the engines-supplied plugin testing rake tasks + def self.setup_plugin_fixtures(plugins=Rails.plugins.by_precedence) + + # Copy all plugin fixtures, and then the application fixtures, into this directory + plugins.each do |plugin| + plugin_fixtures_directory = File.join(plugin.root, "test", "fixtures") + if File.directory?(plugin_fixtures_directory) + Engines.mirror_files_from(plugin_fixtures_directory, self.temporary_fixtures_directory) + end + end + Engines.mirror_files_from(File.join(RAILS_ROOT, "test", "fixtures"), + self.temporary_fixtures_directory) + end + + # Sets the fixture path used by Test::Unit::TestCase to the temporary + # directory which contains all plugin fixtures. 
+ def self.set_fixture_path + Test::Unit::TestCase.fixture_path = self.temporary_fixtures_directory + $LOAD_PATH.unshift self.temporary_fixtures_directory + end +end \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/engines/tasks/engines.rake b/solr/client/ruby/flare/vendor/plugins/engines/tasks/engines.rake new file mode 100644 index 00000000000..1d644352e84 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/engines/tasks/engines.rake @@ -0,0 +1,149 @@ +# This code lets us redefine existing Rake tasks, which is extremely +# handy for modifying existing Rails rake tasks. +# Credit for this snippet of code goes to Jeremy Kemper +# http://pastie.caboo.se/9620 +unless Rake::TaskManager.methods.include?(:redefine_task) + module Rake + module TaskManager + def redefine_task(task_class, args, &block) + task_name, deps = resolve_args(args) + task_name = task_class.scope_name(@scope, task_name) + deps = [deps] unless deps.respond_to?(:to_ary) + deps = deps.collect {|d| d.to_s } + task = @tasks[task_name.to_s] = task_class.new(task_name, self) + task.application = self + task.add_comment(@last_comment) + @last_comment = nil + task.enhance(deps, &block) + task + end + end + class Task + class << self + def redefine_task(args, &block) + Rake.application.redefine_task(self, args, &block) + end + end + end + end +end + + +namespace :db do + namespace :fixtures do + namespace :plugins do + + desc "Load plugin fixtures into the current environment's database." 
+ task :load => :environment do + require 'active_record/fixtures' + ActiveRecord::Base.establish_connection(RAILS_ENV.to_sym) + Dir.glob(File.join(RAILS_ROOT, 'vendor', 'plugins', ENV['PLUGIN'] || '**', + 'test', 'fixtures', '*.yml')).each do |fixture_file| + Fixtures.create_fixtures(File.dirname(fixture_file), File.basename(fixture_file, '.*')) + end + end + + end + end +end + +# this is just a modification of the original task in railties/lib/tasks/documentation.rake, +# because the default task doesn't support subdirectories like /app or +# /component. These tasks now include every file under a plugin's code paths (see +# Plugin#code_paths). +namespace :doc do + + plugins = FileList['vendor/plugins/**'].collect { |plugin| File.basename(plugin) } + + namespace :plugins do + + # Define doc tasks for each plugin + plugins.each do |plugin| + desc "Create plugin documentation for '#{plugin}'" + Rake::Task.redefine_task(plugin => :environment) do + plugin_base = RAILS_ROOT + "/vendor/plugins/#{plugin}" + options = [] + files = Rake::FileList.new + options << "-o doc/plugins/#{plugin}" + options << "--title '#{plugin.titlecase} Plugin Documentation'" + options << '--line-numbers' << '--inline-source' + options << '-T html' + + # Include every file in the plugin's code_paths (see Plugin#code_paths) + if Rails.plugins[plugin] + files.include("#{plugin_base}/{#{Rails.plugins[plugin].code_paths.join(",")}}/**/*.rb") + end + if File.exists?("#{plugin_base}/README") + files.include("#{plugin_base}/README") + options << "--main '#{plugin_base}/README'" + end + files.include("#{plugin_base}/CHANGELOG") if File.exists?("#{plugin_base}/CHANGELOG") + + if files.empty? + puts "No source files found in #{plugin_base}. No documentation will be generated." 
+ else + options << files.to_s + sh %(rdoc #{options * ' '}) + end + end + end + end +end + + + +namespace :test do + task :warn_about_multiple_plugin_testing_with_engines do + puts %{-~============== A Moste Polite Warninge ===========================~- + +You may experience issues testing multiple plugins at once when using +the code-mixing features that the engines plugin provides. If you do +experience any problems, please test plugins individually, i.e. + + $ rake test:plugins PLUGIN=my_plugin + +or use the per-type plugin test tasks: + + $ rake test:plugins:units + $ rake test:plugins:functionals + $ rake test:plugins:integration + $ rake test:plugins:all + +Report any issues on http://dev.rails-engines.org. Thanks! + +-~===============( ... as you were ... )============================~-} + end + + namespace :plugins do + + desc "Run the plugin tests in vendor/plugins/**/test (or specify with PLUGIN=name)" + task :all => [:warn_about_multiple_plugin_testing_with_engines, + :units, :functionals, :integration] + + desc "Run all plugin unit tests" + Rake::TestTask.new(:units => :setup_plugin_fixtures) do |t| + t.pattern = "vendor/plugins/#{ENV['PLUGIN'] || "**"}/test/unit/**/*_test.rb" + t.verbose = true + end + + desc "Run all plugin functional tests" + Rake::TestTask.new(:functionals => :setup_plugin_fixtures) do |t| + t.pattern = "vendor/plugins/#{ENV['PLUGIN'] || "**"}/test/functional/**/*_test.rb" + t.verbose = true + end + + desc "Integration test engines" + Rake::TestTask.new(:integration => :setup_plugin_fixtures) do |t| + t.pattern = "vendor/plugins/#{ENV['PLUGIN'] || "**"}/test/integration/**/*_test.rb" + t.verbose = true + end + + desc "Mirrors plugin fixtures into a single location to help plugin tests" + task :setup_plugin_fixtures => :environment do + Engines::Testing.setup_plugin_fixtures + end + + # Patch the default plugin testing task to have setup_plugin_fixtures as a prerequisite + Rake::Task["test:plugins"].prerequisites << 
"test:plugins:setup_plugin_fixtures" + end +end \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/flare/README b/solr/client/ruby/flare/vendor/plugins/flare/README new file mode 100644 index 00000000000..873a19b6017 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/flare/README @@ -0,0 +1,11 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/solr/client/ruby/flare/vendor/plugins/flare/Rakefile b/solr/client/ruby/flare/vendor/plugins/flare/Rakefile new file mode 100644 index 00000000000..6b2788c6bd2 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/flare/Rakefile @@ -0,0 +1,34 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'rake' +require 'rake/testtask' +require 'rake/rdoctask' + +desc 'Default: run unit tests.' +task :default => :test + +desc 'Test the flare plugin.' 
+Rake::TestTask.new(:test) do |t| + t.libs << 'lib' + t.pattern = 'test/**/*_test.rb' + t.verbose = true +end + +desc 'Generate documentation for the flare plugin.' +Rake::RDocTask.new(:rdoc) do |rdoc| + rdoc.rdoc_dir = 'rdoc' + rdoc.title = 'Flare' + rdoc.options << '--line-numbers' << '--inline-source' + rdoc.rdoc_files.include('README') + rdoc.rdoc_files.include('lib/**/*.rb') +end diff --git a/solr/client/ruby/flare/vendor/plugins/flare/app/controllers/browse_controller.rb b/solr/client/ruby/flare/vendor/plugins/flare/app/controllers/browse_controller.rb new file mode 100644 index 00000000000..173d798c0f5 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/flare/app/controllers/browse_controller.rb @@ -0,0 +1,15 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +class BrowseController < ApplicationController + flare +end diff --git a/solr/client/ruby/flare/vendor/plugins/flare/app/controllers/document_controller.rb b/solr/client/ruby/flare/vendor/plugins/flare/app/controllers/document_controller.rb new file mode 100755 index 00000000000..b11e345bfff --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/flare/app/controllers/document_controller.rb @@ -0,0 +1,16 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +class DocumentController < ApplicationController + # Currently not used, as partials are used for rendering documents in search results + # TODO: how best to allow pluggable document rendering? +end diff --git a/solr/client/ruby/flare/vendor/plugins/flare/app/controllers/simile_controller.rb b/solr/client/ruby/flare/vendor/plugins/flare/app/controllers/simile_controller.rb new file mode 100755 index 00000000000..d11791cd9e4 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/flare/app/controllers/simile_controller.rb @@ -0,0 +1,46 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +class SimileController < ApplicationController + before_filter :flare_before + + def exhibit + @data = @flare.search(0, 10) + + # Exhibit seems to require a label attribute to be happy + @data.each {|d| d['label'] = d['title_text']} + + respond_to do |format| + format.html # renders exhibit.rhtml + format.json { render :json => {'items' => @data}.to_json } # Exhibit seems to require data to be in a 'items' Hash + end + end + + def timeline + @data = @flare.search(0, 10) + + respond_to do |format| + format.html # renders timeline.rhtml + format.xml # renders timeline.rxml + end + end + + #TODO: this is duplicated from flare's + private + def flare_before + # TODO: allow source of context to be configurable. + session[:flare_context] ||= Flare::Context.new(SOLR_CONFIG) + + @flare = session[:flare_context] + end + +end diff --git a/solr/client/ruby/flare/vendor/plugins/flare/app/helpers/application_helper.rb b/solr/client/ruby/flare/vendor/plugins/flare/app/helpers/application_helper.rb new file mode 100755 index 00000000000..2af0b89076b --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/flare/app/helpers/application_helper.rb @@ -0,0 +1,17 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# View helpers shared across flare views.
module ApplicationHelper
  # Derives a human-readable label from a Solr field name by stripping
  # the trailing type suffix (e.g. "genre_facet" => "genre").
  #
  # Fix: the original indexed the MatchData without a nil check, so a
  # field name containing no underscore raised NoMethodError. Fall back
  # to the whole field name when the pattern does not match.
  def facet_label(field)
    match = field.match(/(.*)_.*/)
    base = match ? match[1] : field
    base.humanize.downcase
  end
end
      +<% +@values.each do |value| + if value[1] > 0 +-%> + +<% + end +end +-%> +
    \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/flare/app/views/browse/facet.rhtml b/solr/client/ruby/flare/vendor/plugins/flare/app/views/browse/facet.rhtml new file mode 100755 index 00000000000..bf658cb5e39 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/flare/app/views/browse/facet.rhtml @@ -0,0 +1,55 @@ +<% +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +%> +
    +
    + <%=link_to '[clear constraints]', :action => 'clear'%> +
    + +
    +Queries: +
    +<% @flare.queries.each_with_index do |q,i| %> +<%=link_to q[:negative] ? "-" : '+', :action => :invert_query, :index => i%> +<%=q[:query]%> +<%= in_place_editor "query_#{i}", :url=> url_for(:action=>"update_query", :index=>i) %> +<%=link_to image_tag("x-close.gif"), :action => :remove_query, :index => i %>
    +<% end %> +
    +
    + +
    +Filters: +
    +<% @flare.filters.each_with_index do |filter, i| %> + <%=link_to filter[:negative] ? "-" : "+", :action => :invert_filter, :index => i%> + <%=filter[:field]%>:<%=filter[:value]%> + <%=link_to image_tag("x-close.gif"), :action => :remove_filter, :index => i %> +<% end %> +
    +
    + +
    + <%=link_to '[clear constraints]', :action => 'clear'%> +
    + +
    + +

    <%=params[:field]%>

    +<% @facets.each do |f| %> + <% if f.name %> + <%= link_to f.name, :action => 'add_filter', :field => params[:field], :value => f.name %> (<%=f.value%>) + <% else %> + <%= link_to '---- NO VALUE ----', :action => 'add_filter', :field => params[:field], :value => "[* TO *]", :negative => true %> (<%=f.value%>) + <% end %> +<% end%> \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/flare/app/views/browse/index.rhtml b/solr/client/ruby/flare/vendor/plugins/flare/app/views/browse/index.rhtml new file mode 100755 index 00000000000..858d10c3351 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/flare/app/views/browse/index.rhtml @@ -0,0 +1,121 @@ +<% +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +%> + + +
    +
    + <%=link_to '[clear constraints]', :action => 'clear'%> +
    + +
    + Saved searches: +
    + <% @flare.applied_facet_queries.each_with_index do |q, i| %> + <%=link_to q[:negative] ? "-" : "+", :action => :invert_saved_constraint, :index => i%> + <%=q[:name]%> + <%=link_to image_tag("x-close.gif"), :action => :remove_saved_constraint, :index => i %>
    + <% end %> +
    +Queries: +
    +<% @flare.queries.each_with_index do |q,i| %> +<%=link_to q[:negative] ? "-" : '+', :action => :invert_query, :index => i%> +<%=q[:query]%> +<%= in_place_editor "query_#{i}", :url=> url_for(:action=>"update_query", :index=>i, :script=>true) %> +<%=link_to image_tag("x-close.gif"), :action => :remove_query, :index => i %>
    +<% end %> +
    +
    + +
    +Filters: +
    +<% @flare.filters.each_with_index do |filter, i| %> + <%=link_to filter[:negative] ? "-" : "+", :action => :invert_filter, :index => i%> + <%=filter[:field]%>:<%=filter[:value]%> + <%=link_to image_tag("x-close.gif"), :action => :remove_filter, :index => i %>
    +<% end %> +
    +
    + +
    + <%=link_to '[clear constraints]', :action => 'clear'%> +
    + +<% form_tag({:action=>'save'},{:id => 'savesearch', :style => "display:none;"}) do %> + <%= text_field_tag :name, "", {:size => 10, :id => "savesearch_name"} %> + <%= link_to_function "save", "document.forms['savesearch'].submit();" -%> + <%= link_to_function "cancel", "Element.toggle('savesearch'); Element.toggle('savesearch_link')" -%> +<% end %> +<%= link_to_function "[save these constraints]", "Element.toggle('savesearch'); Element.toggle('savesearch_link'); $('savesearch_name').focus()", {:id => "savesearch_link"} -%> + +
    + +<% if @flare.empty_constraints? %> + search or facet to see results +<% else %> + +
    Results <%=[@start + 1,@response.total_hits].min%>-<%=[@response.total_hits,@results_per_page + @start].min%> of <%=@response.total_hits%>
    + +
    + <% @response.each do |doc| %> + <%= render :partial => "document/document_#{SOLR_ENV}", :locals => {:doc => doc, :response => @response}%> + <% end %> +
    +
    + +
    <%=link_to_if @flare.page != 1, "<<", :page => @flare.page - 1%> Results <%=[@start + 1,@response.total_hits].min%>-<%=[@response.total_hits,@results_per_page + @start].min%> of <%=@response.total_hits%><%=link_to_if @flare.page < (@response.total_hits.to_f / @results_per_page).ceil, ">>", :page => @flare.page + 1%>
    + +<% end %> \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/flare/app/views/document/_document_development.rhtml b/solr/client/ruby/flare/vendor/plugins/flare/app/views/document/_document_development.rhtml new file mode 100755 index 00000000000..1039e1e9818 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/flare/app/views/document/_document_development.rhtml @@ -0,0 +1,25 @@ +<% +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +%> + + + + + + + <% doc.each do |k,v|; highlighting = response.highlighted(doc['id'], k) %> + + <% end %> +
    <%=doc['title_text']%>
    <%=k%>:<%= highlighting ? "...#{highlighting}..." : (v.respond_to?('join') ? v.join(',') : v.to_s)%>
    + + diff --git a/solr/client/ruby/flare/vendor/plugins/flare/app/views/layouts/browse.rhtml b/solr/client/ruby/flare/vendor/plugins/flare/app/views/layouts/browse.rhtml new file mode 100755 index 00000000000..18a15f51977 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/flare/app/views/layouts/browse.rhtml @@ -0,0 +1,28 @@ +<% +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +%> + + + Solr flare: <%=controller.action_name%> + <%= javascript_include_tag :defaults %> + <%= stylesheet_link_tag 'flare'%> + + +
    + + diff --git a/solr/client/ruby/flare/vendor/plugins/flare/app/views/simile/exhibit.rhtml b/solr/client/ruby/flare/vendor/plugins/flare/app/views/simile/exhibit.rhtml new file mode 100644 index 00000000000..acb919fe05c --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/flare/app/views/simile/exhibit.rhtml @@ -0,0 +1,37 @@ +<% +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +%> + + + SIMILE Exhibit view + + + + + + +

    SIMILE Exhibit view

    + + + + + +
    +
    +
    +
    +
    ">
    +
    + + \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/flare/app/views/simile/timeline.rhtml b/solr/client/ruby/flare/vendor/plugins/flare/app/views/simile/timeline.rhtml new file mode 100755 index 00000000000..d04797b7b5e --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/flare/app/views/simile/timeline.rhtml @@ -0,0 +1,58 @@ +<% +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +%> + + + + + + +
    + + diff --git a/solr/client/ruby/flare/vendor/plugins/flare/app/views/simile/timeline.rxml b/solr/client/ruby/flare/vendor/plugins/flare/app/views/simile/timeline.rxml new file mode 100755 index 00000000000..d3d2518e537 --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/flare/app/views/simile/timeline.rxml @@ -0,0 +1,50 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +xml.data do + @data.each do |doc| + xml.event(doc['title_text'], + :start => doc[SOLR_CONFIG[:timeline_dates].to_s], + :end => doc[SOLR_CONFIG[:timeline_dates].to_s], + :title => doc['title_text'], + :image => SOLR_CONFIG[:image_proc] ? SOLR_CONFIG[:image_proc].call(doc) : nil) + end +end + +# Amazon images: http://www.betaversion.org/~stefano/linotype/news/66/ +# +# +# A few days to write some documentation for <a href="http://simile.mit.edu/timeline/">Timeline</a>. +# +# +# +# I'm not sure precisely when my friend's wedding is. +# +# +# +# Woohoo! +# +# \ No newline at end of file diff --git a/solr/client/ruby/flare/vendor/plugins/flare/init.rb b/solr/client/ruby/flare/vendor/plugins/flare/init.rb new file mode 100644 index 00000000000..4f97bed495a --- /dev/null +++ b/solr/client/ruby/flare/vendor/plugins/flare/init.rb @@ -0,0 +1,13 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
# Root namespace for the flare plugin; loads the session search
# context and the controller mixins.
module Flare
end

require 'flare/context'
require 'flare/controller_extensions'
require 'solr'

# Holds the state of one faceted-browsing session against a Solr
# server: active queries, filters, saved facet queries, pagination and
# sorting. Facet/text field lists are discovered from the index by
# naming convention (*_facet, *_text).
class Flare::Context
  attr_accessor :queries, :filters, :facet_queries, :applied_facet_queries, :page, :sort
  attr_reader :facet_fields, :text_fields, :connection

  # solr_config keys used here: :solr_url, :facets_exclude,
  # :solr_query_type, and :<query_type>_query_params.
  def initialize(solr_config={})
    @solr_config = solr_config

    @connection = Solr::Connection.new(@solr_config[:solr_url])

    # Fix: the original repeated everything clear already does
    # (constraints, page, index metadata), costing a redundant
    # index-info round-trip to Solr. clear alone is sufficient.
    clear
    @facet_queries = {} # name => {:queries => [], :filters => []}
  end

  # Resets all per-search constraints and pagination.
  # facet_queries are NOT cleared as their lifetime is different than
  # constraints.
  def clear
    @queries = []
    @filters = []
    @applied_facet_queries = []
    @page = 1

    # metadata is re-read for development purposes - allowing flare to
    # stay running while different Solr datasets are swapped underneath
    refresh_index_metadata
  end

  def empty_constraints?
    @queries.empty? && @filters.empty? && @applied_facet_queries.empty?
  end

  # Executes the current constraints against Solr.
  #   start - zero-based offset of the first hit
  #   max   - page size
  # Returns the solr-ruby response object.
  # TODO: Allow the search method to facilitate acts_as_solr somehow too
  def search(start=0, max=25)
    # Materialize each saved facet query into a real Solr query string,
    # caching it under :real_query for the applied-query lookup below.
    facet_queries = @facet_queries.collect do |k,v|
      clauses = filter_queries(v[:filters])
      clauses << build_boolean_query(v[:queries])
      query = clauses.join(" AND ")
      @facet_queries[k][:real_query] = query
      query
    end

    qa = applied_facet_queries.collect {|map| q = @facet_queries[map[:name]][:real_query]; map[:negative] ? "-(#{q})" : q}
    qa << build_boolean_query(@queries)

    query_type = @solr_config[:solr_query_type] || :dismax
    query_config = @solr_config["#{query_type.to_s}_query_params".to_sym] || {}
    solr_params = query_config.merge(:query => qa.join(" AND "),
                                     :filter_queries => filter_queries(@filters),
                                     :start => start,
                                     :rows => max,
                                     :facets => {
                                       :fields => @facet_fields, :limit => 20, :mincount => 1, :sort => :count,
                                       :queries => facet_queries
                                     },
                                     :highlighting => {:field_list => @text_fields},
                                     :sort => @sort)
    request = Solr::Request::Standard.new(solr_params)

    #TODO: call response.field_facets(??) - maybe field_facets should be higher level?
    @connection.send(request)
  end

  # Fetches a single document by unique id; nil when there is no hit.
  def document_by_id(id)
    request = Solr::Request::Standard.new(:query => "id:\"#{id}\"")
    @connection.send(request).hits[0]
  end

  # Returns facet values for one field under the current constraints.
  # limit of -1 means unlimited; prefix restricts the returned values.
  def retrieve_field_facets(field, limit=-1, prefix=nil)
    req = Solr::Request::Standard.new(:query => build_boolean_query(@queries),
                                      :filter_queries => filter_queries(@filters),
                                      :facets => {:fields => [field],
                                        :mincount => 1, :limit => limit, :prefix => prefix, :missing => true, :sort => :count
                                      },
                                      :rows => 0
                                      )

    results = @connection.send(req)

    results.field_facets(field)
  end

  def to_s
    <<-TO_S
    ------
    Applied facet queries: #{applied_facet_queries.inspect}
    Queries: #{queries.inspect}
    Filters: #{filters.inspect}
    Facet queries: #{facet_queries.inspect}
    ------
    TO_S
  end

  private
  # ANDs the (possibly negated) queries together; "*:*" when empty.
  def build_boolean_query(queries)
    if queries.nil? || queries.empty?
      query = "*:*"
    else
      query = queries.collect{|q| "#{q[:negative] ? '-' : ''}(#{q[:query]})"}.join(' AND ')
    end

    query
  end

  # Renders filters as Solr filter-query clauses, quoting every value
  # except the special match-anything range "[* TO *]".
  def filter_queries(filters)
    filters.collect do |filter|
      value = filter[:value]
      if value != "[* TO *]"
        value = "\"#{value}\""
      end
      "#{filter[:negative] ? '-' : ''}#{filter[:field]}:#{value}"
    end
  end

  # Asks Solr for index metadata (field names, doc counts, ...).
  def index_info
    @connection.send(Solr::Request::IndexInfo.new)
  end

  # Re-reads index metadata and derives the facet/text field lists by
  # the *_facet / *_text naming convention, minus :facets_exclude.
  # Shared by initialize and clear (was duplicated in the original).
  # TODO: is facets_exclude working? where are the tests?! :)
  def refresh_index_metadata
    @index_info = index_info
    excluded = @solr_config[:facets_exclude] ? @solr_config[:facets_exclude].collect {|e| e.to_s} : []
    @facet_fields = @index_info.field_names.find_all {|v| v =~ /_facet$/} - excluded
    @text_fields = @index_info.field_names.find_all {|v| v =~ /_text$/}
  end
end
module Flare
  # Mixes flare's faceted-browsing actions into ActionController::Base.
  # A controller opts in with:  flare :suggest_field => 'some_field'
  module ActionControllerExtensions

    def self.included(base)
      base.extend(ClassMethods)
    end

    module ClassMethods
      # Installs the flare actions plus the before_filter that creates
      # the session-backed Flare::Context.
      def flare(options={})
        include Flare::ActionControllerExtensions::InstanceMethods

        # field queried by the auto-complete suggester
        cattr_accessor :suggest_field
        self.suggest_field = options[:suggest_field] || 'text'

        before_filter :flare_before
      end
    end

    module InstanceMethods
      # Main results page for the current constraints.
      def index
        @results_per_page = 25

        if params[:page]
          @flare.page = params[:page].to_i
        end

        @start = (@flare.page - 1) * @results_per_page

        @response = @flare.search(@start, @results_per_page)
      end

      # Lists the values of the facet field named in params[:field].
      def facet
        @facets = @flare.retrieve_field_facets(params[:field])
      end

      # Ajax endpoint backing the search box's suggestion list.
      def auto_complete_for_search_query
        @values = @flare.retrieve_field_facets(self.class.suggest_field, 5, params['search']['query'].downcase)

        render :partial => 'suggest'
      end

      # Adds a non-blank query constraint and returns to page one.
      def add_query
        query = params[:search][:query].strip
        if query.size > 0
          @flare.queries << {:query => query}
          @flare.page = 1
        end
        redirect_to :action => 'index'
      end

      # In-place edit of an existing query constraint (Ajax).
      def update_query
        logger.debug "update_query: #{params.inspect}"
        @flare.queries[params[:index].to_i][:query] = params[:value]
        @flare.page = 1 # TODO: let the context adjust this automatically when its state changes
        render :update do |page|
          page.redirect_to :action => 'index'
        end
      end

      def invert_query
        q = @flare.queries[params[:index].to_i]
        q[:negative] = !q[:negative]
        reset_page_and_redirect
      end

      def remove_query
        @flare.queries.delete_at(params[:index].to_i)
        reset_page_and_redirect
      end

      def invert_filter
        f = @flare.filters[params[:index].to_i]
        f[:negative] = !f[:negative]
        reset_page_and_redirect
      end

      def remove_filter
        @flare.filters.delete_at(params[:index].to_i)
        reset_page_and_redirect
      end

      def add_filter
        @flare.filters << {:field => params[:field], :value => params[:value], :negative => (params[:negative] ? true : false)}
        reset_page_and_redirect
      end

      # Applies a saved search; deliberately keeps the current page.
      def add_saved_search
        @flare.applied_facet_queries << {:name => params[:name], :negative => (params[:negative] ? true : false)}
        redirect_to :action => 'index'
      end

      def remove_saved_constraint
        @flare.applied_facet_queries.delete_at(params[:index].to_i)
        reset_page_and_redirect
      end

      # Drops every constraint and starts over.
      def clear
        @flare.clear
        redirect_to :action => 'index'
      end

      # Replaces the current constraints with a saved search's copies.
      def edit_saved_search
        @flare.clear
        saved = @flare.facet_queries[params[:name]]
        @flare.filters = saved[:filters].clone
        @flare.queries = saved[:queries].clone
        redirect_to :action => 'index'
      end

      # Applies a saved search and renders the results immediately.
      # Fix: removed an unused local that fetched the saved query but
      # never used it.
      def show_saved
        @flare.applied_facet_queries << {:name => params[:name], :negative => (params[:negative] ? true : false)}
        index
        render :action => 'index'
      end

      # Saves the current queries/filters under params[:name].
      def save
        @flare.facet_queries[params[:name]] = {:filters => @flare.filters.clone, :queries => @flare.queries.clone}
        redirect_to :action => 'index'
      end

      def remove_saved_search
        @flare.facet_queries.delete(params[:name])
        @flare.applied_facet_queries.delete_if {|f| params[:name] == f[:name]}
        reset_page_and_redirect
      end

      def invert_saved_constraint
        f = @flare.applied_facet_queries[params[:index].to_i]
        f[:negative] = !f[:negative]
        reset_page_and_redirect
      end

      private
      # Sets up the session-backed search context before every action.
      def flare_before
        # TODO: allow source of context to be configurable.
        session[:flare_context] ||= Flare::Context.new(SOLR_CONFIG)

        @flare = session[:flare_context]
      end

      # Shared tail of every constraint-mutating action (was repeated
      # verbatim in eight actions).
      def reset_page_and_redirect
        @flare.page = 1 # TODO: let the context adjust this automatically when its state changes
        redirect_to :action => 'index'
      end
    end

  end
end

module ActionController
  class Base
    include Flare::ActionControllerExtensions
  end
end
require 'test/unit'
require 'flare'

# Stub the IndexInfo round-trip so Flare::Context can be constructed
# without a live Solr server.
# Fix: the heredoc opener was garbled in the patch text (it read
# "<{'status'..." — a syntax error); reconstructed as <<SOLR_RESPONSE
# with the 'responseHeader' key the response body requires.
class Flare::Context
  def index_info
    Solr::Response::IndexInfo.new(
<<SOLR_RESPONSE
{'responseHeader'=>{'status'=>0, 'QTime'=>7},
 'fields'=>{'id'=>{'type'=>'string'}, 'text'=>{'type'=>'text'}},
 'index'=>{'maxDoc'=>1337165, 'numDocs'=>1337159, 'version'=>'1174965134952'}
}
SOLR_RESPONSE
)
  end
end

class FlareContextTest < Test::Unit::TestCase
  def setup
    @flare_context = Flare::Context.new({:solr_url => 'http://server:8983/solr'})
  end

  # clear must reset pagination back to the first page.
  # (Fix: assert_equal takes the expected value first.)
  def test_clear
    @flare_context.page = 5
    @flare_context.clear
    assert_equal 1, @flare_context.page
  end
end
Solr::Request::Select (due to String.each pain) + - SOLR-1047 - added support for facet.method + +v0.0.6: + release_date: 2008-07-14 + changes: + - Added Solr::Request::Spellcheck + - Enabled Solr::Request::Select to work as a general pass through to any registered request handler + - Fixed modify_document_test.rb so as to not be brittle with Hash ordering + - Added support for alternate field highlighting to Solr::Request::Standard (and thus DisMax) + - Added facet.offset support to Solr::Request::Standard/Dismax + - Added shards parameter to Solr::Request::Standard/Dismax + +v0.0.5: + release_date: 2007-08-27 + changes: + - Added support for highlighter fragment size to Solr::Request::Standard + - Added support for MoreLikeThese to Solr::Request::Standard + - Added Solr::Request::ModifyDocument (requires SOLR-139 patch) + - Added Solr::Util.query_parser_escape() + +v0.0.4: + release_date: 2007-08-16 + changes: + - Solr::Indexer#solr added to gain access to the Solr::Connection instance + - Fixed issue with multi-line String field values when field set multiValued="false" + - Fixed tests to work without either Hpricot or libxml2 + +v0.0.3: + release_date: 2007-05-22 + changes: + - Adjusted HpricotMapper and XPathMapper, and tests, to load only if their dependencies are available. + +v0.0.2: + release_date: 2007-05-15 + changes: + - mappers, etc + +v0.0.1: + release_date: 2007-02-15 + changes: + - initial release diff --git a/solr/client/ruby/solr-ruby/LICENSE.txt b/solr/client/ruby/solr-ruby/LICENSE.txt new file mode 100644 index 00000000000..261eeb9e9f8 --- /dev/null +++ b/solr/client/ruby/solr-ruby/LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/solr/client/ruby/solr-ruby/README b/solr/client/ruby/solr-ruby/README new file mode 100644 index 00000000000..9c492059e3b --- /dev/null +++ b/solr/client/ruby/solr-ruby/README @@ -0,0 +1,56 @@ +solr-ruby exposes the power of Solr as a Ruby DSL (domain specific language). 
+ + Visit the solr-ruby wiki for more information: http://wiki.apache.org/solr/solr-ruby + +USAGE + +First launch Solr: + + cd solr + java -jar start.jar + +In a separate shell, launch {{{irb -Ilib}}}: + + require 'solr' # load the library + include Solr # Allow Solr:: to be omitted from class/module references + + # connect to the solr instance + conn = Connection.new('http://localhost:8983/solr', :autocommit => :on) + + # add a document to the index + conn.add(:id => 123, :title_text => 'Lucene in Action') + + # update the document + conn.update(:id => 123, :title_text => 'Solr in Action') + + # print out the first hit in a query for 'action' + response = conn.query('action') + print response.hits[0] + + # iterate through all the hits for 'action' + conn.query('action') do |hit| + puts hit.inspect + end + + # delete document by id + conn.delete(123) + +INSTALLATION + +First run the tests: + + rake + +then build the gem: + + rake package + +and install the versioned gem: + + gem install pkg/solr-x.x.x.gem + +LICENSE + +This package is licensed using the Apache Software License 2.0. + + diff --git a/solr/client/ruby/solr-ruby/Rakefile b/solr/client/ruby/solr-ruby/Rakefile new file mode 100644 index 00000000000..d14399c91c2 --- /dev/null +++ b/solr/client/ruby/solr-ruby/Rakefile @@ -0,0 +1,190 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +# the default task is to run both the unit and functional tests +# functional tests require that a solr test server is running +# but this Rakefile should take care of starting and stopping it +# for you +# +# if you just want to run unit tests: +# +# rake test_units +# +# and if you just want to run functional tests +# +# rake test_functionals +# +# if you would like to see solr startup messages on STDERR +# when starting solr test server during functional tests use: +# +# rake SOLR_CONSOLE=true + +SOLR_RUBY_VERSION = '0.0.8' + +require 'rubygems' +require 'rake' +require 'rake/testtask' +require 'rake/rdoctask' +require 'rake/packagetask' +require 'rake/gempackagetask' +require 'test/functional/test_solr_server' + +task :default => [:test_units] + +SOLR_PARAMS = { + :quiet => ENV['SOLR_CONSOLE'] ? false : true, + :jetty_home => ENV['SOLR_JETTY_HOME'] || File.expand_path('../../../example'), + :jetty_port => ENV['SOLR_JETTY_PORT'] || 8888, + :solr_home => ENV['SOLR_HOME'] || File.expand_path('test') +} + + +spec = Gem::Specification.new do |s| + s.name = 'solr-ruby' + s.version = SOLR_RUBY_VERSION + s.author = 'Apache Solr' + s.email = 'ruby-dev@lucene.apache.org' + s.homepage = 'http://wiki.apache.org/solr/solr-ruby' + s.platform = Gem::Platform::RUBY + s.summary = 'Ruby library for working with Apache Solr' + + # Omit functional tests from gem for now, as that requires a Solr instance + s.files = Dir.glob("lib/**/*").concat(Dir.glob("test/unit/**/*")) + s.require_path = 'lib' + s.autorequire = 'solr' + s.has_rdoc = true +end + +namespace :gem do + Rake::GemPackageTask.new(spec) do |pkg| + pkg.need_zip = true + pkg.need_tar = true + pkg.package_dir = "pkg/gem" + end +end + +namespace :rails do + desc "Creates rails plugin structure and distributable packages. init.rb is created and removed on the fly." 
+ task :package => "init.rb" do + File.rm_f("init.rb") + end + Rake::PackageTask.new("solr-ruby-rails", SOLR_RUBY_VERSION) do |pkg| + pkg.need_zip = true + pkg.need_tar = true + pkg.package_dir = "pkg/rails" + pkg.package_files.include("lib/**/*.rb", "test/unit/**/*.rb", "init.rb", "LICENSE.txt", "README") + end + + file "init.rb" do + open("init.rb", "w") do |file| + file.puts LICENSE + file.puts "require 'solr.rb'" + end + end + + desc "Install the Rails plugin version into the vendor/plugins dir. Need to set PLUGINS_DIR environment variable." + task :install_solr_ruby => :package do + plugins_dir = ENV["PLUGINS_DIR"] or raise "You must set PLUGINS_DIR" + mkdir File.join(plugins_dir, "solr-ruby-rails-#{SOLR_RUBY_VERSION}/") rescue nil + File.cp_r(File.join("pkg","rails", "solr-ruby-rails-#{SOLR_RUBY_VERSION}/"), plugins_dir) + end +end + +task :package => ["rails:package", "gem:package"] +task :repackage => [:clobber_package, :package] +task :clobber_package => ["rails:clobber_package", "gem:clobber_package"] do rm_r "pkg" rescue nil end +task :clobber => [:clobber_package] + +desc "Generate rdoc documentation" +Rake::RDocTask.new('doc') do |rd| + rd.rdoc_files.include("lib/**/*.rb") + rd.rdoc_files.include('README', 'CHANGES.yml', 'LICENSE.txt') + rd.main = 'README' + rd.rdoc_dir = 'doc' +end + +desc "Run unit tests" +Rake::TestTask.new(:test_units) do |t| + t.pattern = 'test/unit/*_test.rb' + t.verbose = true + t.ruby_opts = ['-r solr', '-r test/unit', '-Itest/unit'] +end + +# NOTE: test_functionals does not work standalone currently. 
It needs the TestSolrServer wrapper in the :test task +Rake::TestTask.new(:test_functionals) do |t| + t.pattern = 'test/functional/*_test.rb' + t.verbose = true + t.ruby_opts = ['-r solr', '-r test/unit', '-Itest/functional'] +end + +desc "Run unit and functional tests" +task :test => [:test_units] do + rm_rf "test/data" # remove functional test temp data directory + + # wrap functional tests with a test-specific Solr server + got_error = TestSolrServer.wrap(SOLR_PARAMS) do + Rake::Task[:test_functionals].invoke + end + + raise "test failures" if got_error +end + +# TODO: consider replacing system() to rcov with the included +# Rake task: http://eigenclass.org/hiki.rb?cmd=view&p=rcov+FAQ&key=rake +namespace :test do + desc 'Measures test coverage' + # borrowed from here: http://clarkware.com/cgi/blosxom/2007/01/05#RcovRakeTask + task :coverage do + rm_rf "coverage" + rm_rf "coverage.data" + TestSolrServer.wrap(SOLR_PARAMS) do + system("rcov --aggregate coverage.data --text-summary -Ilib:test/functional test/functional/*_test.rb") + end + system("rcov --aggregate coverage.data --text-summary -Ilib:test/unit test/unit/*_test.rb") + system("open coverage/index.html") if PLATFORM['darwin'] + end +end + + +def egrep(pattern) + Dir['**/*.rb'].each do |fn| + count = 0 + open(fn) do |f| + while line = f.gets + count += 1 + if line =~ pattern + puts "#{fn}:#{count}:#{line}" + end + end + end + end +end + +desc "Report TODO/FIXME/TBD tags in the code" +task :todo do + egrep /#.*(FIXME|TODO|TBD)/ +end + +LICENSE = < Proc.new {|data| data[:upc].empty? ? 
data[:asin] : data[:upc]}, + :medium_facet => :medium, + :country_facet => :country, + :signed_facet => :signed, + :rating_facet => :netrating, + :language_facet => :language, + :genre_facet => Proc.new {|data| data[:genre].split('/').map {|s| s.strip}}, + :title_text => :title, + :full_title_text => :fullTitle, + :asin_display => :asin, + :notes_text => :notes, + :publisher_facet => :publisher, + :description_text => :description, + :author_text => :author, + :pages_text => :pages, + :published_year_facet => Proc.new {|data| data[:published].scan(/\d\d\d\d/)[0]} +} + +indexer = Solr::Indexer.new(source, mapping, :debug => debug) +indexer.index do |record, solr_document| + # can modify solr_document before it is indexed here +end + +indexer.solr.commit unless debug +indexer.solr.optimize unless debug \ No newline at end of file diff --git a/solr/client/ruby/solr-ruby/examples/delicious_library/sample_export.txt b/solr/client/ruby/solr-ruby/examples/delicious_library/sample_export.txt new file mode 100644 index 00000000000..b7edafbbbd2 --- /dev/null +++ b/solr/client/ruby/solr-ruby/examples/delicious_library/sample_export.txt @@ -0,0 +1,164 @@ +medium associatedURL boxHeightInInches boxLengthInInches boxWeightInPounds boxWidthInInches scannednumber upc asin country title fullTitle series numberInSeries edition aspect mediacount genre price currentValue language netrating description owner publisher published rare purchaseDate rating used signed hasExperienced notes location paid condition notowned platform key developer esrbrating players author illustrator pages director stars features mpaarating theatricalDate minutes artist conductor tracks tracklisting +game 0711719721123 B00006Z7HU us ATV Offroad Fury 2 ATV Offroad Fury 2 Video Game/ CD-ROM Online/ Bike/ Video Games $19.99 $4.40 4 ATV Offroad Fury 2 provides expansive off-road racing gameplay packed with more courses, modes, tricks, and ATVs, plus online gameplay via the network adapter to heighten the racing 
experience. Players will choose from more than 20 licensed and team-sponsored ATVs from top-tier manufacturers and off-road sponsors. Every featured ATV will be true to spec, allowing for realistic handling and reactions in every situation. Sony Computer Entertainment 20-03-2003 07-02-2007 5 PlayStation2 Teen +book 9780966075007 0966075005 us Chinese Characters: A Genealogy and Dictionary Chinese Characters: A Genealogy and Dictionary Paperback Chinese/ Polyglot/ Dictionaries; Polyglot $19.95 $18.00 4.5 This dictionary is designed to help students understand, appreciate and remember Chinese characters. It has the following features: -Every character entry includes a brief traditional Chinese etymology. -Genealogical charts highlight the connections between characters, showing the creation of more than 4000 characters from less than 200 simple pictographs and ideographs. -Mandarin standards in China and Taiwan are distinguished. -Simplified forms for each character are given. -Character entries list all words which use the character in any position, allowing a word to be found even if the first character is unknown. -English definitions are referenced in an English-Chinese index. -A word pronunciation index allows students to directly search for an overheard word without having to guess the initial character. -A stroke count index lists every character by number of strokes. Zhongwen.Com 01-08-1998 07-02-2007 550 +book 9780195840971 0195840976 us Concise English-Chinese Chinese-English Dictionary Concise English-Chinese Chinese-English Dictionary Paperback English (All)/ Chinese/ Linguistics $12.95 $3.00 3.5 With nearly 20,000 entries in each, this bilingual dictionary is ideal for travelers and students of Chinese or English. 
Among the dictionary's many features are:/ *Simplified Chinese characters as well as Pinyin/ romanization / *Pronunciation using international phonetic symbols/ *Numerous examples of usage in both languages/ *Appendices including consonants and vowels of the/ Chinese phonetic alphabet, and names and / abbreviations of China's provinces, regions,/ and municipalities/ *Handy pocket-sized format Oxford University Press, USA 01-07-1994 11-02-2007 1114 +movie 0025192022425 0783226985 us Dragon: The Bruce Lee Story Dragon: The Bruce Lee Story DVD Biography/ Drama/ Action & Adventure/ Documentary $14.98 $5.62 4 This enjoyable and touching biography of martial-arts film star Bruce Lee stars Jason Scott Lee (no relation), an actor with a lively face and natural intensity, who makes every moment of this film compelling. Directed by Rob Cohen, Dragon traces Bruce Lee's slow rise over myriad obstacles--most of them race-based--to become an international superstar in films. Lee's origins are oddly set in San Francisco instead of his real home in Seattle, but then again there is plenty of artistic license going on as Cohen explores the actor's psyche through some powerful fantasy sequences. Lauren Holly is good as Lee's wife, Linda (whose book about her late husband inspired this movie). A scene involving Bruce's rescue of son Brandon (who died in a filmmaking accident in 1993) from a murderous spirit is plain spooky. The special-edition DVD release has a widescreen presentation, director interview, featurette, screen tests, closed captioning, optional French soundtrack, and optional Spanish subtitles. --Tom Keogh Universal Studios 01-07-1998 07-02-2007 Aki Aleong/ Eric Bruskotter/ John Cheung/ Chao Li Chi/ Sam Hau/ Lauren Holly/ Clyde Kusatsu/ Nancy Kwan/ Michael Learned/ Jason Scott Lee/ Kay Tong Lim/ Sterling Macer Jr./ Iain M. 
Parker/ Ong Soo Han/ Michelle Tennant/ Sven-Ole Thorsen/ Robert Wagner/ Luoyong Wang/ Ric Young Closed-captioned/ Color/ Letterboxed/ Widescreen/ NTSC/ 2.35:1 PG-13 07-05-1993 120 +movie 0018111924795 B0009IW92A us The Essential Sherlock Holmes The Essential Sherlock Holmes DVD 8 Mystery/ Drama/ Mystery & Suspense/ Sherlock Holmes $29.99 $8.95 3.5 Dressed to Kill Terror By Night The Woman in Green Sherlock Holmes And the Secret Weapon A Study in Scarlet Silver Blaze Sherlock Holmes TV Series Delta 07-06-2005 09-02-2007 Essential Sherlock Holmes Box set/ Color/ NTSC NR +book 0073999151893 1569221863 us Guitar Scale Guru: The Scale Book - Your Guide for Success! Guitar Scale Guru: The Scale Book - Your Guide for Success! Paperback Guitar/ Instruction & Study/ Techniques $14.95 $9.46 4.5 All of the essential diagrams, drawings and information concerning theory, scales, and their uses in one easy-to-use book! Covers the five essential scales for today's guitarists - major, major pentatonic, minor, minor pentatonic and blues - with a unique Linking System that makes it easy to understand scales like a pro! Creative Concepts 01-02-2000 11-02-2007 168 +music 0019028394855 B000CC2XP6 us Light Up Ahead Light Up Ahead Audio CD $11.95 4 Track Listing: 1.going too 2.comin' after you 3.on the beam 4.dont know what 5.hide 6.signs 7.lower voice 8.bad side 9.light up ahead TR Music 07-02-1995 07-02-2007 5 1 +music 0800314886006 B000B7PU66 us Parallel Universe Parallel Universe Audio CD 2 $15.26 Contains Parallel Universe CD plus bonus Invisible Pagan Underdogs 13 track CD. 23 total tracks. 
TR Music 07-02-2005 07-02-2007 5 1 +book 9781556152115 1556152116 us Programmers at Work: Interviews With 19 Programmers Who Shaped the Computer Industry (Tempus) Programmers at Work: Interviews With 19 Programmers Who Shaped the Computer Industry (Tempus) Paperback Careers/ Compilers $9.95 $19.92 5 Tempus Books 07-02-1989 07-02-2007 400 +book 9781904978367 1904978363 us Time Out Amsterdam (Time Out Amsterdam Guide) Time Out Amsterdam (Time Out Amsterdam Guide) Paperback Guidebooks/ Amsterdam/ Time Out Travel $19.95 $0.99 3.5 Europe's most infamous city remains one of its most popular, and not without good reason: between its world-class art museums, its eminent canals that are perfect for wandering, and its coffee shops that don't exactly specialize in coffee, its variety is glorious indeed. However, with one of Europe's more forward-thinking cultural scenes and striking new architectural developments in IJburg and the Bijlmermeer, there's much more here to enjoy than the clichés; written, researched and edited entirely by locals, the Time Out Amsterdam guide tells travelers all about it. Highlights include Amsterdam after dark - the best restaurants, bars, and nightclubs in the city; an unmatched section on the city's cultural scene including galleries, performance art, classical music, and theater; and trips beyond Amsterdam to the flower auction in Aalsmeer, the cheese market at Gouda, and the windmills of Alblasserdam. Time Out Publishing 10-07-2005 18-02-2007 320 +book 9780865470804 0865470804 us Taking the Path of Zen (Taking the Path of Zen Ppr) Taking the Path of Zen (Taking the Path of Zen Ppr) Paperback Zen $12.00 $4.40 4 There is a fine art to presenting complex ideas with simplicity and insight, in a manner that both guides and inspires. In Taking the Path of Zen Robert Aitken presents the practice, lifestyle, rationale, and ideology of Zen Buddhism with remarkable clarity. 
/ / The foundation of Zen is the practice of zazen, or mediation, and Aitken Roshi insists that everything flows from the center. He discusses correct breathing, posture, routine, teacher-student relations, and koan study, as well as common problems and milestones encountered in the process. Throughout the book the author returns to zazen, offering further advice and more advanced techniques. The orientation extends to various religious attitudes and includes detailed discussions of the Three Treasures and the Ten Precepts of Zen Buddhism./ Taking the Path of Zen will serve as orientation and guide for anyone who is drawn to the ways of Zen, from the simply curious to the serious Zen student. / North Point Press 01-01-1982 06-02-2007 Robert Aitken 150 +book 9780963177513 0963177516 us Bubishi: Martial Art Spirit Bubishi: Martial Art Spirit Paperback Martial Arts $34.95 $61.18 5 Bubishi-Martial Art Spirit is the secret karate text of the Okinawan Masters. Guarded for centuries, this mystical book has finally been completely translated into English. The BUBISHI was cherished by Miyagi Chojun, the founder of Goju Ryu, Funakoshi Gichin, founder of Shotokan and Mabuni Kenwa, founder of Shito Ryu karate. It includes Dim Mak (The Death Touch), pressure points, knockout and killing techniques , 48 essential self defense applications, Chinese cures for Martial arts injuries and much more!!! 
Yamazato Pubns 12-02-1993 06-02-2007 George Alexander +book 9780142000281 0142000280 us Getting Things Done: The Art of Stress-Free Productivity Getting Things Done: The Art of Stress-Free Productivity Paperback Health & Stress/ Time Management/ Guides/ Labor & Industrial Relations/ Motivational/ Office Skills/ Creativity/ Self-Esteem/ Stress Management $15.00 $7.01 4.5 With first-chapter allusions to martial arts, "flow,""mind like water," and other concepts borrowed from the East (and usually mangled), you'd almost think this self-helper from David Allen should have been called Zen and the Art of Schedule Maintenance./ Not quite. Yes, Getting Things Done offers a complete system for downloading all those free-floating gotta-do's clogging your brain into a sophisticated framework of files and action lists--all purportedly to free your mind to focus on whatever you're working on. However, it still operates from the decidedly Western notion that if we could just get really, really organized, we could turn ourselves into 24//7 productivity machines. (To wit, Allen, whom the New Economy bible Fast Company has dubbed "the personal productivity guru," suggests that instead of meditating on crouching tigers and hidden dragons while you wait for a plane, you should unsheathe that high-tech saber known as the cell phone and attack that list of calls you need to return.)/ As whole-life-organizing systems go, Allen's is pretty good, even fun and therapeutic. It starts with the exhortation to take every unaccounted-for scrap of paper in your workstation that you can't junk, The next step is to write down every unaccounted-for gotta-do cramming your head onto its own scrap of paper. Finally, throw the whole stew into a giant "in-basket"/ That's where the processing and prioritizing begin; in Allen's system, it get a little convoluted at times, rife as it is with fancy terms, subterms, and sub-subterms for even the simplest concepts. 
Thank goodness the spine of his system is captured on a straightforward, one-page flowchart that you can pin over your desk and repeatedly consult without having to refer back to the book. That alone is worth the purchase price. Also of value is Allen's ingenious Two-Minute Rule: if there's anything you absolutely must do that you can do right now in two minutes or less, then do it now, thus freeing up your time and mind tenfold over the long term. It's commonsense advice so obvious that most of us completely overlook it, much to our detriment; Allen excels at dispensing such wisdom in this useful, if somewhat belabored, self-improver aimed at everyone from CEOs to soccer moms (who we all know are more organized than most CEOs to start with). --Timothy Murphy/ Penguin (Non-Classics) 31-12-2002 03-02-2007 David Allen 267 +book 0076092024163 0131422464 us Core J2EE Patterns: Best Practices and Design Strategies, Second Edition Core J2EE Patterns: Best Practices and Design Strategies, Second Edition Hardcover Qualifying Textbooks - Winter 2007 $54.99 $23.99 5 Prentice Hall Ptr 10-05-2003 07-02-2007 1 Deepak Alur/ Dan Malks/ John Crupi 650 +book 9780471202820 0471202827 us Agile Modeling: Effective Practices for Extreme Programming and the Unified Process Agile Modeling: Effective Practices for Extreme Programming and the Unified Process Paperback Object-Oriented Design/ Software Development/ Quality Control/ Computers & Internet/ Qualifying Textbooks - Winter 2007 $34.99 $24.39 3.5 The first book to cover Agile Modeling, a new modeling technique created specifically for XP projects eXtreme Programming (XP) has created a buzz in the software development community-much like Design Patterns did several years ago. Although XP presents a methodology for faster software development, many developers find that XP does not allow for modeling time, which is critical to ensure that a project meets its proposed requirements. 
They have also found that standard modeling techniques that use the Unified Modeling Language (UML) often do not work with this methodology. In this innovative book, Software Development columnist Scott Ambler presents Agile Modeling (AM)-a technique that he created for modeling XP projects using pieces of the UML and Rational's Unified Process (RUP). Ambler clearly explains AM, and shows readers how to incorporate AM, UML, and RUP into their development projects with the help of numerous case studies integrated throughout the book./ • AM was created by the author for modeling XP projects-an element lacking in the original XP design/ • The XP community and its creator have embraced AM, which should give this book strong market acceptance/ Companion Web site at www.agilemodeling.com features updates, links to XP and AM resources, and ongoing case studies about agile modeling./ John Wiley & Sons 01-02-2001 07-02-2007 Scott W. Ambler/ Ron Jeffries 224 +book 9780262012102 0262012103 us A Semantic Web Primer (Cooperative Information Systems) A Semantic Web Primer (Cooperative Information Systems) Hardcover Web Site Design/ Storage/ Internet $42.00 $25.98 4.5 The development of the Semantic Web, with machine-readable content, has the potential to revolutionize the World Wide Web and its use. A Semantic Web Primer provides an introduction and guide to this emerging field, describing its key ideas, languages, and technologies. Suitable for use as a textbook or for self-study by professionals, it concentrates on undergraduate-level fundamental concepts and techniques that will enable readers to proceed with building applications on their own. It includes exercises, project descriptions, and annotated references to relevant online materials. 
A Semantic Web Primer is the only available book on the Semantic Web to include a systematic treatment of the different languages (XML, RDF, OWL, and rules) and technologies (explicit metadata, ontologies, and logic and inference) that are central to Semantic Web development. The book also examines such crucial related topics as ontology engineering and application scenarios./ / After an introductory chapter, topics covered in succeeding chapters include XML and related technologies that support semantic interoperability; RDF and RDF Schema, the standard data model for machine-processable semantics; and OWL, the W3C-approved standard for a Web ontology language more extensive than RDF Schema; rules, both monotonic and nonmonotonic, in the framework of the Semantic Web; selected application domains and how the Semantic Web would benefit them; the development of ontology-based systems; and current debates on key issues and predictions for the future. The MIT Press 01-04-2004 07-02-2007 Grigoris Antoniou/ Frank van Harmelen 272 +book 0076092016335 0130674826 us A Practical Guide to eXtreme Programming A Practical Guide to eXtreme Programming Paperback Software Development/ Software Engineering $49.99 $4.38 4 Prentice Hall PTR 08-02-2002 08-02-2007 David Astels/ Granville Miller/ Miroslav Novak 384 +book 9780961454739 0961454733 us Art & Fear Art & Fear Paperback Study & Teaching/ Criticism $12.95 $7.25 4.5 "This is a book about making art. Ordinary art. Ordinary art means something like: all art not made by Mozart. After all, art is rarely made by Mozart-like people; essentially-statistically speaking-there aren't any people like that. Geniuses get made once-a-century or so, yet good art gets made all the time, so to equate the making of art with the workings of genius removes this intimately human activity to a strangely unreachable and unknowable place. 
For all practical purposes making art can be examined in great detail without ever getting entangled in the very remote problems of genius."
--from the Introduction/ Art & Fear explores the way art gets made, the reasons it often doesn't get made, and the nature of the difficulties that cause so many artists to give up along the way. The book's co-authors, David Bayles and Ted Orland, are themselves both working artists, grappling daily with the problems of making art in the real world. Their insights and observations, drawn from personal experience, provide an incisive view into the world of art as it is experienced by artmakers themselves./ This is not your typical self-help book. This is a book written by artists, for artists -- it's about what it feels like when artists sit down at their easel or keyboard, in their studio or performance space, trying to do the work they need to do. First published in 1994, Art & Fear quickly became an underground classic. Word-of-mouth response alone-now enhanced by internet posting-has placed it among the best-selling books on artmaking and creativity nationally./ Art & Fear has attracted a remarkably diverse audience, ranging from beginning to accomplished artists in every medium, and including an exceptional concentration among students and teachers. The original Capra Press edition of Art & Fear sold 80,000 copies./ An excerpt:/ Today, more than it was however many years ago, art is hard because you have to keep after it so consistently. On so many different fronts. For so little external reward. Artists become veteran artists only by making peace not just with themselves, but with a huge range of issues. You have to find your work.../ Image Continuum Press 01-04-2001 03-02-2007 David Bayles/ Ted Orland 122 +book 0785342616415 0201616416 us Extreme Programming Explained: Embrace Change Extreme Programming Explained: Embrace Change Paperback Software Development/ Software Engineering $29.95 $2.98 4 Kent Beck's eXtreme Programming eXplained provides an intriguing high-level overview of the author's Extreme Programming (XP) software development methodology. 
Written for IS managers, project leaders, or programmers, this guide provides a glimpse at the principles behind XP and its potential advantages for small- to mid-size software development teams./ The book intends to describe what XP is, its guiding principles, and how it works. Simply written, the book avoids case studies and concrete details in demonstrating the efficacy of XP. Instead, it demonstrates how XP relies on simplicity, unit testing, programming in pairs, communal ownership of code, and customer input on software to motivate code improvement during the development process. As the author notes, these principles are not new, but when they're combined their synergy fosters a new and arguably better way to build and maintain software. Throughout the book, the author presents and explains these principles, such as "rapid feedback" and "play to win," which form the basis of XP./ Generally speaking, XP changes the way programmers work. The book is good at delineating new roles for programmers and managers who Beck calls "coaches." The most striking characteristic of XP is that programmers work in pairs, and that testing is an intrinsic part of the coding process. In a later section, the author even shows where XP works and where it doesn't and offers suggestions for migrating teams and organizations over to the XP process./ In the afterword, the author recounts the experiences that led him to develop and refine XP, an insightful section that should inspire any organization to adopt XP. This book serves as a useful introduction to the philosophy and practice of XP for the manager or programmer who wants a potentially better way to build software. 
--Richard Dragan/ Topics covered: Extreme Programming (XP) software methodology, principles, XP team roles, facilities design, testing, refactoring, the XP software lifecycle, and adopting XP./ Addison-Wesley Professional 05-10-1999 07-02-2007 Kent Beck 224 +book 0785342146530 0321146530 us Test Driven Development: By Example (Addison-Wesley Signature Series) Test Driven Development: By Example (Addison-Wesley Signature Series) Paperback Software Development/ Testing/ Software Engineering/ Qualifying Textbooks - Winter 2007 $44.99 $28.75 4 Addison-Wesley Professional 08-11-2002 07-02-2007 Kent Beck 240 +book 9780743245517 0743245512 us A Tooth from the Tiger's Mouth: How to Treat Your Injuries with Powerful Healing Secrets of the Great Chinese Warrior (Fireside Books (Fireside)) A Tooth from the Tiger's Mouth: How to Treat Your Injuries with Powerful Healing Secrets of the Great Chinese Warrior (Fireside Books (Fireside)) Paperback Healing/ Herbal Remedies/ Chinese Medicine $14.00 $8.25 5 A renowned expert in Chinese sports medicine and martial arts reveals ancient Eastern secrets for healing common injuries, including sprains, bruises, deep cuts, and much more./ For centuries, Chinese martial arts masters have kept their highly prized remedies as carefully guarded secrets, calling such precious and powerful knowledge "a tooth from the tiger's mouth." Now, for the first time, these deeply effective methods are revealed to Westerners who want alternative ways to treat the acute and chronic injuries experienced by any active person./ While many books outline the popular teachings of traditional Chinese medicine, only this one offers step-by-step instructions for treating injuries. 
Expert practitioner and martial artist Tom Bisio explains the complete range of healing strategies and provides a Chinese first-aid kit to help the reader fully recover from every mishap: cuts, sprains, breaks, dislocations, bruises, muscle tears, tendonitis, and much more./ He teaches readers how to:/ / • Examine and diagnose injuries/ • / • Prepare and apply herbal formulas/ • / • Assemble a portable kit for emergencies/ • / • Fully recuperate with strengthening exercises and healing dietary advice/ Comprehensive and easy to follow, with drawings to illustrate both the treatment strategies and the strengthening exercises, this unique guidebook will give readers complete access to the powerful healing secrets of the great Chinese warriors./ Fireside 05-10-2004 06-02-2007 Tom Bisio 384 +book 9781932394696 1932394699 us Ruby for Rails: Ruby Techniques for Rails Developers Ruby for Rails: Ruby Techniques for Rails Developers Paperback/ Illustrated Web Site Design/ Object-Oriented Design/ Transportation & Highway $44.95 $24.50 4 -The word is out: with Ruby on Rails you can build powerful Web applications easily and quickly! And just like the Rails framework itself, Rails applications are Ruby programs. That means you can't tap into the full power of Rails unless you master the Ruby language./ Ruby for Rails, written by Ruby expert David Black (with a forward by David Heinemeier Hansson), helps Rails developers achieve Ruby mastery. Each chapter deepens your Ruby knowledge and shows you how it connects to Rails. You'll gain confidence working with objects and classes and learn how to leverage Ruby's elegant, expressive syntax for Rails application power. 
And you'll become a better Rails developer through a deep understanding of the design of Rails itself and how to take advantage of it./ Newcomers to Ruby will find a Rails-oriented Ruby introduction that's easy to read and that includes dynamic programming techniques, an exploration of Ruby objects, classes, and data structures, and many neat examples of Ruby and Rails code in action. Ruby for Rails: the Ruby guide for Rails developers!/ What's Inside/ Classes, modules, and objects/ Collection handling and filtering/ String and regular expression manipulation/ Exploration of the Rails source code/ Ruby dynamics/ Many more programming concepts and techniques!/ Manning Publications 11-05-2006 07-02-2007 David Black 532 +book 0021898130853 0020130856 us The Elements of Technical Writing (Elements of Series) The Elements of Technical Writing (Elements of Series) Paperback Writing Skills/ General & Reference/ Technical $9.95 $4.49 3.5 Longman 19-12-2000 11-02-2007 Gary Blake/ Robert W. Bly 192 +book 0785342310054 0201310058 us Effective Java Programming Language Guide Effective Java Programming Language Guide Paperback Qualifying Textbooks - Winter 2007 $49.99 $29.95 5 Written for the working Java developer, Joshua Bloch's Effective Java Programming Language Guide provides a truly useful set of over 50 best practices and tips for writing better Java code. With plenty of advice from an indisputable expert in the field, this title is sure to be an indispensable resource for anyone who wants to get more out of their code./ As a veteran developer at Sun, the author shares his considerable insight into the design choices made over the years in Sun's own Java libraries (which the author acknowledges haven't always been perfect). Based on his experience working with Sun's best minds, the author provides a compilation of 57 tips for better Java code organized by category. Many of these ideas will let you write more robust classes that better cooperate with built-in Java APIs. 
Many of the tips make use of software patterns and demonstrate an up-to-the-minute sense of what works best in today's design. Each tip is clearly introduced and explained with code snippets used to demonstrate each programming principle./ Early sections on creating and destroying objects show you ways to make better use of resources, including how to avoid duplicate objects. Next comes an absolutely indispensable guide to implementing "required" methods for custom classes. This material will help you write new classes that cooperate with old ones (with advice on implementing essential requirements like the equals() and hashCode() methods)./ The author has a lot to say about class design, whether using inheritance or composition. Tips on designing methods show you how to create understandable, maintainable, and robust classes that can be easily reused by others on your team. Sections on mapping C code (like structures, unions, and enumerated types) onto Java will help C programmers bring their existing skills to Sun's new language. Later sections delve into some general programming tips, like using exceptions effectively. The book closes with advice on using threads and synchronization techniques, plus some worthwhile advice on object serialization./ Whatever your level of Java knowledge, this title can make you a more effective programmer. Wisely written, yet never pompous or doctrinaire, the author has succeeded in packaging some really valuable nuggets of advice into a concise and very accessible guidebook that arguably deserves a place on most any developer's bookshelf. 
--Richard Dragan/ Topics covered:/ • Best practices and tips for Java/ • Creating and destroying objects (static factory methods, singletons, avoiding duplicate objects and finalizers)/ • Required methods for custom classes (overriding equals(), hashCode(), toString(), clone(), and compareTo() properly)/ • Hints for class and interface design (minimizing class and member accessibility, immutability, composition versus inheritance, interfaces versus abstract classes, preventing subclassing, static versus nonstatic classes)/ • C constructs in Java (structures, unions, enumerated types, and function pointers in Java)/ • Tips for designing methods (parameter validation, defensive copies, method signatures, method overloading, zero-length arrays, hints for Javadoc comments)/ • General programming advice (local variable scope, using Java API libraries, avoiding float and double for exact comparisons, when to avoid strings, string concatenation, interfaces and reflection, avoid native methods, optimizing hints, naming conventions)/ • Programming with exceptions (checked versus run-time exceptions, standard exceptions, documenting exceptions, failure-capture information, failure atomicity)/ • Threading and multitasking (synchronization and scheduling hints, thread safety, avoiding thread groups)/ • Serialization (when to implement Serializable, the readObject(), and readResolve() methods)/ Prentice Hall PTR 05-06-2001 07-02-2007 Joshua Bloch 252 +book 9780596000523 0596000529 us Creating Applications with Mozilla Creating Applications with Mozilla Paperback/ Illustrated Web Browsers/ Web Programming $39.95 $3.97 2.5 Mozilla is not just a browser. Mozilla is also a framework that allows developers to create cross-platform applications. 
This framework is made up of JavaScript, CSS (Cascading Style Sheets), and Mozilla's XUL (XML-based User-interface Language) as well as the Gecko rendering engine, XBL (eXtensible Binding Language), XPCOM (Mozilla's component model), and several other components. Creating Applications with Mozilla explains how applications are created with Mozilla and provides step-by-step information about how you can create your own programs using Mozilla's powerful cross-platform development framework. This book also shows examples of many different types of existing applications to demonstrate some of the possibilities of Mozilla application development. One of Mozilla's biggest advantages for a developer is that Mozilla-based applications are cross-platform, meaning programs work the same on Windows as they do on Linux or the Mac OS. Working through the book, you are introduced to the Mozilla development environment and after installing Mozilla, you quickly learn to create simple applications. After the initial satisfaction of developing your own portable applications, the book branches into topics on modular development and packaging your application. In order to build more complex applications, coverage of XUL, JavaScript, and CSS allow you to discover how to customize and build out your application shell. The second half of the book explores more advanced topics including UI enhancement, localization, and remote distribution. Mozilla 1.0 was released on June 5th, 2002, after more than four years of development as an open source project. This book has been written so that all of the information and examples will work with this release and any of the 1.0.x maintenance releases. In addition to Netscape's Mozilla-based browsers (Netscape 6.x and 7.x), the Mozilla framework has been used to create other browsers such as Galeon and Chimera, and chat clients such as ChatZilla and JabberZilla. 
Developers have also used Mozilla to create games, development tools, browser enhancements, as well as all sorts of other types of applications. O'Reilly Media 09-02-2002 07-02-2007 David Boswell/ Brian King/ Ian Oeschger/ Pete Collins/ Eric Murphy 480 +book 0031869008357 1558538356 us Life's Little Instruction Book 511 Suggestions, Observations, And Reminders On How To Live A Happy And Rewarding Life Life's Little Instruction Book 511 Suggestions, Observations, And Reminders On How To Live A Happy And Rewarding Life Paperback Ethics & Morality/ New Age/ Gifts/ Collections & Readers/ Spiritual $6.99 $1.70 5 H. Jackson Brown, Jr. originally wrote Life's Little Instruction Book™ as a gift for his son who was leaving home to begin his freshman year in college. Brown says, "I read years ago that it was not the responsibility of parents to pave the road for their children but to provide a road map, and I wanted to provide him with what I had learned about living a happy and rewarding life."Life's Little Instruction Book™ is a guidebook that gently points the way to happiness and fulfillment. The observations are direct, simple, and as practical as an umbrella./ "But it's not just for young people," says Brown. "Most of us already know how to live a successful and purposeful life. We know we should be more understanding and thoughtful, more responsible, courageous and appreciative. It's just that we sometimes need reminding."Life's Little Instruction Book™ is that reminder, as well as the perfect gift for a relative or a friend who needs encouragement at any time of the year./ • Never give up on anybody. Miracles happen every day./ • Be brave. Even if you're not, pretend to be. No one can tell the difference./ • Think big thoughts, but relish small pleasures./ • Learn to listen. 
Opportunity sometimes knocks softly./ • Never deprive someone of hope; it might be all they have./ • Be kinder than necessary./ • Become the most positive and enthusiastic person you know./ • Commit yourself to constant self-improvement./ • Don't major in minor things./ • Never cut what can be untied./ Since its debut in 1991, Life's Little Instruction Book™ has revolutionized the publishing industry. This little plaid book, which has been embraced the world over, has sold more than nine million copies, spent more than two years atop the New York Times bestseller list, and has been translated into 33 languages. Though originally written as a gift from a father to a son, its simple message has been enjoyed by men and women of all ages around the world./ Rutledge Hill Press 29-09-2000 08-02-2007 H. Jackson Brown +book 0037038174434 1580174434 us The Qigong Year The Qigong Year Hardcover Meditation/ Mental & Spiritual Healing/ Chinese Medicine/ Energy Healing/ Tai Chi & Qi Gong $12.95 $0.98 3 Closely related to the popular Chinese martial art Tai Chi, Qigong (pronounced "chee gong") is an ancient self-healing art that combines movement and meditation in holistic workouts that simultaneously develop body and spirit, promoting overall health and vitality./ Practiced by millions of Chinese for thousands of years, Qigong is now gaining popularity throughout the world. The program of exercises, movements, breathing techniques, and visualizations in The Qigong Year is specially designed to mirror the flow of the seasons. The exercises are illustrated with instructive line drawings, and the book features elegant duotones and patterned art accented with gold metallic ink./ Combining vigorous and gentle movement, Qigong exercises help strengthen the body, improve posture, align the spine, and relax shoulder and neck muscles. 
Three variations of each exercise - seated, standing, and advanced - are described, enabling people of all ages and abilities to easily practice and enjoy the benefits of Qigong./ Qigong meditation can help stabilize moods - reducing anger, anxiety, and depression - and improve outlook and self-confidence, making it easier to cope with life's challenges. The powerful affirmation technique, based on repeating positive statements that relate to personal problems or desires, can be practiced anywhere - while driving to work, taking a shower, or doing housework. Includes examples of effective affirmations, as well as guidelines for generating personalized ones to target individual health, work, and relationship challenges./ / Storey Publishing, LLC 15-05-2002 06-02-2007 Michael Bruney 256 +book 9780385494717 0385494718 us The Hidden Connections: Integrating The Biological, Cognitive, And Social Dimensions Of Life Into A Science Of Sustainability The Hidden Connections: Integrating The Biological, Cognitive, And Social Dimensions Of Life Into A Science Of Sustainability Hardcover Modern/ History $24.95 $2.00 4.5 The author of the bestselling The Tao of Physics and The Web of Life explores the profound social implications of emerging scientific principles and provides an innovative framework for using them to understand and solve some of the most important issues of our time./ / For most of history, scientific investigation was based on linear thinking. But the 1980's brought a revolutionary change. With the advent of improved computer power, scientists could apply complexity theory--nonlinear thinking--to scientific processes far more easily than ever before. Physicist Fritjof Capra was at the forefront of the revolution, and in The Web of Life he extended its scope by showing the impact of complexity theory on living organisms. 
In The Hidden Connections he breaks through another frontier, this time applying the principles of complexity theory to an analysis of the broad sphere of all human interactions./ / Capra posits that in order to sustain life in the future, the principles underlying our social institutions must be consistent with the organization that nature has evolved to sustain the "web of life." In a lucid and convincing argument, Capra explains how the theoretical ideas of science can be applied to the practical concerns of our time. Covering every aspect of human nature and society, he discusses such vital matters as the management of human organizations, the challenges and dangers of economic globalization, and the nature and the problems of biotechnology. He concludes with an authoritative, often provocative plan for designing ecologically sustainable communities and technologies as alternatives to the current economic globalization./ / A brilliant, incisive examination of the relationship between science and our social systems, The Hidden Connections will spark enormous debate in the scientific community and inspire us to think about the future of humanity in a new way. Doubleday 20-08-2002 11-02-2007 Fritjof Capra 320 +book 9781570625190 1570625190 us The Tao of Physics The Tao of Physics Paperback Taoism $16.95 $7.95 4 First published in 1975, The Tao of Physics rode the wave of fascination in exotic East Asian philosophies. Decades later, it still stands up to scrutiny, explicating not only Eastern philosophies but also how modern physics forces us into conceptions that have remarkable parallels. Covering over 3,000 years of widely divergent traditions across Asia, Capra can't help but blur lines in his generalizations. But the big picture is enough to see the value in them of experiential knowledge, the limits of objectivity, the absence of foundational matter, the interrelation of all things and events, and the fact that process is primary, not things. 
Capra finds the same notions in modern physics. Those approaching Eastern thought from a background of Western science will find reliable introductions here to Hinduism, Buddhism, and Taoism and learn how commonalities among these systems of thought can offer a sort of philosophical underpinning for modern science. And those approaching modern physics from a background in Eastern mysticism will find precise yet comprehensible descriptions of a Western science that may reinvigorate a hope in the positive potential of scientific knowledge. Whatever your background, The Tao of Physics is a brilliant essay on the meeting of East and West, and on the invaluable possibilities that such a union promises. --Brian Bruya Shambhala 04-01-2000 11-02-2007 Fritjof Capra 366 +book 9780553346107 0553346105 us Uncommon Wisdom Uncommon Wisdom Paperback Modern $27.00 $0.20 5 Bantam 01-01-1989 11-02-2007 Fritjof Capra 334 +book 9780385476751 0385476752 us The Web of Life The Web of Life Hardcover Chaos & Systems/ Acoustics & Sound/ System Theory/ Ecology $23.95 $5.49 4 The vitality and accessibility of Fritjof Capra's ideas have made him perhaps the most eloquent spokesperson of the latest findings emerging at the frontiers of scientific, social, and philosophical thought. In his international bestsellers The Tao of Physics and The Turning Point, he juxtaposed physics and mysticism to define a new vision of reality. In The Web of Life, Capra takes yet another giant step, setting forth a new scientific language to describe interrelationships and interdependence of psychological, biological, physical, social, and cultural phenomena--the "web of life."/ / / / During the past twenty-five years, scientists have challenged conventional views of evolution and the organization of living systems and have developed new theories with revolutionary philosophical and social implications. Fritjof Capra has been at the forefront of this revolution. 
In The Web of Life, Capra offers a brilliant synthesis of such recent scientific breakthroughs as the theory of complexity, Gaia theory, chaos theory, and other explanations of the properties of organisms, social systems, and ecosystems. Capra's surprising findings stand in stark contrast to accepted paradigms of mechanism and Darwinism and provide an extraordinary new foundation for ecological policies that will allow us to build and sustain communities without diminishing the opportunities for future generations./ / / / Now available in paperback for the first time, The Web of Life is cutting-edge science writing in the tradition of James Gleick's Chaos, Gregory Bateson's Mind and Matter, and Ilya Prigogine's Order Out of Chaos./ / / From the Trade Paperback edition. DoubleDay 01-09-1996 11-02-2007 Fritjof Capra 368 +book 9780880222785 0880222786 us DBase III Plus Advanced Programming DBase III Plus Advanced Programming Paperback dBASE $22.95 $1.85 Que Corporation,U.S. 02-02-1987 07-02-2007 Joseph-David Carrabis 300 +book 9780810807303 0810807300 us Building library collections, Building library collections, Unknown Binding Library Management $0.99 Scarecrow Press 08-02-1974 08-02-2007 Mary Duncan Carter 415 +book 9780894070174 0894070177 us The Book of Internal Exercises The Book of Internal Exercises Hardcover $12.95 $0.01 4 Strawberry Hill Pr 12-02-1978 11-02-2007 Stephen Thomas Chang 138 +book 9781416915546 1416915540 us The I Chong: Meditations from the Joint The I Chong: Meditations from the Joint Hardcover Entertainers/ Criminals $23.95 $9.00 4.5 / Beloved stoner comedian TOMMY CHONG is now older, wiser, and officially an EX-CON./ / / On the morning of February 24, 2003, agents of the U.S. Drug Enforcement Administration launched a sting called Operation Pipe Dreams and forced themselves through the door of Tommy's California home, with automatic weapons drawn. 
As a result of the raid on his home; the simultaneous ransacking of his son's company, Chong Glass; and the Bush administration's determination to make an example out of the "Pope of Pot;" he was sentenced to nine months in prison because his company shipped bongs to a head shop in Pennsylvania that was a front for the DEA./ / / Well . . . now it's Tommy Chong's turn to fight back and tell his side of the story./ / / Beginning with Tommy's experiences growing up in Canada in the forties and fifties as a mixed-race kid and going on to become a comedy legend, The I Chong is at once a memoir, a spiritual exploration of his time in prison, and a political indictment of the eroding civil liberties in post-9//11 American society. He tells the unbelievable story of his trip down the rabbit hole of America's war on drugs and of his experiences in the federal prison system, and he offers up timely observations on combating the conservative political forces at work in this country. Introspective, inspiring, and incendiary, The I Chong is a unique chronicle of one man's life and how his humorous and spiritual point of view saved him during his wrongful incarceration at the hands of an administration without boundaries./ Simon Spotlight Entertainment 08-08-2006 10-02-2007 Tommy Chong 224 +book 9780974514031 0974514039 us Pragmatic Project Automation: How to Build, Deploy, and Monitor Java Apps Pragmatic Project Automation: How to Build, Deploy, and Monitor Java Apps Paperback/ Illustrated Software Development/ Software Project Management $29.95 $15.65 4.5 Forget wizards, you need a slave--someone to do your repetitive, tedious and boring tasks, without complaint and without pay, so you'll have more time to design and write exciting code. Indeed, that's what computers are for. 
You can enlist your own computer to automate all of your project's repetitive tasks, ranging from individual builds and running unit tests through to full product release, customer deployment, and monitoring the system. Many teams try to do these tasks by hand. That's usually a really bad idea: people just aren't as good at repetitive tasks as machines. You run the risk of doing it differently the one time it matters, on one machine but not another, or doing it just plain wrong. But the computer can do these tasks for you the same way, time after time, without bothering you. You can transform these labor-intensive, boring and potentially risky chores into automatic, background processes that just work. In this eagerly anticipated book, you'll find a variety of popular, open-source tools to help automate your project. With this book, you will learn:/ • How to make your build processes accurate, reliable, fast, and easy./ • How to build complex systems at the touch of a button./ • How to build, test, and release software automatically, with no human intervention./ • Technologies and tools available for automation: which to use and when./ • Tricks and tips from the masters (do you know how to have your cell phone tell you that your build just failed?)/ You'll find easy-to-implement recipes to automate your Java project, using the same popular style as the rest of our Jolt Productivity Award-winning Starter Kit books. Armed with plenty of examples and concrete, pragmatic advice, you'll find it's easy to get started and reap the benefits of modern software development. You can begin to enjoy pragmatic, automatic, unattended software production that's reliable and accurate every time. 
The Pragmatic Programmers 08-02-2004 07-02-2007 Mike Clark 176 +book 9780912381039 0912381035 us Traditional Acupuncture: The Law of the Five Elements Traditional Acupuncture: The Law of the Five Elements Paperback Acupuncture & Acupressure/ Massage/ Pharmacology/ Chinese Medicine $16.00 $11.29 4 Traditional Acupuncture Institute, Incorporat 10-02-1994 11-02-2007 Dianne M. Connelly 192 +book 9780262032933 0262032937 us Introduction to Algorithms, Second Edition Introduction to Algorithms, Second Edition Hardcover Beginner's Guides/ Information Systems/ Qualifying Textbooks - Winter 2007 $82.00 $56.99 4 Aimed at any serious programmer or computer science student, the new second edition of Introduction to Algorithms builds on the tradition of the original with a truly magisterial guide to the world of algorithms. Clearly presented, mathematically rigorous, and yet approachable even for the math-averse, this title sets a high standard for a textbook and reference to the best algorithms for solving a wide range of computing problems./ With sample problems and mathematical proofs demonstrating the correctness of each algorithm, this book is ideal as a textbook for classroom study, but its reach doesn't end there. The authors do a fine job of explaining each algorithm. (Reference sections on basic mathematical notation will help readers bridge the gap, but it will help to have some math background to appreciate the full achievement of this handsome hardcover volume.) Every algorithm is presented in pseudo-code, which can be implemented in any computer language, including C//C++ and Java. This ecumenical approach is one of the book's strengths. When it comes to sorting and common data structures, from basic linked lists to trees (including binary trees, red-black, and B-trees), this title really shines, with clear diagrams that show algorithms in operation. 
Even if you just glance over the mathematical notation here, you can definitely benefit from this text in other ways./ The book moves forward with more advanced algorithms that implement strategies for solving more complicated problems (including dynamic programming techniques, greedy algorithms, and amortized analysis). Algorithms for graphing problems (used in such real-world business problems as optimizing flight schedules or flow through pipelines) come next. In each case, the authors provide the best from current research in each topic, along with sample solutions./ This text closes with a grab bag of useful algorithms including matrix operations and linear programming, evaluating polynomials, and the well-known Fast Fourier Transformation (FFT) (useful in signal processing and engineering). Final sections on "NP-complete" problems, like the well-known traveling salesman problem, show off that while not all problems have a demonstrably final and best answer, algorithms that generate acceptable approximate solutions can still be used to generate useful, real-world answers./ Throughout this text, the authors anchor their discussion of algorithms with current examples drawn from molecular biology (like the Human Genome Project), business, and engineering. Each section ends with short discussions of related historical material, often discussing original research in each area of algorithms. On the whole, they argue successfully that algorithms are a "technology" just like hardware and software that can be used to write better software that does more, with better performance. Along with classic books on algorithms (like Donald Knuth's three-volume set, The Art of Computer Programming), this title sets a new standard for compiling the best research in algorithms. For any experienced developer, regardless of their chosen language, this text deserves a close look for extending the range and performance of real-world software. 
--Richard Dragan/ Topics covered: Overview of algorithms (including algorithms as a technology); designing and analyzing algorithms; asymptotic notation; recurrences and recursion; probabilistic analysis and randomized algorithms; heapsort algorithms; priority queues; quicksort algorithms; linear time sorting (including radix and bucket sort); medians and order statistics (including minimum and maximum); introduction to data structures (stacks, queues, linked lists, and rooted trees); hash tables (including hash functions); binary search trees; red-black trees; augmenting data structures for custom applications; dynamic programming explained (including assembly-line scheduling, matrix-chain multiplication, and optimal binary search trees); greedy algorithms (including Huffman codes and task-scheduling problems); amortized analysis (the accounting and potential methods); advanced data structures (including B-trees, binomial and Fibonacci heaps, representing disjoint sets in data structures); graph algorithms (representing graphs, minimum spanning trees, single-source shortest paths, all-pairs shortest paths, and maximum flow algorithms); sorting networks; matrix operations; linear programming (standard and slack forms); polynomials and the Fast Fourier Transformation (FFT); number theoretic algorithms (including greatest common divisor, modular arithmetic, the Chinese remainder theorem, RSA public-key encryption, primality testing, integer factorization); string matching; computational geometry (including finding the convex hull); NP-completeness (including sample real-world NP-complete problems and their insolvability); approximation algorithms for NP-complete problems (including the traveling salesman problem); reference sections for summations and other mathematical notation, sets, relations, functions, graphs and trees, as well as counting and probability backgrounder (plus geometric and binomial distributions)./ The MIT Press 01-09-2001 07-02-2007 Thomas H. 
Cormen/ Charles E. Leiserson/ Ronald L. Rivest/ Clifford Stein 1184 +book 9781932394610 1932394613 us Ajax in Action Ajax in Action Paperback/ Illustrated HTML - General/ Internet/ Qualifying Textbooks - Winter 2007 $44.95 $17.97 4 Val's Blog "A tremendously useful field guide specifically written for developers down in the trenches...waiting for the killer solution..."/ Web users are getting tired of the traditional web experience. They get frustrated losing their scroll position; they get annoyed waiting for refresh; they struggle to reorient themselves on every new page. And the list goes on. With asynchronous JavaScript and XML, known as "Ajax," you can give them a better experience. Once users have experienced an Ajax interface, they hate to go back. Ajax is new way of thinking that can result in a flowing and intuitive interaction with the user./ Ajax in Action helps you implement that thinking--it explains how to distribute the application between the client and the server (hint: use a "nested MVC" design) while retaining the integrity of the system. You will learn how to ensure your app is flexible and maintainable, and how good, structured design can help avoid problems like browser incompatibilities. Along the way it helps you unlearn many old coding habits. Above all, it opens your mind to the many advantages gained by placing much of the processing in the browser. If you are a web developer who has prior experience with web technologies, this book is for you./ Manning Publications 01-10-2005 07-02-2007 Dave Crane/ Eric Pascarello/ Darren James 650 +book 9780802065193 0802065198 us Design with Type Design with Type Paperback Typography/ Manufacturing/ General & Reference/ Qualifying Textbooks - Winter 2007 $23.95 $20.00 5 Design with Type takes the reader through a study of typography that starts with the individual letter and proceeds through the word, the line, and the mass of text. 
The contrasts possible with type are treated in detail, along with their applications to the typography ofbooks, advertising, magazines, and information data. The various contending schools oftypography are discussed, copiously illustrated with the author's selection of over 150 examples of imaginative typography from many parts ot the world./ Design with Type differs from all other books on typography in that it discusses type as a design material as well as a means of communication: the premise is that if type is understood in terms of design, the user of type will be better able to work with it to achieve maximum legibility and effectiveness, as well as aesthetic pleasure. Everyone who uses type, everyone who enjoys the appearance of the printed word, will find Design with Type informative and fascinating. It provides, too, an outstanding example of the effectiveness of imaginative and tasteful typographic design./ University of Toronto Press 14-06-2000 11-02-2007 Carl Dair 162 +book 0676251833904 0804833907 us Making Out in Chinese (Making Out (Tuttle)) Making Out in Chinese (Making Out (Tuttle)) Paperback Chinese/ Phrasebooks - General $7.95 $4.00 3.5 Tuttle Publishing 11-02-2003 11-02-2007 Ray Daniels 96 +book 9780062502230 0062502239 us 365 Tao: Daily Meditations 365 Tao: Daily Meditations Paperback Taoism/ New Age/ Prayerbooks/ Meditations $15.95 $2.99 5 Umbrella, light, landscape, sky— 
There is no language of the holy. 
The sacred lies in the ordinary./ This treasury of life-enhancing daily readings turns a wise Taoist light on every facet of life. Each daily entry with a one-word title and its Chinese character in elegant calligraphy./ A brief, poetic aphorism provides the theme, followed by a clear, insightful mediation on the day's Taoist principle./ HarperSanFrancisco 17-07-1992 08-02-2007 Ming-Dao Deng 400 +book 9780460874113 046087411X us A Discourse on Method (Everyman's Library (Paper)) A Discourse on Method (Everyman's Library (Paper)) Paperback Classics/ History, 17th & 18th Century/ Modern/ Methodology & Statistics/ Applied/ Geometry & Topology/ Meteorology/ Experiments, Instruments & Measurement $7.95 $3.95 4.5 By calling everything into doubt, Descartes laid the foundations of modern philosophy. He deduced that human beings consist of minds and bodies; that these are totally distinct "substances"; that God exists and that He ensures we can trust the evidence of our senses. Ushering in the "scientific revolution" of Galileo and Newton, Descartes' ideas swept aside ancient and medieval traditions of philosophical methods and investigation. Tuttle Publishing 11-02-2007 Rene Descartes 300 +book 9780140286786 0140286780 us Your Money or Your Life: Transforming Your Relationship with Money and Achieving Financial Independence Your Money or Your Life: Transforming Your Relationship with Money and Achieving Financial Independence Paperback Public Finance/ Financial Planning/ Money Management/ Contemporary $15.00 $5.00 4.5 There's a big difference between "making a living" and making a life. Do you spend more than you earn? Does making a living feel more like making a dying? Do you dislike your job but can't afford to leave it? Is money fragmenting your time, your relationships with family and friends? 
If so, Your Money or Your Life is for you./ From this inspiring book, learn how to/ • get out of debt and develop savings/ • reorder material priorities and live well for less/ • resolve inner conflicts between values and lifestyles/ • convert problems into opportunities to learn new skills/ • attain a wholeness of livelihood and lifestyle/ • save the planet while saving money/ • and much more/ Penguin (Non-Classics) 01-09-1999 11-02-2007 Joe Dominguez/ Vicki Robin 400 +book 9780262541480 0262541483 us The Scheme Programming Language, 3rd Edition The Scheme Programming Language, 3rd Edition Paperback Qualifying Textbooks - Winter 2007 $37.00 $22.98 5 This thoroughly updated edition of The Scheme Programming Language provides an introduction to Scheme and a definitive reference for standard Scheme, presented in a clear and concise manner. Written for professionals and students with some prior programming experience, it begins by leading the programmer gently through the basics of Scheme and continues with an introduction to some of the more advanced features of the language. Many exercises are presented to help reinforce the lessons learned, and answers to the exercises are given in a new appendix. Most of the remaining chapters are dedicated to the reference material, which describes in detail the standard features of Scheme included in the Revised Report on Scheme and the ANSI//IEEE standard for Scheme./ / Numerous examples are presented throughout the introductory and reference portions of the text, and a unique set of extended example programs and applications, with additional exercises, are presented in the final chapter. Reinforcing the book's utility as a reference text are appendixes that present the formal syntax of Scheme, a summary of standard forms and procedures, and a bibliography of Scheme resources. The Scheme Programming Language stands alone as an introduction to and essential reference for Scheme programmers. 
It is also useful as a supplementary text for any course that uses Scheme./ / The Scheme Programming Language is illustrated by artist Jean-Pierre Hebert, who writes Scheme programs to extend his ability to create sophisticated works of digital art. The MIT Press 01-10-2003 08-02-2007 R. Kent Dybvig 329 +book 9780874775136 0874775132 us Drawing on the right side of the Brain Drawing on the right side of the Brain Paperback Reference/ Study & Teaching/ Creativity/ Drawing $15.95 $1.08 4.5 Tarcher 01-05-1989 11-02-2007 Betty Edwards 254 +book 9780912111193 0912111194 us Grasping the Wind (Paradigm Title) Grasping the Wind (Paradigm Title) Paperback Acupuncture & Acupressure/ Reference/ Pharmacology $28.95 $19.75 4 Point names, the traditional means for identifying acupoints, have meanings that are, like the wind, hard to grasp. Yet enfolded in these often poetic words is a utility that involves the complex associations derived from the evolution of the Chinese language and the vast array of therapeutic nalogies found in traditional medical works./ In discussing the point names, the authors examine the meaning, context, and significance of each acupuncture point to promote understanding of the point's use in acupuncture practice. Guidelines for understanding the nature and structure of the Chinese language are offered, along with discussions of the Chinese rationale for naming points and point groupings. The reasoning for selecting the English names is offered so that readers may adapt the names for their own use. Each of the 363 points covered is listed according to the system currently used in China. Descriptions include the name in Chinese characters, in Pinyin, and in English. The classical location according to major Chinese texts, the associated point groupings, an explanation of point functions, and classical energetic associations are also noted. Further detail is provided by inclusion of channel relationships, five-phase relationships, and qi functions. 
Additional notes detail linguistic and practical considerations that have accrued to the point over time. Alternate names for the point are given, again in Chinese, Pinyin, and English. Indexes provide stroke order listings, point group names, and point lists for each character. A glossary of all the characters used in point names provides a definition for each Chinese character. This book provides much valuable theoretical and therapeutic information./ Paradigm Publications (MA) 06-02-1989 11-02-2007 Andrew Ellis/ Nigel Wiseman/ Ken Boss 462 +book 9780064539142 0064539148 us Anatomy Coloring Book Anatomy Coloring Book Paperback Anatomy/ Fundamentals & Skills $15.00 $0.01 4.5 This unique learning tool teaches anatomical concepts and illustrates all the structures and systems of the body through coloring exercises, an effective teaching device that also aids in the retention of the material. 163 detailed illustrations are organized according to body system and a color-key system links terminology to illustrations, reinforcing learning and impressing upon students the visual details of anatomy. Harpercollins Publisher 01-02-1997 06-02-2007 Lawrence Elson 142 +book 0076092021193 0321136497 us Servlets and JSP: The J2EE Web Tier Servlets and JSP: The J2EE Web Tier Paperback Web Site Design/ Internet/ Servlets/ Qualifying Textbooks - Winter 2007 $54.99 $11.99 3.5 Addison-Wesley Professional 29-08-2003 07-02-2007 Jayson Falkner/ Kevin R Jones 784 +book 9781932394443 1932394443 us IntelliJ IDEA in Action (In Action series) IntelliJ IDEA in Action (In Action series) Paperback/ Illustrated Software Development $44.95 $22.29 4.5 This book will help developers dig a little deeper into IDEA and embrace its streamlining features which allow for more time to be spent on project design rather than code management. Without some educational investment, however, IDEA can be just another editor. That then, is the purpose of this book. 
To not only get you up and running quickly, but to teach you how to use IDEA's powerful software development tools to their fullest advantage. Important product features, including the debugger, source code control, and the many code generation tools, are carefully explained and accompanied by tips and tricks that will leave even experienced IDEA users with "Eureka!" moments of informed programming. Coders just graduating from NOTEPAD and Java IDE veterans alike will profit from the powerful and timesaving expertise provided in this essential programmer's resource./ IDEA is a next-generation IDE for Java, an Integrated Development Environment. As the term IDE implies, IDEA integrates or combines all of the tools needed to develop Java software into a single application and interface. In other words, IDEA is a tool that helps develop Java applications more quickly, easily, and intelligently. IDEA can help with every phase of a project, from design and development to testing and deployment. This book is based on the IntelliJ IDEA Java development environment software from JetBrains, version 5.0./ Manning Publications 01-03-2006 07-02-2007 Duane K. Fields/ Stephen Saunders/ Eugene Belayev 450 +book 0636920924876 1565924878 us Java in a Nutshell : A Desktop Quick Reference (Java Series) Java in a Nutshell : A Desktop Quick Reference (Java Series) (3rd Edition) 3rd Paperback Reference/ Networking/ Object-Oriented Design/ Nutshell/ Java $29.95 $0.01 4 The 3rd edition of the well-known reference, Java in a Nutshell, covers the essential APIs of Java 1.2, including networking, security, input and output, and basic language and utility classes. 
Due to the size of the Java 1.2 API, graphics and graphical user interface classes are now examined in a volume called Java Foundation Classes in a Nutshell, and server-side and enterprise programming are detailed in Java Enterprise in a Nutshell./ Though primarily a reference, the book starts off with a thorough, fast-paced introduction to Java, exploring all the key topics, including syntax, object-oriented programming, security, beans, and tools. These discussions are brief and very information-dense, and if you are buying this book to learn the language, you will probably be overwhelmed by the speed of this initiation./ This book intends to document quite a bit of Java, and it easily succeeds with broad coverage of Java programming in Part I, and API cataloging broken down by package in Part II. For example, discussions in Part I explain Types, Reflection, and Dynamic Loading. The handling of these topics takes a little over a page, but the book gives a useful overview with code examples that clearly illustrate the points made. It is one of the clearest and most concise treatments of these three topics available./ The chapters in Part II include an introduction, diagrams, and sections for each class in the package. The sections for each class can be very informative, as in the discussion of the Socket class in the java.net chapter, which includes how to instantiate a Socket object, getting I//O streams from the object you instantiated, and how to alter the behavior of sockets. This discussion, like most in this book, is brief, clear, and to the point./ If you are looking for a Java reference, this is a solid volume that will provide lasting value. 
--John Keogh/ Topics covered: Part I, "Introducing Java," provides broad coverage of Java programming topics, including data types, syntax, classes, and objects; Java file structure; inner classes; interfaces; packages; creating and initializing objects; destroying and finalizing objects; input//output; cryptography; networking; security; threads; JavaBeans; JavaDoc; and tools that come with Java 1.2 SDK./ Part II, "The Java API Quick Reference," includes chapters on the following Java packages: java.beans, java.beans.beancontext, java.io, java.lang, java.lang.ref, java.lang.reflect, java.math, java.net, java.security, java.security.acl, java.security.cert, java.security.interfaces, java.security.spec, java.text, java.util, java.util.jar, java.util.zip, javax.crypto, javax.crypto.interfaces, javax.crypto.spec, and a final chapter, which provides an index for classes, methods, and fields./ O'Reilly 11-02-1999 07-02-2007 David Flanagan 666 +book 9780596000486 0596000480 us JavaScript: The Definitive Guide JavaScript: The Definitive Guide Paperback/ Illustrated Web Site Design/ JavaScript/ Object-Oriented Design/ HTML - General/ Utilities/ Web Programming/ Qualifying Textbooks - Winter 2007 $44.95 $7.98 4.5 Since the earliest days of Internet scripting, Web developers have considered JavaScript: The Definitive Guide an essential resource. David Flanagan's approach, which combines tutorials and examples with easy-to-use syntax guides and object references, suits the typical programmer's requirements nicely. The brand-new fourth edition of Flanagan's "Rhino Book" includes coverage of JavaScript 1.5, JScript 5.5, ECMAScript 3, and the Document Object Model (DOM) Level 2 standard from the World Wide Web Consortium (W3C). Interestingly, the author has shifted away from specifying--as he did in earlier editions--what browsers support each bit of the language. 
Rather than say Netscape 3.0 supports the Image object while Internet Explorer 3.0 does not, he specifies that JavaScript 1.1 and JScript 3.0 support Image. More usefully, he specifies the contents of independent standards like ECMAScript, which encourages scripters to write applications for these standards and browser vendors to support them. As Flanagan says, JavaScript and its related subjects are very complex in their pure forms. It's impossible to keep track of the differences among half a dozen vendors' generally similar implementations. Nonetheless, a lot of examples make reference to specific browsers' capabilities./ Though he does not cover server-side APIs, Flanagan has chosen to separate coverage of core JavaScript (all the keywords, general syntax, and utility objects like Array) from coverage of client-side JavaScript (which includes objects, like History and Event, that have to do with Web browsers and users' interactions with them. This approach makes this book useful to people using JavaScript for applications other than Web pages. By the way, the other classic JavaScript text--Danny Goodman's JavaScript Bible--isn't as current as this book, but it's still a fantastic (and perhaps somewhat more novice-friendly) guide to the JavaScript language and its capabilities. --David Wall/ Topics covered: The JavaScript language (version 1.0 through version 1.5) and its relatives, JScript and ECMAScript, as well as the W3C DOM standards they're often used to manipulate. Tutorial sections show how to program in JavaScript, while reference sections summarize syntax and options while providing copious code examples./ O'Reilly Media 15-12-2001 07-02-2007 David Flanagan 900 +book 9780596007386 0596007388 us Java 5.0 Tiger: A Developer's Notebook Java 5.0 Tiger: A Developer's Notebook Paperback/ Illustrated Perl/ Java $29.95 $3.97 4.5 Java 5.0, code-named "Tiger", promises to be the most significant new version of Java since the introduction of the language. 
With over a hundred substantial changes to the core language, as well as numerous library and API additions, developers have a variety of new features, facilities, and techniques available. But with so many changes, where do you start? You could read through the lengthy, often boring language specification; you could wait for the latest 500 page tome on concepts and theory; you could even play around with the new JDK, hoping you figure things out--or you can get straight to work with Java 5.0 Tiger: A Developer's Notebook. This no-nonsense, down-and-dirty guide by bestselling Java authors Brett McLaughlin and David Flanagan skips all the boring prose and lecture, and jumps right into Tiger. You'll have a handle on the important new features of the language by the end of the first chapter, and be neck-deep in code before you hit the halfway point. Using the task-oriented format of this new series, you'll get complete practical coverage of generics, learn how boxing and unboxing affects your type conversions, understand the power of varargs, learn how to write enumerated types and annotations, master Java's new formatting methods and the for//in loop, and even get a grip on concurrency in the JVM. Light on theory and long on practical application, Java 5.0 Tiger: A Developer's Notebook allows you to cut to the chase, getting straight to work with Tiger's new features. The new Developer's Notebooks series from O'Reilly covers important new tools for software developers. Emphasizing example over explanation and practice over theory, they focus on learning by doing--you'll get the goods straight from the masters, in an informal and code-intensive style that suits developers. If you've been curious about Tiger, but haven't known where to start, this no-fluff, lab-style guide is the solution. 
O'Reilly Media 25-06-2004 07-02-2007 David Flanagan/ Brett McLaughlin 177 +book 9780936185514 0936185511 us Imperial Secrets of Health and Longevity Imperial Secrets of Health and Longevity Paperback New Age/ History/ Tai Chi & Qi Gong $12.95 $8.50 3.5 The 14 secrets of longevity of the Qing Dynasty Emperor, Qian Long, cover all aspects of living a long and healthy life. This book offers Qian Long's sage advice on the role of diet, exercise, relaxation, emotions, sex, and environment in achieving long life and good health. This traditional Chinese medical theory includes self-massage, stretching, and qi gong exercise as well as how to use Chinese tonic herbs. Blue Poppy Press 03-02-1999 06-02-2007 Bob Flaws 113 +book 9780936185521 093618552X us Statements of Fact in Traditional Chinese Medicine Statements of Fact in Traditional Chinese Medicine Paperback Basic Science/ History $15.95 $33.04 4.5 At last, what every tcm student has been looking for, a linguistically accurate, succinct list of the key statements of fact in tcm which, as a style of Chinese medicine, is largely a word game. However, to play the game, one needs to know the words. Hopefully, this book will help Western students gain both clarity and proficiency in the process and practice of doing tcm. When supplemented by a teacher, introductory grammar, and a dictionary, this book can quickly and efficiently help teach English language students and practitioners how to read medical Chinese and thus gain access to the vast library of Chinese medical literature. Blue Poppy Press 01-01-1994 06-02-2007 Bob Flaws 107 +book 9780936185927 0936185929 us The Tao of Healthy Eating The Tao of Healthy Eating Paperback Healthy/ Chinese Medicine $15.95 $8.21 4 Chinese dietary therapy is one of the most important aspects of Chinese medicine. The Tao of Healthy Eating illuminates the theory and practice of Chinese dietary therapy with emphasis on the concerns and attitudes of Westerners. 
Commonsense metaphors explain basic Chinese medical theories and their application in preventive and remedial dietary therapy. It features a clear description of the Chinese medical understanding of digestion and all the practical implications if this for day-to-day diet. Issues of Western interest are discussed, such as raw versus cooked foods, high cholesterol, food allergies, and candidacies. It includes the Chinese medical descriptions of 200 Western food and similar information on vitamins, minerals, and amino acids. Blue Poppy Press 01-01-1998 06-02-2007 Bob Flaws 128 +book 9781932394184 1932394184 us Java Reflection in Action (In Action series) Java Reflection in Action (In Action series) Paperback/ Illustrated $44.95 $17.99 4.5 Explaining the Java Reflection API and providing techniques for using it effectively, this guide describes the capabilities that allow a program to examine and modify itself at runtime. The java.lang.reflect package and its uses are covered, including a detailed discussion of Java's dynamic proxy facility. Less obvious reflective capabilities, such as call stack introspection and the Java class loader, are addressed. In recognition of the limitations of Java Reflection, the various ways to use Reflection to generate code and surpass these limitations are detailed. A discussion of performance analysis techniques and a look ahead at what is new in JDK 1.5 is included./ Manning Publications 10-02-2004 07-02-2007 Ira R. Forman/ Nate Forman 300 +book 9780977616602 0977616606 us Rails Recipes (Pragmatic Programmers) Rails Recipes (Pragmatic Programmers) Paperback Object-Oriented Design/ Internet $32.95 $22.46 4.5 Rails is large, powerful, and new. How do you use it effectively? How do you harness the power? 
And, most important, how do you get high quality, real-world applications written?/ From the latest Ajax effects to time-saving automation tips for your development process, Rails Recipes will show you how the experts have already solved the problems you have./ • Use generators to automate repetitive coding tasks./ • Create sophisticated role-based authentication schemes./ • Add live search and live preview to your site./ • Run tests when anyone checks code in./ • How to create tagged data the right way./ • and many, many more.../ Owning Rails Recipes is like having the best Rails programmers sitting next to you while you code./ Pragmatic Bookshelf 01-06-2006 01-02-2007 3 Chad Fowler 332 +book 0785342485677 0201485672 us Refactoring: Improving the Design of Existing Code Refactoring: Improving the Design of Existing Code Hardcover Object-Oriented Design/ Design Tools & Techniques/ Qualifying Textbooks - Winter 2007 $59.99 $39.99 4.5 Your class library works, but could it be better? Refactoring: Improving the Design of Existing Code shows how refactoring can make object-oriented code simpler and easier to maintain. Today refactoring requires considerable design know-how, but once tools become available, all programmers should be able to improve their code using refactoring techniques./ Besides an introduction to refactoring, this handbook provides a catalog of dozens of tips for improving code. The best thing about Refactoring is its remarkably clear presentation, along with excellent nuts-and-bolts advice, from object expert Martin Fowler. The author is also an authority on software patterns and UML, and this experience helps make this a better book, one that should be immediately accessible to any intermediate or advanced object-oriented developer. (Just like patterns, each refactoring tip is presented with a simple name, a "motivation," and examples using Java and UML.)/ Early chapters stress the importance of testing in successful refactoring. 
(When you improve code, you have to test to verify that it still works.) After the discussion on how to detect the "smell" of bad code, readers get to the heart of the book, its catalog of over 70 "refactorings"--tips for better and simpler class design. Each tip is illustrated with "before" and "after" code, along with an explanation. Later chapters provide a quick look at refactoring research./ Like software patterns, refactoring may be an idea whose time has come. This groundbreaking title will surely help bring refactoring to the programming mainstream. With its clear advice on a hot new topic, Refactoring is sure to be essential reading for anyone who writes or maintains object-oriented software. --Richard Dragan/ Topics Covered: Refactoring, improving software code, redesign, design tips, patterns, unit testing, refactoring research, and tools./ Addison-Wesley Professional 28-06-1999 07-02-2007 Martin Fowler/ Kent Beck/ John Brant/ William Opdyke/ Don Roberts 464 +book 0785342657838 020165783X us UML Distilled: A Brief Guide to the Standard Object Modeling Language UML Distilled: A Brief Guide to the Standard Object Modeling Language (2nd Edition) 2nd Paperback Object-Oriented Design/ Software Development/ UML $34.99 $2.95 4 The second edition of Martin Fowler's bestselling UML Distilled provides updates to the Unified Modeling Language (UML) without changing its basic formula for success. It is still arguably the best resource for quick, no-nonsense explanations of using UML./ The major strength of UML Distilled is its short, concise presentation of the essentials of UML and where it fits within today's software development process. The book describes all the major UML diagram types, what they're for, and the basic notation involved in creating and deciphering them. These diagrams include use cases; class and interaction diagrams; collaborations; and state, activity, and physical diagrams. 
The examples are always clear, and the explanations cut to the fundamental design logic./ For the second edition, the material has been reworked for use cases and activity diagrams, plus there are numerous small tweaks throughout, including the latest UML v. 1.3 standard. An appendix even traces the evolution of UML versions./ Working developers often don't have time to keep up with new innovations in software engineering. This new edition lets you get acquainted with some of the best thinking about efficient object-oriented software design using UML in a convenient format that will be essential to anyone who designs software professionally. --Richard Dragan/ Topics covered: UML basics, analysis and design, outline development (software development process), inception, elaboration, managing risks, construction, transition, use case diagrams, class diagrams, interaction diagrams, collaborations, state diagrams, activity diagrams, physical diagrams, patterns, and refactoring basics./ Addison-Wesley Professional 25-08-1999 07-02-2007 Martin Fowler/ Kendall Scott 185 +book 0752063320839 0672320835 us The Ruby Way The Ruby Way Paperback Object-Oriented Design/ Qualifying Textbooks - Winter 2007 $39.99 $13.69 4.5 The Ruby Way assumes that the reader is already familiar with the subject matter. 
Using many code samples it focuses on "how-to use Ruby" for specific applications, either as a stand-alone language, or in conjunction with other languages./ Topics covered include:/ • Simple data tasks;/ • Manipulating structured data;/ • External data manipulation;/ • User interfaces;/ • Handling threads;/ • System programming;/ • Network and web programming;/ • Tools and utilities./ Note: The appendices offer instruction on migrating from Perl and Python to Ruby, and extending Ruby in C and C++./ Sams 17-12-2001 07-02-2007 Hal Fulton 600 +book 0785342633610 0201633612 us Design Patterns: Elements of Reusable Object-Oriented Software (Addison-Wesley Professional Computing Series) Design Patterns: Elements of Reusable Object-Oriented Software (Addison-Wesley Professional Computing Series) Hardcover Object-Oriented Design/ Software Development/ Software Reuse/ HTML - General/ Gangs/ Design Tools & Techniques/ Object-Oriented Software Design/ Qualifying Textbooks - Winter 2007 $54.99 $30.99 4.5 Design Patterns is a modern classic in the literature of object-oriented development, offering timeless and elegant solutions to common problems in software design. It describes patterns for managing object creation, composing objects into larger structures, and coordinating control flow between objects. The book provides numerous examples where using composition rather than inheritance can improve the reusability and flexibility of code. Note, though, that it's not a tutorial but a catalog that you can use to find an object-oriented design pattern that's appropriate for the needs of your particular application--a selection for virtuoso programmers who appreciate (or require) consistent, well-engineered object-oriented designs. 
Addison-Wesley Professional 15-01-1995 07-02-2007 Erich Gamma/ Richard Helm/ Ralph Johnson/ John Vlissides 395 +book B0006AXOU2 us Gravity: [classic and modern views] (Science study series) Gravity: [classic and modern views] (Science study series) Unknown Binding Astrophysics & Space Science $0.99 Anchor Books 08-02-1962 08-02-2007 George Gamow 157 +book 9781422103296 1422103293 us Changing Minds: The Art And Science of Changing Our Own And Other People's Minds (Leadership for the Common Good) Changing Minds: The Art And Science of Changing Our Own And Other People's Minds (Leadership for the Common Good) Paperback Leadership/ Management/ Motivational/ Applied Psychology/ Cognitive $14.95 $5.99 3 Think about the last time you tried to change someone’s mind about something important: a voter’s political beliefs; a customer’s favorite brand; a spouse’s decorating taste. Chances are you weren’t successful in shifting that person’s beliefs in any way. In his book, Changing Minds, Harvard psychologist Howard Gardner explains what happens during the course of changing a mind – and offers ways to influence that process./ Remember that we don’t change our minds overnight, it happens in gradual stages that can be powerfully influenced along the way.This book provides insights that can broaden our horizons and shape our lives./ Harvard Business School Press 30-09-2006 08-02-2007 Howard Gardner 244 +book 9780596007331 0596007337 us We the Media We the Media Hardcover Culture/ Government/ Internet Publishing/ Journalism/ Media Studies/ Weblogs/ Technology & Society $24.95 $3.49 4.5 Grassroots journalists are dismantling Big Media's monopoly on the news, transforming it from a lecture to a conversation. Not content to accept the news as reported, these readers-turned-reporters are publishing in real time to a worldwide audience via the Internet. The impact of their work is just beginning to be felt by professional journalists and the newsmakers they cover. 
In We the Media: Grassroots Journalism by the People, for the People, nationally known business and technology columnist Dan Gillmor tells the story of this emerging phenomenon, and sheds light on this deep shift in how we make and consume the news. We the Media is essential reading for all participants in the news cycle:/ • Consumers learn how they can become producers of the news. Gillmor lays out the tools of the grassroots journalist's trade, including personal Web journals (called weblogs or blogs), Internet chat groups, email, and cell phones. He also illustrates how, in this age of media consolidation and diminished reporting, to roll your own news, drawing from the array of sources available online and even over the phone./ • Newsmakers politicians, business executives, celebrities get a wake-up call. The control that newsmakers enjoyed in the top-down world of Big Media is seriously undermined in the Internet Age. Gillmor shows newsmakers how to successfully play by the new rules and shift from control to engagement./ • Journalists discover that the new grassroots journalism presents opportunity as well as challenge to their profession. One of the first mainstream journalists to have a blog, Gillmor says, "My readers know more than I do, and that's a good thing." In We the Media, he makes the case to his colleagues that, in the face of a plethora of Internet-fueled news vehicles, they must change or become irrelevant./ At its core, We the Media is a book about people. People like Glenn Reynolds, a law professor whose blog postings on the intersection of technology and liberty garnered him enough readers and influence that he became a source for professional journalists. Or Ben Chandler, whose upset Congressional victory was fueled by contributions that came in response to ads on a handful of political blogs. Or Iraqi blogger Zayed, whose Healing Iraq blog (healingiraq.blogspot.com) scooped Big Media. 
Or acridrabbit, who inspired an online community to become investigative reporters and discover that the dying Kaycee Nichols sad tale was a hoax. Give the people tools to make the news, We the Media asserts, and they will. Journalism in the 21st century will be fundamentally different from the Big Media that prevails today. We the Media casts light on the future of journalism, and invites us all to be part of it. O'Reilly Media 08-02-2004 11-02-2007 Dan Gillmor 304 +book 9780894711350 0894711350 us Gray's Anatomy: The Unabridged Running Press Edition of the American Classic Gray's Anatomy: The Unabridged Running Press Edition of the American Classic Hardcover Bargain Books/ Reference/ Family Health/ Anatomy/ Surgery/ Bargain Books Outlet $18.98 $0.34 3.5 The leg bone's connected to the hip bone, and so on. For many of us, anatomy can seem intimidating and unrewarding, but the right teacher can clear such feelings away in a heartbeat. Our fascination with our bodies is a powerful force, and once we start looking, we find that beauty is much more than skin-deep./ It so happens that the right teacher can take the form of a book. Gray's Anatomy is one of those few titles that practically everybody has heard of, and with good reason--it is a scientific and artistic triumph. Not just a dry index of parts and names, Gray's lets the natural beauty and grace of the body's interconnected systems and structures shine forth from the page. Using sumptuous illustrations and clear, matter-of-fact descriptions, Dr. Gray unleashed a classic on the world more than 100 years ago. Its clarity and usefulness keep it in print today. Whether you want to understand yourself or others, knowledge of our physical parts and how they fit together is essential. Gray's Anatomy provides that information in a simple, timeless format that cleanly dissects a body of knowledge grown over centuries. 
This book will not only fill the needs of people in the medical profession, but will please artists and naturalists as well. --Rob Lightner/ Running Press Book Publishers 11-02-2007 Henry F. R. S. Gray/ T. Pickering Pick 1248 +book 9780976694076 0976694077 us Best of Ruby Quiz Volume One (Pragmatic Programmers) Best of Ruby Quiz Volume One (Pragmatic Programmers) Paperback/ Illustrated Object-Oriented Design/ Software Development $29.95 $13.71 4.5 Solve these twenty-five popular programming puzzles, and sharpen your programming skills as you craft solutions./ You'll find interesting and challenging programming puzzles including:/ • 800 Numbers/ • Crosswords/ • Cryptograms/ • Knight's Tour/ • Paper, Rock, Scissors/ • Tic-Tac-Toe/ • Texas Hold-Em/ • ...and more./ / Learning to program can be quite a challenge. Classes and books can get you so far, but at some point you have to sit down and start playing with some code. Only by reading and writing real code, with real problems, can you learn./ / The Ruby Quiz was built to fill exactly this need for Ruby programmers. Challenges, solutions, and discussions combine to make Ruby Quiz a powerful way to learn Ruby tricks. See how algorithms translate to Ruby code, get exposure to Ruby's libraries, and learn how other programmers use Ruby to solve problems quickly and efficiently./ Pragmatic Bookshelf 01-03-2006 08-02-2007 James Edward Gray 298 +book 9780312064945 0312064942 us The Alexander Technique: A Complete Course in How to Hold and Use Your Body for Maximum Energy The Alexander Technique: A Complete Course in How to Hold and Use Your Body for Maximum Energy Paperback Stress Management/ Psychology & Counseling/ Physical Therapy/ Pharmacology/ Exercise/ Alexander Technique/ Injuries & Rehabilitation $15.95 $5.00 3.5 The Alexander Technique is a proven process of mind and body reeducation that reduces stress and muscle tension, and revitalizes those who practice it. 
Used by many actors, athletes, and dancers, the technique can help anyone increase his or her energy and achieve a more dynamic presence./ / Written by a veteran instructor of the Alexander Technique, this authentic and easy-to-follow guide allows everyone to learn the increasingly popular program, with clear instructions for each exercise, and dozens of helpful photographs that show correct and incorrect positions to use for the exercises and throughout the day./ St. Martin's Griffin 15-11-1991 11-02-2007 John Gray 176 +book 9781556432149 1556432143 us Planet Medicine: Modalities Planet Medicine: Modalities Paperback Holistic/ Holistic Medicine/ Pharmacology/ History/ Philosophy of Medicine $25.00 $7.43 Planet Medicine is a major work by an anthropologist who looks at medicine in a broad context. In this edition, additions to this classic text include a section on Reiki, a comparison of types of palpation used in healing, updates on craniosacral therapy, and a means of understanding how different alternative medicines actually work. Illustrated throughout, this is the standard on the history, philosophy, and anthropology of this subject. North Atlantic Books 11-02-1995 11-02-2007 Richard Grossinger/ Spain Rodriguez/ Alex Grey 602 +book 9781590590997 1590590996 us Logging in Java with the JDK 1.4 Logging API and Apache log4j Logging in Java with the JDK 1.4 Logging API and Apache log4j Hardcover Software Development $49.99 $2.91 3 Logging in Java with the JDK 1.4 Logging API and Apache log4j is the first book to discuss the two foremost logging APIs: JDK 1.4.0 logging API and Apache log4j 1.2.6 logging API for application developers. The internals of each API are examined, contrasted, and compared in exhaustive depth. Programmers will find a wealth of information simply not available elsewhere--not even on the Internet./ Each concept explained is accompanied by code example written in Java language. 
The book also provides guidelines for extending the existing logging frameworks to cater to application-specific needs. This is an essential handbook for logging-related information and techniques needed for developing applications in the Java language./ Apress 15-04-2003 07-02-2007 Samudra Gupta 336 +book 0785342753066 0201753065 us Component Development for the Java Platform Component Development for the Java Platform Paperback Software Design/ Qualifying Textbooks - Winter 2007 $39.99 $4.41 4.5 Addison-Wesley Professional 15-12-2001 07-02-2007 1 Stuart Dabbs Halloway 304 +book 0636920002925 0596002920 us XML in a Nutshell, 2nd Edition XML in a Nutshell, 2nd Edition Paperback HTML - General/ XML/ Nutshell $39.95 $0.46 4 Continuing in the tradition of the Nutshell series, XML in a Nutshell provides a dense tutorial on its subject, as well as a useful day-to-day reference. While the reader isn't expected to have prior expertise in XML, this book is most effective as an add-on to a more introductory tutorial because of its relatively fast pace./ The authors set out to systematically--and rapidly--cover the basics of XML first, namely the history of the markup language and the various languages and technologies that compose the standard. In this first section, they discuss the basics of XML markup, Document Type Definitions (DTDs), namespaces, and Unicode. From there, the authors move into "narrative-centric documents" in a section that appropriately focuses on the application of XML to books, articles, Web pages and other readable content./ This book definitely presupposes in the reader an aptitude for picking up concepts quickly and for rapidly building cumulative knowledge. Code examples are used--only to illustrate the particular point in question--but not in excess. The book gets into "data-centric" XML, exploring the difference between the object-driven Document Object Model (DOM) and the event-driven Simple API for XML (SAX). 
However, these areas are a little underpowered and offer a bit less detail about this key area than the reader will expect./ At the core of any Nutshell book is the reference section, and the installment found inside this text is no exception. Here, the XML 1.0 standard, XPath, XSLT, DOM, SAX, and character sets are covered. Some material that is covered earlier in the book--such as Cascading Style Sheets (CSS)--is not re-articulated, however. XML in a Nutshell is not the only book on XML you should have, but it is definitely one that no XML coder should be without. --Stephen W. Plain/ Topics covered:/ • XML history/ • Document Type Definitions (DTDs)/ • Namespaces/ • Internationalization/ • XML-based data formats/ • XHTML/ • XSL/ • XPath/ • XLink/ • XPointer/ • Cascading Style Sheets (CSS)/ • XSL-FO/ • Document Object Model (DOM)/ • Simple API for XML (SAX)/ O'Reilly 15-06-2002 07-02-2007 Elliotte Rusty Harold/ W. Scott Means 640 +book 9781932394283 1932394281 us Lucene in Action (In Action series) Lucene in Action (In Action series) Paperback/ Illustrated $44.95 $31.14 4.5 Lucene is a gem in the open-source world—a highly scalable, fast search engine. It delivers performance and is disarmingly easy to use. Lucene in Action is the authoritative guide to Lucene. It describes how to index your data, including types you definitely need to know such as MS Word, PDF, HTML, and XML. It introduces you to searching, sorting, filtering, and highlighting search results./ / Lucene powers search in surprising places—in discussion groups at Fortune 100 companies, in commercial issue trackers, in email search from Microsoft, in the Nutch web search engine (that scales to billions of pages). It is used by diverse companies including Akamai, Overture, Technorati, HotJobs, Epiphany, FedEx, Mayo Clinic, MIT, New Scientist Magazine, and many others. Adding search to your application can be easy. 
With many reusable examples and good advice on best practices, Lucene in Action shows you how./ / What's Inside/ - How to integrate Lucene into your applications/ - Ready-to-use framework for rich document handling/ - Case studies including Nutch, TheServerSide, jGuru, etc./ - Lucene ports to Perl, Python, C#//.Net, and C++/ - Sorting, filtering, term vectors, multiple, and remote index searching/ - The new SpanQuery family, extending query parser, hit collecting/ - Performance testing and tuning/ - Lucene add-ons (hit highlighting, synonym lookup, and others)/ Manning Publications 28-12-2004 07-02-2007 Erik Hatcher/ Otis Gospodnetic 456 +book 9781930110588 1930110588 us Java Development with Ant Java Development with Ant Paperback/ Illustrated $44.95 $24.67 4.5 Encompassing Java-centric software project best practices for designing and automating build, test, and deployment processes using ANT, this book is written for developers using Java in large software projects and those who have reached the limits of classic IDE development systems. Benefiting developers who apply extreme programming methodology to Java projects, this resource provides detailed coverage of ANT and explains how to use it in large projects and extend it when needed. In addition to using ANT for Java applications, it includes discussions of servlets and J2EE applications, which cover the majority of Java development projects./ Manning Publications 08-02-2002 07-02-2007 Erik Hatcher/ Steve Loughran 672 +book 9781930110977 1930110979 us Code Generation in Action Code Generation in Action Paperback Software Design/ Software Development/ Systems Analysis & Design/ Coding Theory $44.95 $9.95 4.5 Developers using code generation are producing higher quality code faster than their hand-coding counterparts. And, they enjoy other advantages like maintainability, consistency and abstraction. 
Using the new CG methods they can make a change in one place, avoiding multiple synchronized changes you must make by hand./ Code Generation in Action shows you the techniques of building and using programs to write other programs. It shows how to avoid repetition and error to produce consistent, high quality code, and how to maintain it more easily. It demonstrates code generators for user interfaces, database access, remote procedure access, and much more./ Code Generation in Action is an A-to-Z guide covering building, buying, deploying and using code generators. If you are a software engineer-whether beginner or advanced-eager to become the "ideas person," the mover-and-shaker on your development team, you should learn CG techniques. This book will help you master them./ What's Inside:/ • Code generation basics/ • CG techniques and best practices/ • Patterns of CG design/ • How to deploy generators/ • Many example generators/ Includes generators for:/ • Database access/ • RPC/ • Unit tests/ • Documentation/ • Business logic/ • Data translation/ Over his twenty years of development experience, Jack Herrington has shipped many software applications helped by code generation techniques. He runs the Code Generation Network. Manning Publications 01-07-2003 07-02-2007 Jack Herrington 368 +book 9780140951448 014095144X us Tao of Pooh and Te of Piglet Boxed Set Tao of Pooh and Te of Piglet Boxed Set Paperback/ Box set History of Books/ 20th Century/ Taoism/ Entertainment/ Literature & Fiction/ Nonfiction/ Religion & Spirituality $27.00 $9.89 4 Is there such thing as a Western Taoist? Benjamin Hoff says there is, and this Taoist's favorite food is honey. Through brilliant and witty dialogue with the beloved Pooh-bear and his companions, the author of this smash bestseller explains with ease and aplomb that rather than being a distant and mysterious concept, Taoism is as near and practical to us as our morning breakfast bowl. 
Romp through the enchanting world of Winnie-the-Pooh while soaking up invaluable lessons on simplicity and natural living. Penguin (Non-Classics) 01-11-1994 11-02-2007 Benjamin Hoff +book 9780140230161 0140230165 us The Te of Piglet The Te of Piglet Paperback History of Books/ British/ Taoism/ Mysticism/ 20th Century $14.00 $1.05 3 In The Te of Piglet, a good deal of Taoist wisdom is revealed through the character and actions of A. A. Milne's Piglet. Piglet herein demonstrates a very important principle of Taoism: The Te-a Chinese word meaning Virtue-of the Small. Penguin (Non-Classics) 01-11-1993 11-02-2007 Benjamin Hoff 272 +book 9780553345841 0553345842 us The Mind's I: Fantasies and Reflections on Self and Soul The Mind's I: Fantasies and Reflections on Self and Soul Paperback Consciousness & Thought/ Cognitive Psychology/ Cognitive Science $18.95 $1.43 4.5 Brilliant, shattering, mind-jolting, The  Mind's I is a searching, probing book--a  cosmic journey of the mind--that goes deeply into  the problem of self and self-consciousness as  anything written in our time. From verbalizing  chimpanzees to scientific speculations involving  machines with souls, from the mesmerizing, maze-like  fiction of Borges to the tantalizing, dreamlike  fiction of Lem and Princess Ineffable, her circuits  glowing red and gold, The Mind's I   opens the mind to the Black Box of fantasy, to the  windfalls of reflection, to new dimensions of  exciting possibilities. Bantam 01-04-1985 08-02-2007 Douglas Hofstadter/ Daniel C. Dennett 512 +book 9780880225724 0880225726 us Unix Shell Commands Quick Reference (Que Quick Reference Series) Unix Shell Commands Quick Reference (Que Quick Reference Series) Paperback MacOS/ Shell/ Macintosh/ Macs $8.95 $0.77 4 Que Pub 10-02-1990 07-02-2007 William Holliker 154 +book 9780131855861 0131855867 us Spring Into HTML and CSS (Spring Into... Series) Spring Into HTML and CSS (Spring Into... 
Series) Paperback HTML - General/ Internet/ XHTML/ Qualifying Textbooks - Winter 2007 $29.99 $6.00 4 Addison-Wesley Professional 22-04-2005 07-02-2007 1 Molly E. Holzschlag 336 +book 9789629962166 9629962160 us Business Chinese Business Chinese Paperback English (All)/ Chinese/ Study & Teaching/ Qualifying Textbooks - Winter 2007 $33.00 $24.98 This book will help readers develop their competence in advanced Chinese in a business context. Rather than teaching language in isolation from substantive content, Business Chinese presents readers with both content and context. Exercises and tasks in the book require readers to integrate their language skills with their content knowledge. To meet learners' practical communication needs, the book focuses on both oral and written language skills./ In order to keep readers abreast of the real business world, all texts and exercises are drawn from authentic materials from mainland China, Taiwan and Hong Kong. Business Chinese is the perfect, practical guide for those who want to master Chinese language and the Chinese business world./ Chinese University Press 20-07-2005 09-02-2007 Jiaying Howard/ Tsengtseng Chang 311 +book 0785342616224 020161622X us The Pragmatic Programmer: From Journeyman to Master The Pragmatic Programmer: From Journeyman to Master Paperback Qualifying Textbooks - Winter 2007 $45.99 $21.84 4.5 Programmers are craftspeople trained to use a certain set of tools (editors, object managers, version trackers) to generate a certain kind of product (programs) that will operate in some environment (operating systems on hardware assemblies). Like any other craft, computer programming has spawned a body of wisdom, most of which isn't taught at universities or in certification classes. Most programmers arrive at the so-called tricks of the trade over time, through independent experimentation. 
In The Pragmatic Programmer, Andrew Hunt and David Thomas codify many of the truths they've discovered during their respective careers as designers of software and writers of code./ Some of the authors' nuggets of pragmatism are concrete, and the path to their implementation is clear. They advise readers to learn one text editor, for example, and use it for everything. They also recommend the use of version-tracking software for even the smallest projects, and promote the merits of learning regular expression syntax and a text-manipulation language. Other (perhaps more valuable) advice is more light-hearted. In the debugging section, it is noted that, "if you see hoof prints think horses, not zebras." That is, suspect everything, but start looking for problems in the most obvious places. There are recommendations for making estimates of time and expense, and for integrating testing into the development process. You'll want a copy of The Pragmatic Programmer for two reasons: it displays your own accumulated wisdom more cleanly than you ever bothered to state it, and it introduces you to methods of work that you may not yet have considered. Working programmers will enjoy this book. --David Wall/ Topics covered: A useful approach to software design and construction that allows for efficient, profitable development of high-quality products. Elements of the approach include specification development, customer relations, team management, design practices, development tools, and testing procedures. This approach is presented with the help of anecdotes and technical problems./ Addison-Wesley Professional 20-10-1999 07-02-2007 1 Andrew Hunt/ David Thomas 352 +book 9780974514017 0974514012 us Pragmatic Unit Testing in Java with JUnit Pragmatic Unit Testing in Java with JUnit Paperback/ Illustrated Software Development/ Testing/ Information Systems/ Information Theory $29.95 $16.46 4.5 Learn how to improve your Java coding skills using unit testing. 
Despite its name, unit testing is really a coding technique, not a testing technique. Unit testing is done by programmers, for programmers. It's primarily for our benefit: we get improved confidence in our code, better ability to make deadlines, less time spent in the debugger, and less time beating on the code to make it work correctly. This book shows how to write tests, but more importantly, it goes where other books fear to tread and gives you concrete advice and examples of what to test--the common things that go wrong in all of our programs. Discover the tricky hiding places where bugs breed, and how to catch them using the freely available JUnit framework. It's easy to learn how to think of all the things in your code that are likely to break. We'll show you how with helpful mnemonics, summarized in a handy tip sheet (also available from our www.pragmaticprogrammer.com website) to help you remember all this stuff. With this book you will:/ • Write better code, and take less time to write it/ • Discover the tricky places where bugs breed/ • Learn how to think of all the things that could go wrong/ • Test individual pieces of code without having to include the whole project/ • Test effectively with the whole team/ We'll also cover how to use Mock Objects for testing, how to write high quality test code, and how to use unit testing to improve your design skills. We'll show you frequent "gotchas"--along with the fixes--to save you time when problems come up. We'll show you how with helpful mnemonics, summarized in a handy tip sheet (also available from our www.pragmaticprogrammer.com website). But the best part is that you don't need a sweeping mandate to change your whole team or your whole company. You don't need to adopt Extreme Programming or Test-Driven Development, or change your development process in order to reap the proven benefits of unit testing. You can start unit testing, the pragmatic way, right away. 
The Pragmatic Programmers 09-02-2003 07-02-2007 Andrew Hunt/ David Thomas 159 +book 9780596000400 0596000405 us Java Servlet Programming, 2nd Edition Java Servlet Programming, 2nd Edition Paperback/ Illustrated Web Site Design/ Java/ Web Programming/ Servlets $44.95 $7.90 4 Aimed at Web developers with some previous Java experience, Java Servlet Programming, Second Edition, offers a solid introduction to the world of Java development with Servlets and related technologies. Thoroughly revised and newly updated with over a half-dozen new chapters, this title brings an already useful text up to speed with some leading-edge material. It excels particularly in explaining how to program dynamic Web content using Java Servlets, with a fine introduction to all the APIs, programming techniques, and tips you will need to be successful with this standard./ Besides a useful guide to APIs, the book looks at a variety of techniques for saving session state, as well as showing how Servlets can work together to power Web sites. You will learn performance tips and ways to get Servlets to work together (like forwarding and redirection), plus the basics of database programming with JDBC, to build content with "live" data. A later chapter examines what's next for Servlets with the emerging Servlet 2.3 API standard. Importantly, the authors go over deploying and configuring Web applications by editing XML files, a must-have for successfully running Servlets in real applications./ Since the first edition of this title, the choices for Java Web developers have grown much richer. Many of the new chapters in this edition look at options beyond Servlets. 
Short sections on application frameworks such as Tea, WebMacro, the Element Construction Set (ECS), XMLC, and JavaServer Pages (JSP) let you explore what's out there for Java developers today with a survey of some current tools that can speed up creating new Web applications./ The text closes with reference sections on Servlet APIs (and other material) that will be useful for any working developer. Although Servlets are not the only game in town, they are still important tools for successful Web development. This updated edition shows you just how to do it with plenty of basic and advanced tips for taking full advantage of this powerful Java standard. --Richard Dragan/ Topics covered:/ • Overview and history of Java Servlets/ • Fundamentals of HTTP/ • Web applications (including deployment and configuration using XML files)/ • The Servlet lifecycle (initializing, processing requests, cleanup, and caching)/ • Multimedia content (images and compressed content)/ • WAP and WML for wireless content/ • Servlet session tracking techniques (hidden form fields, cookies, and URL rewriting)/ • Security issues with Servlets (including certificates and SSL)/ • Tutorial for JDBC and Java database programming/ • Using applets and Servlets together/ • Servlet collaboration/ • Quick introduction to Java 2 Enterprise Edition (J2EE)/ • Internationalization issues/ • Survey of third-party Servlet application frameworks and tools: Tea, WebMacro, the Element Construction Set (ECS), XMLC, and JavaServer Pages (JSP)/ • Miscellaneous tips for Servlets (including sending e-mail and using regular expressions)/ • Description of the new Servlet 2.3 API spec/ • Servlet API quick reference/ O'Reilly Media 15-01-2001 07-02-2007 Jason Hunter 753 +book 9781930110991 1930110995 us JUnit in Action JUnit in Action Paperback/ Illustrated Object-Oriented Design/ Software Design/ Testing/ Systems Analysis & Design $39.95 $21.83 4.5 A guide to unit testing Java applications (including J2EE applications) 
using the JUnit framework and its extensions, this book provides techniques for solving real-world problems such as unit testing legacy applications, writing real tests for real objects, automating tests, testing in isolation, and unit testing J2EE and database applications. Using a sample-driven approach, various unit testing strategies are covered, such as how to unit test EJBs, database applications, JSPs, and Taglibs. Also addressed are testing strategies using freely available open source frameworks and tools, and how to unit test in isolation with Mock Objects. Testing J2EE applications by running tests from inside the container for performing integration unit tests is discussed, as is how to automate unit testing in automated builds (such as Ant and Maven) for performing continuous integration./ Manning Publications 28-10-2003 07-02-2007 Ted Husted/ Vincent Massol 384 +book 9781932394498 1932394494 us RSS and Atom in Action: Web 2.0 Building Blocks RSS and Atom in Action: Web 2.0 Building Blocks Paperback/ Illustrated Web Site Design/ Internet $39.95 $19.98 4.5 RSS and Atom in Action is organized into two parts. The first part introduces the blog technologies of news feed formats and publishing protocols-the building blocks. The second part shows how to put those blocks together to assemble interesting and useful blog applications. In keeping with the spirit behind Manning's "In Action" series, this book shows the reader, through numerous examples in Java and C#, how to parse Atom and RSS format newsfeeds, how to generate valid newsfeeds and serve them efficiently, and how to automate blogging via web services based on the new Atom protocol and the older MetaWeblog API. The book also shows how to develop a complete blog client library that readers can use in their own applications. 
The second half of the book is devoted to a dozen blog apps-small but immediately useful example applications such as a community aggregator, a file distribution newsfeed, a blog cross-poster, an email-to-blog gateway, Ant tasks for blogging softwarebuilds, and more./ Manning Publications 31-07-2006 07-02-2007 Dave Johnson 300 +book 9780684868769 0684868768 us Emergence: The Connected Lives of Ants, Brains, Cities, and Software Emergence: The Connected Lives of Ants, Brains, Cities, and Software Paperback Urban/ Chaos & Systems/ History of Science/ Acoustics & Sound/ System Theory/ General & Reference/ Systems Analysis & Design/ Information Theory $15.00 $4.40 3.5 An individual ant, like an individual neuron, is just about as dumb as can be. Connect enough of them together properly, though, and you get spontaneous intelligence. Web pundit Steven Johnson explains what we know about this phenomenon with a rare lucidity in Emergence: The Connected Lives of Ants, Brains, Cities, and Software. Starting with the weird behavior of the semi-colonial organisms we call slime molds, Johnson details the development of increasingly complex and familiar behavior among simple components: cells, insects, and software developers all find their place in greater schemes./ / Most game players, alas, live on something close to day-trader time, at least when they're in the middle of a game--thinking more about their next move than their next meal, and usually blissfully oblivious to the ten- or twenty-year trajectory of software development. No one wants to play with a toy that's going to be fun after a few decades of tinkering--the toys have to be engaging now, or kids will find other toys./ Johnson has a knack for explaining complicated and counterintuitive ideas cleverly without stealing the scene. Though we're far from fully understanding how complex behavior manifests from simple units and rules, our awareness that such emergence is possible is guiding research across disciplines. 
Readers unfamiliar with the sciences of complexity will find Emergence an excellent starting point, while those who were chaotic before it was cool will appreciate its updates and wider scope. --Rob Lightner/ Scribner 27-08-2002 11-02-2007 Steven Johnson 288 +book 9780743241663 0743241665 us Mind Wide Open: Your Brain and the Neuroscience of Everyday Life Mind Wide Open: Your Brain and the Neuroscience of Everyday Life Paperback Consciousness & Thought/ Applied Psychology/ Neuropsychology/ Personality/ Physiology/ Neuroscience $15.00 $4.96 4 Given the opportunity to watch the inner workings of his own brain, Steven Johnson jumps at the chance. He reveals the results in Mind Wide Open, an engaging and personal account of his foray into edgy brain science. In the 21st century, Johnson observes, we have become used to ideas such as "adrenaline rushes" and "serotonin levels," without really recognizing that complex neurobiology has become a commonplace thing to talk about. He sees recent laboratory revelations about the brain as crucial for understanding ourselves and our psyches in new, post-Freudian ways. Readers shy about slapping electrodes on their own temples can get a vicarious scientific thrill as Johnson tries out empathy tests, neurofeedback, and fMRI scans. The results paint a distinct picture of the author, and uncover general brain secrets at the same time. Memory, fear, love, alertness--all the multitude of states housed in our brains are shown to be the results of chemical and electrical interactions constantly fed and changed by input from our senses. Mind Wide Open both satisfies curiosity and provokes more questions, leaving readers wondering about their own gray matter. --Therese Littleton Scribner 03-05-2005 11-02-2007 Steven Johnson 288 +book 9780139376818 013937681X us The UNIX Programming Environment The UNIX Programming Environment Paperback History/ Unix/ History of Ideas $49.99 $14.04 4.5 Prentice Hall 03-02-1984 07-02-2007 Brian W. 
Kernighan/ Rob Pike 357 +book 9780312421434 0312421435 us No Logo: No Space, No Choice, No Jobs No Logo: No Space, No Choice, No Jobs Paperback Company Histories/ Labor Policy/ International/ Labor & Industrial Relations/ Production & Operations/ Advertising/ Global/ Ethics/ Anthropology/ Consumer Guides/ Media Studies/ Culture $15.00 $6.38 4 We live in an era where image is nearly everything, where the proliferation of brand-name culture has created, to take one hyperbolic example from Naomi Klein's No Logo, "walking, talking, life-sized Tommy [Hilfiger] dolls, mummified in fully branded Tommy worlds." Brand identities are even flourishing online, she notes--and for some retailers, perhaps best of all online: "Liberated from the real-world burdens of stores and product manufacturing, these brands are free to soar, less as the disseminators of goods or services than as collective hallucinations."/ In No Logo, Klein patiently demonstrates, step by step, how brands have become ubiquitous, not just in media and on the street but increasingly in the schools as well. (The controversy over advertiser-sponsored Channel One may be old hat, but many readers will be surprised to learn about ads in school lavatories and exclusive concessions in school cafeterias.) The global companies claim to support diversity, but their version of "corporate multiculturalism" is merely intended to create more buying options for consumers. When Klein talks about how easy it is for retailers like Wal-Mart and Blockbuster to "censor" the contents of videotapes and albums, she also considers the role corporate conglomeration plays in the process. How much would one expect Paramount Pictures, for example, to protest against Blockbuster's policies, given that they're both divisions of Viacom?/ Klein also looks at the workers who keep these companies running, most of whom never share in any of the great rewards. 
The president of Borders, when asked whether the bookstore chain could pay its clerks a "living wage," wrote that "while the concept is romantically appealing, it ignores the practicalities and realities of our business environment." Those clerks should probably just be grateful they're not stuck in an Asian sweatshop, making pennies an hour to produce Nike sneakers or other must-have fashion items. Klein also discusses at some length the tactic of hiring "permatemps" who can do most of the work and receive few, if any, benefits like health care, paid vacations, or stock options. While many workers are glad to be part of the "Free Agent Nation," observers note that, particularly in the high-tech industry, such policies make it increasingly difficult to organize workers and advocate for change./ But resistance is growing, and the backlash against the brands has set in. Street-level education programs have taught kids in the inner cities, for example, not only about Nike's abusive labor practices but about the astronomical markup in their prices. Boycotts have commenced: as one urban teen put it, "Nike, we made you. We can break you." But there's more to the revolution, as Klein optimistically recounts: "Ethical shareholders, culture jammers, street reclaimers, McUnion organizers, human-rights hacktivists, school-logo fighters and Internet corporate watchdogs are at the early stages of demanding a citizen-centered alternative to the international rule of the brands ... as global, and as capable of coordinated action, as the multinational corporations it seeks to subvert."No Logo is a comprehensive account of what the global economy has wrought and the actions taking place to thwart it. 
--Ron Hogan/ Picador 06-04-2002 11-02-2007 Naomi Klein 528 +book 0029236723101 0789723107 us Don't Make Me Think: A Common Sense Approach to Web Usability Don't Make Me Think: A Common Sense Approach to Web Usability Paperback Web Site Design/ Internet Publishing/ Interface Design $35.00 $14.98 5 Usability design is one of the most important--yet often least attractive--tasks for a Web developer. In Don't Make Me Think, author Steve Krug lightens up the subject with good humor and excellent, to-the-point examples./ The title of the book is its chief personal design premise. All of the tips, techniques, and examples presented revolve around users being able to surf merrily through a well-designed site with minimal cognitive strain. Readers will quickly come to agree with many of the book's assumptions, such as "We don't read pages--we scan them" and "We don't figure out how things work--we muddle through." Coming to grips with such hard facts sets the stage for Web design that then produces topnotch sites./ Using an attractive mix of full-color screen shots, cute cartoons and diagrams, and informative sidebars, the book keeps your attention and drives home some crucial points. Much of the content is devoted to proper use of conventions and content layout, and the "before and after" examples are superb. Topics such as the wise use of rollovers and usability testing are covered using a consistently practical approach./ This is the type of book you can blow through in a couple of evenings. But despite its conciseness, it will give you an expert's ability to judge Web design. You'll never form a first impression of a site in the same way again. --Stephen W. 
Plain/ Topics covered:/ • User patterns/ • Designing for scanning/ • Wise use of copy/ • Navigation design/ • Home page layout/ • Usability testing/ New Riders Press 13-10-2000 07-02-2007 Steve Krug 195 +book 9781585730407 1585730408 us Pocket Menu Reader China (Pocket Dictionaries) (Langenscheidt's Pocket Menu Reader) Pocket Menu Reader China (Pocket Dictionaries) (Langenscheidt's Pocket Menu Reader) Paperback Chinese/ Dictionaries; Polyglot/ Phrasebooks - General/ Dining $7.95 $4.98 1 Each Pocket Menu Reader is an indispensable gastronomic dictionary, phrasebook, and guidebook. It includes more than 1,500 words with translations and pronunciations, comprehensive treatment of the country's cuisine, an alphabetical list of dishes and culinary terms, plus a gourmet's selection of recipes. Langenscheidt Publishers 15-11-2000 11-02-2007 Langenscheidt 189 +book 9780897500487 0897500482 us Tao of Jeet Kune Do Tao of Jeet Kune Do Paperback Contemporary/ New Age $16.95 $7.00 4.5 To watch Bruce Lee on film is an amazing experience. Those who have read Tao of Jeet Kune Do, however, know that Lee's prose can also be exhilarating. This praiseworthy and enduring bestseller (mainly written over six months when Lee was bedridden with back problems) compiles philosophical aphorisms, explanations on technique, and sketches by the master himself. Ohara Publications 07-02-1993 06-02-2007 Bruce Lee 208 +book 9780974175706 0974175706 us Getting Around in Chinese Getting Around in Chinese Paperback Study & Teaching $25.00 $13.25 3 Marco Liang & Co. 
02-02-2003 11-02-2007 Marco Liang 669 +book 9780201570090 0201570092 us China: Empire of Living Symbols China: Empire of Living Symbols Hardcover Photo Essays/ Chinese/ Linguistics $39.90 $1,203.99 4.5 Perseus Books 11-02-1991 11-02-2007 Cecilia Lindqvist 423 +book 9781556432767 1556432763 us Ba Gua: Hidden Knowledge in the Taoist Internal Martial Art Ba Gua: Hidden Knowledge in the Taoist Internal Martial Art Paperback Reference $16.95 $6.49 4 North Atlantic Books 12-02-1998 11-02-2007 Hsing-Han Liu/ John Bracy 138 +book 9780738204314 0738204315 us The Cluetrain Manifesto: The End of Business as Usual The Cluetrain Manifesto: The End of Business as Usual Paperback Strategy & Competition/ Theory/ Customer Service/ Systems & Planning/ Consumerism/ Web Marketing/ Social Theory/ Peripherals $14.00 $1.79 4 How would you classify a book that begins with the salutation, "People of Earth..."? While the captains of industry might dismiss it as mere science fiction, The Cluetrain Manifesto is definitely of this day and age. Aiming squarely at the solar plexus of corporate America, authors Christopher Locke, Rick Levine, Doc Searls, and David Weinberger show how the Internet is turning business upside down. They proclaim that, thanks to conversations taking place on Web sites and message boards, and in e-mail and chat rooms, employees and customers alike have found voices that undermine the traditional command-and-control hierarchy that organizes most corporate marketing groups. "Markets are conversations," the authors write, and those conversations are "getting smarter faster than most companies." 
In their view, the lowly customer service rep wields far more power and influence in today's marketplace than the well-oiled front office PR machine./ The Cluetrain Manifesto began as a Web site (www.cluetrain.com) in 1999 when the authors, who have worked variously at IBM, Sun Microsystems, the Linux Journal, and NPR, posted 95 theses that pronounced what they felt was the new reality of the networked marketplace. For example, thesis no. 2: "Markets consist of human beings, not demographic sectors"; thesis no. 20: "Companies need to realize their markets are often laughing. At them"; thesis no. 62: "Markets do not want to talk to flacks and hucksters. They want to participate in the conversations going on behind the corporate firewall"; thesis no. 74: "We are immune to advertising. Just forget it." The book enlarges on these themes through seven essays filled with dozens of stories and observations about how business gets done in America and how the Internet will change it all. While Cluetrain will strike many as loud and over the top, the message itself remains quite relevant and unique. This book is for anyone interested in the Internet and e-commerce, and is especially important for those businesses struggling to navigate the topography of the wired marketplace. All aboard! --Harry C. Edwards/ Perseus Books Group 09-01-2001 11-02-2007 Christopher Locke/ Rick Levine/ Doc Searls/ David Weinberger 190 +book 9780806906164 0806906162 us Chinese System Of Natural Cures Chinese System Of Natural Cures Paperback Herbal Remedies/ Basic Science/ History/ Chinese Medicine $11.95 $1.98 3 Discover traditional Chinese herbal healing formulas--and how to use the Four Energies, the Five Flavors, and the Four Movements to prescribe various herbal treatments, as well as acupuncture and other methods of pain relief. 
Detailed sections of specific treatments of patients' complaints, and recommended herbal treatments for diagnosed diseases, including high cholesterol, diabetes, heart and coronary problems, arthritis, allergies, and more./ Sterling 31-12-1994 11-02-2007 Henry C. Lu 160 +book 9780156799805 0156799804 us The Secret of the Golden Flower: A Chinese Book of Life The Secret of the Golden Flower: A Chinese Book of Life Paperback Taoism/ New Age/ Behavioral Psychology $12.00 $1.24 4 1955. The point of view established in this volume is that the spirit must lean on science as its guide in the world of reality, and that science must turn to the spirit for the meaning of life. This book lends us a new approach to the East, and it also strengthens the point of view evolving in the West with respect to the psyche. Wilhelm provides the reader with the text and explanation, while another section contains commentary by Jung. Harvest//HBJ Book 08-02-1962 08-02-2007 Tung-Pin Lu 149 +book 9780394717272 0394717279 us Acupuncture: The Ancient Chinese Art of Healing and How it Works Scientifically Acupuncture: The Ancient Chinese Art of Healing and How it Works Scientifically Paperback Acupuncture & Acupressure/ Healing/ China/ Pharmacology $11.00 $0.01 Dr. Felix Mann, President of the Medical Acupuncture Society, is one of the outstanding Western practitioners of the ancient Chinese art, which he has been using for some years in London. In this complete revision of his 1962 book -- over half of which is entirely new material -- he describes in detail for the first time how acupuncture works from a scientific point of view, explaining the neurophysiological mechanism involved as well as the basic principles and laws according to the theories of traditional Chinese medicine. Written for both the layman and the medical profession, the book illustrates its points with case histories drawn from Dr. Mann's own patients in England. 
Vintage 12-01-1973 06-02-2007 Felix Mann 256 +book 9780135974445 0135974445 us Agile Software Development, Principles, Patterns, and Practices Agile Software Development, Principles, Patterns, and Practices Hardcover C & C++ Windows Programming/ Object-Oriented Design/ Software Development/ Qualifying Textbooks - Winter 2007 $68.20 $38.50 4.5 Prentice Hall 15-10-2002 03-02-2007 Robert C. Martin 552 +book 9780974514062 0974514063 us Pragmatic Version Control Using Subversion Pragmatic Version Control Using Subversion Paperback/ Illustrated Software Design/ Software Development/ Software Engineering $29.95 $23.94 4.5 This book covers the theory behind version control and how it can help developers become more efficient, work better as a team, and keep on top of software complexity. All projects need version control: it's the lifeblood of any project's infrastructure, yet half of all project teams in the U.S. don't use any version control at all. Many others don't use it well and end up experiencing time-consuming problems. Version control, done well, is your "undo" button for the project: nothing is final, and mistakes are easily rolled back. This book describes Subversion, the latest and hottest open source version control system, using a recipe-based approach that will get you up and running quickly--and correctly. Learn how to use Subversion the right way--the pragmatic way. With this book, you can:/ • Keep all project assets safe--not just source code--and never run the risk of losing a great idea/ • Know how to undo bad decisions--even directories and symlinks are versioned/ • Learn how to share code safely, and work in parallel for maximum efficiency/ • Install Subversion and organize, administer and backup your repository/ • Share code over a network with Apache, svnserve, or ssh/ • Create and manage releases, code branches, merges and bug fixes/ • Manage 3rd party code safely/ Now there's no excuse not to use professional-grade version control. 
Pragmatic Bookshelf 08-02-2005 07-02-2007 Mike Mason 207 +book 9780804836340 0804836345 us Chinese Character Fast Finder Chinese Character Fast Finder Paperback English (All)/ Chinese $19.95 $13.48 5 Chinese Character Fast Finder allows users to find Chinese characters based on their appearance alone, without knowing their pronunciation, radical or stroke count. This reference book has been designed for serious learners of Chinese as well as readers with an interest in written Chinese./ Convenient features include printed thumb-index marks for rapid access to any character; all the characters prescribed for the Chinese government's official HSK (Hanyu Shuiping Koshi) Language Proficiency Test, and simplified characters and their pinyin pronunciation. Tuttle Publishing 15-03-2005 07-02-2007 Laurence Matthews 256 +book 9780596009281 0596009283 us Firefox Hacks: Tips & Tools for Next-Generation Web Browsing (Hacks) Firefox Hacks: Tips & Tools for Next-Generation Web Browsing (Hacks) Paperback/ Illustrated Privacy/ Network Security/ Software Development/ Web Browsers/ Web Programming/ Internet Security/ Qualifying Textbooks - Winter 2007 $24.95 $12.74 4.5 Firefox Hacks is ideal for power users who want to take full advantage of Firefox from Mozilla, the next-generation web browser that is rapidly subverting Internet Explorer's once-dominant audience. It's also the first book that specifically dedicates itself to this technology. Firefox is winning such widespread approval for a number of reasons, including the fact that it lets users browse faster and more efficiently. Perhaps its most appealing strength, though, is its increased security something that is covered in great detail in Firefox Hacks. Clearly the web browser of the future, Firefox includes most of the features that browser users are familiar with, along with several new features, such as a bookmarks toolbar and tabbed pages that allow users to quickly switch among several web sites. 
Firefox Hacks offers all the valuable tips and tools you need to maximize the effectiveness of this hot web application. It's all covered, including how to customize its deployment, appearance, features, and functionality. You'll even learn how to install, use, and alter extensions and plug-ins. Aimed at clever people who may or may not be capable of basic programming tasks, this convenient resource describes 100 techniques for 100 strategies that effectively exploit Firefox. Or, put another way, readers of every stripe will find all the user-friendly tips, tools, and tricks they need to make a productive switch to Firefox. With Firefox Hacks, a superior and safer browsing experience is truly only pages away. The latest in O'Reilly's celebrated Hacks series, Firefox Hacks smartly complements other web-application titles such as Google Hacks and PayPal Hacks. O'Reilly Media 14-03-2005 07-02-2007 Nigel McFarlane 377 +book 9780321356703 0321356705 us Software Security: Building Security In (Addison-Wesley Software Security Series) Software Security: Building Security In (Addison-Wesley Software Security Series) Paperback Privacy/ Network Security/ Software Development $49.99 $6.40 5 Addison-Wesley Professional 23-01-2006 07-02-2007 1 Gary McGraw 448 +book 9780124848306 0124848303 us Programming for the Newton Using Windows Programming for the Newton Using Windows Paperback Object-Oriented Design/ Software Development/ Windows - General/ PCs $34.95 $9.46 5 Morgan Kaufmann Pub 09-02-1996 07-02-2007 Julie McKeehan/ Neil Rhodes 440 +book 0676251832068 0804832064 us Reading and Writing Chinese: A Guide to the Chinese Writing System Reading and Writing Chinese: A Guide to the Chinese Writing System Paperback Chinese/ Phrasebooks - General/ Southeast Asian/ Reading Skills/ Study & Teaching/ Writing Skills $24.95 $12.50 4 Reading and Writing Chinese has been the standard text for foreign students and teachers of the Chinese Writing System since Tuttle first published it over 
20 years ago. This new, completely revised edition offers students a more convenient, efficient, and up-to-date introduction to the writing system./ Charles E. Tuttle Co. 09-02-1999 11-02-2007 William McNaughton/ Li Ying 368 +book 9780877736769 0877736766 us WAY OF CHAUNG TZU (Shambhala Pocket Classics) WAY OF CHAUNG TZU (Shambhala Pocket Classics) Paperback Taoism/ Eastern Philosophy/ Comparative Religion/ Paperback $6.00 $5.80 5 Working from existing translations, Thomas Merton composed a series of personal versions from his favorites among the classic sayings of Chuang Tzu, the most spiritual of the Chinese philosophers. Chuang Tzu, who wrote in the fourth and third centuries B.C., is the chief authentic historical spokesman for Taoism and its founder Lao Tzu (a legendary character known largely through Chuang Tzu's writings). Indeed it was because of Chuang Tzu and the other Taoist sages that Indian Buddhism was transformed, in China, into the unique vehicle we now call by its Japanese name — Zen. The Chinese sage abounds in wit, paradox, satire, and shattering insight into the true ground of being. Father Merton, no stranger to Asian thought, brings a vivid, modern idiom to the timeless wisdom of Tao. Illustrated with early Chinese drawings. Shambhala 30-06-1992 11-02-2007 Thomas Merton 240 +book 0636920926221 1565926226 us Cascading Style Sheets: The Definitive Guide Cascading Style Sheets: The Definitive Guide Paperback Web Site Design/ Internet Publishing/ Database Design/ Structured Design/ HTML - General/ Web Programming/ Web Authoring & Design $34.95 $1.70 4 Cascading Style Sheets can put a great deal of control and flexibility into the hands of a Web designer--in theory. In reality, however, varying browser support for CSS1 and lack of CSS2 implementation makes CSS a very tricky topic. 
Cascading Style Sheets: The Definitive Guide is a comprehensive text that shows how to take advantage of the benefits of CSS while keeping compatibility issues in mind./ The book is very upfront about the spotty early browser support for CSS1 and the sluggish adoption of CSS2. However, enthusiasm for the technology spills out of the pages, making a strong case for even the most skeptical reader to give CSS a whirl and count on its future. The text covers CSS1 in impressive depth--not only the syntactical conventions but also more general concepts such as specificity and inheritance. Frequent warnings and tips alert the reader to browser-compatibility pitfalls./ Entire chapters are devoted to topics like units and values, visual formatting and positioning, and the usual text, fonts, and colors. This attention to both detail and architecture helps readers build a well-rounded knowledge of CSS and equips readers for a future of real-world debugging. Cascading Style Sheets honestly explains the reasons for avoiding an in-depth discussion of the still immature CSS2, but covers the general changes over CSS1 in a brief chapter near the end of the book./ When successfully implemented, Cascading Style Sheets result in much more elegant HTML that separates form from function. This fine guide delivers on its promise as an indispensable tool for CSS coders. --Stephen W. Plain/ Topics covered:/ • HTML with CSS/ • Selectors and structure/ • Units/ • Text manipulation/ • Colors and backgrounds/ • Boxes and borders/ • Visual formatting principles/ • Positioning/ • CSS2 preview/ • CSS case studies/ O'Reilly 15-05-2000 07-02-2007 Eric Meyer 470 +book 0636920001201 0596001207 us CSS Pocket Reference CSS Pocket Reference Paperback Web Graphics/ Web Site Design/ Internet Publishing/ HTML - General/ Pocket/ Web Programming/ Web Authoring & Design $9.95 $2.11 4 CSS (Cascading Style Sheets) is the W3C-approved method for enriching the visual presentation of web pages. 
CSS allows web pages to become more structural, and at the same time promises that they can have a more sophisticated look than ever before. With good implementations in Internet Explorer 5.0 and Opera 3.6, and 100% CSS1 support expected in Netscapes's Mozilla browser, signs are that CSS is rapidly becoming a useful, reliable, and powerful tool for web authors./ The CSS Pocket Reference briefly introduces CSS and then lists all CSS1 properties, plus the CSS1 pseudo-elements and pseudo-classes. Since browser incompatibility is the biggest obstacle to CSS adoption, we've also included a comprehensive guide to how the browsers have implemented support for CSS1. For anyone who wants to correctly implement CSS, this is a handy condensed reference to all the details in the larger volume, Cascading Style Sheets: The Definitive Guide./ O'Reilly 16-05-2001 07-02-2007 Eric A. Meyer 96 +book 0752064712459 073571245X us Eric Meyer on CSS: Mastering the Language of Web Design Eric Meyer on CSS: Mastering the Language of Web Design Paperback Web Site Design/ Internet Publishing/ HTML - General/ jp-unknown1/ Qualifying Textbooks - Winter 2007 $45.00 $9.99 4.5 There are several other books on the market that serve as in-depth technical guides or reference books for CSS. None, however, take a more hands-on approach and use practical examples to teach readers how to solve the problems they face in designing with CSS - until now. Eric Meyer provides a variety of carefully crafted projects that teach how to use CSS and why particular methods were chosen. The web site includes all of the files needed to complete the tutorials in the book. In addition, bonus information is be posted. New Riders Press 28-06-2002 03-02-2007 Eric A. 
Meyer 352 +book 9780865681743 0865681740 us Xing Yi Nei Gong: Xing Yi Health Maintenance and Internal Strength Development Xing Yi Nei Gong: Xing Yi Health Maintenance and Internal Strength Development Paperback $21.95 $14.05 4.5 This is the most complete book on the art of xing yi (hsing Yi) available. It includes the complete xing yi history and lineage going back eight generations; manuscripts handed down from famous practitioners Dai Long Bang and Li Neng Ran; 16 health maintenance and power development exercises; qigong (chi kung) exercises; xing yi long spear power training exercises; and more. Unique Publications 10-02-1998 03-02-2007 Dan Miller/ Tim Cartmell 200 +book 9781883175009 1883175003 us Liang Zhen Pu Eight Diagram Palm Liang Zhen Pu Eight Diagram Palm Paperback Taichi/ jp-unknown3 $17.95 $14.99 5 High View Pubns 04-02-1993 11-02-2007 Li Zi Ming 168 +book 9780609810347 0609810340 us Bhagavad Gita: A New Translation Bhagavad Gita: A New Translation Paperback Eastern/ Bhagavad Gita $13.95 $7.45 3.5 On the list of the greatest spiritual books of all time, the Bhagavad Gita resides permanently in the top echelon. This poem of patently Indian genius sprouted an immense tree of devotional, artistic, and philosophical elaboration in the subcontinent. The scene is a battlefield with the prince Arjuna pitted against his own family, but no sooner does the poem begin than the action reverts inward. Krishna, Arjuna's avatar and spiritual guide, points the way to the supreme wisdom and perfect freedom that lie within everyone's reach. Worship and be faithful, meditate and know reality--these make up the secret of life and lead eventually to the realization that the self is the root of the world. In this titular translation, Stephen Mitchell's rhythms are faultless, making music of this ancient "Song of the Blessed One." Savor his rendition, but nibble around the edges of his introduction. 
In a bizarre mixture of praise and condescension, Mitchell disregards two millennia of Indian commentary, seeking illumination on the text from Daoism and Zen, with the Gita coming up just shy of full spiritual merit. Perhaps we should take it from Gandhi, who used the Gita as a handbook for life, that it nourishes on many levels. --Brian Bruya Three Rivers Press 27-08-2002 11-02-2007 Stephen Mitchell 224 +book 9780060923211 0060923210 us The Gospel According to Jesus The Gospel According to Jesus Paperback Classics/ New Testament/ Study/ Inspirational/ Christology $14.00 $2.62 3.5 A dazzling presentation of the life and teachings of Jesus by the eminent scholar and translator Stephen Mitchell. Harper Perennial 31-03-1993 08-02-2007 Stephen Mitchell 320 +book 9780690012903 069001290X us Flower, Moon, Snow: A Book of Haiku Flower, Moon, Snow: A Book of Haiku Library Binding 20th Century/ Japanese & Haiku/ United States $12.89 $4.40 Crowell 04-02-1977 03-02-2007 Kazue Mizumura 48 +book 9780060922245 0060922249 us Care of the Soul : A Guide for Cultivating Depth and Sacredness in Everyday Life Care of the Soul : A Guide for Cultivating Depth and Sacredness in Everyday Life Paperback Personal Transformation/ New Age/ Spiritual/ Pastoral Theology $14.00 $0.01 4 Care of the Soul is considered to be one of the best primers for soul work ever written. Thomas Moore, an internationally renowned theologian and former Catholic monk, offers a philosophy for living that involves accepting our humanity rather than struggling to transcend it. By nurturing the soul in everyday life, Moore shows how to cultivate dignity, peace, and depth of character. For example, in addressing the importance of daily rituals he writes, "Ritual maintains the world's holiness. As in a dream a small object may assume significance, so in a life that is animated by ritual there are no insignificant things." 
This is the eloquence that helped reintroduce the sacred into everyday language and contemporary values. Harper Paperbacks 26-01-1994 03-02-2007 Thomas Moore 336 +book 9780416543506 0416543502 us Pooh's Workout Book Pooh's Workout Book Hardcover Parodies/ British $13.44 5 Methuen young books 24-10-1985 11-02-2007 Ethan Mordden 176 +book 9780596007652 0596007655 us Ambient Findability Ambient Findability Paperback/ Illustrated Web Site Design/ Database Design/ Internet/ Web Programming/ Web Authoring & Design/ Qualifying Textbooks - Winter 2007 $29.95 $14.94 4 How do you find your way in an age of information overload? How can you filter streams of complex information to pull out only what you want? Why does it matter how information is structured when Google seems to magically bring up the right answer to your questions? What does it mean to be "findable" in this day and age? This eye-opening new book examines the convergence of information and connectivity. Written by Peter Morville, author of the groundbreaking Information Architecture for the World Wide Web, the book defines our current age as a state of unlimited findability. In other words, anyone can find anything at any time. Complete navigability./ Morville discusses the Internet, GIS, and other network technologies that are coming together to make unlimited findability possible. He explores how the melding of these innovations impacts society, since Web access is now a standard requirement for successful people and businesses. But before he does that, Morville looks back at the history of wayfinding and human evolution, suggesting that our fear of being lost has driven us to create maps, charts, and now, the mobile Internet./ The book's central thesis is that information literacy, information architecture, and usability are all critical components of this new world order. 
Hand in hand with that is the contention that only by planning and designing the best possible software, devices, and Internet, will we be able to maintain this connectivity in the future. Morville's book is highlighted with full color illustrations and rich examples that bring his prose to life./ Ambient Findability doesn't preach or pretend to know all the answers. Instead, it presents research, stories, and examples in support of its novel ideas. Are we truly at a critical point in our evolution where the quality of our digital networks will dictate how we behave as a species? Is findability indeed the primary key to a successful global marketplace in the 21st century and beyond? Peter Morville takes you on a thought-provoking tour of these memes and more -- ideas that will not only fascinate but will stir your creativity in practical ways that you can apply to your work immediately./ "A lively, enjoyable and informative tour of a topic that's only going to become more important."
--David Weinberger, Author, Small Pieces Loosely Joined and The Cluetrain Manifesto/ "I envy the young scholar who finds this inventive book, by whatever strange means are necessary. The future isn't just unwritten--it's unsearched."
--Bruce Sterling, Writer, Futurist, and Co-Founder, The Electronic Frontier Foundation/ "Search engine marketing is the hottest thing in Internet business, and deservedly so. Ambient Findability puts SEM into a broader context and provides deeper insights into human behavior. This book will help you grow your online business in a world where being found is not at all certain."
--Jakob Nielsen, Ph.D., Author, Designing Web Usability: The Practice of Simplicity/ "Information that's hard to find will remain information that's hardly found--from one of the fathers of the discipline of information architecture, and one of its most experienced practitioners, come penetrating observations on why findability is elusive and how the act of seeking changes us."
--Steve Papa, Founder and Chairman, Endeca/ "Whether it's a fact or a figure, a person or a place, Peter Morville knows how to make it findable. Morville explores the possibilities of a world where everything can always be found--and the challenges in getting there--in this wide-ranging, thought-provoking book."
--Jesse James Garrett, Author, The Elements of User Experience/ "It is easy to assume that current searching of the World Wide Web is the last word in finding and using information. Peter Morville shows us that search engines are just the beginning. Skillfully weaving together information science research with his own extensive experience, he develops for the reader a feeling for the near future when information is truly findable all around us. There are immense implications, and Morville's lively and humorous writing brings them home."
--Marcia J. Bates, Ph.D., University of California Los Angeles/ "I've always known that Peter Morville was smart. After reading Ambient Findability, I now know he's (as we say in Boston) wicked smart. This is a timely book that will have lasting effects on how we create our future."
--Jared Spool, Founding Principal, User Interface Engineering/ "In Ambient Findability, Peter Morville has put his mind and keyboard on the pulse of the electronic noosphere. With tangible examples and lively writing, he lays out the challenges and wonders of finding our way in cyberspace, and explains the mutually dependent evolution of our changing world and selves. This is a must read for everyone and a practical guide for designers."
--Gary Marchionini, Ph.D., University of North Carolina/ "Find this book! Anyone interested in making information easier to find, or understanding how finding and being found is changing, will find this thoroughly researched, engagingly written, literate, insightful and very, very cool book well worth their time. Myriad examples from rich and varied domains and a valuable idea on nearly every page. Fun to read, too!"
--Joseph Janes, Ph.D., Founder, Internet Public Library/ O'Reilly Media 01-10-2005 07-02-2007 Peter Morville 188 +book 0636920924180 1565924185 us Java Threads, Second Edition Java Threads, Second Edition Paperback Parallel Computing/ Java $34.95 $0.47 3.5 Building sophisticated Java applets means learning about threading--if you need to read data from a network, for example, you can't afford to let a delay in its delivery lock up your entire applet. Java Threads introduces the Java threading API and uses non-computing analogies--such as scenarios involving bank tellers--to explain the need for synchronization and the dangers of deadlock. Scott Oaks and Henry Wong follow up their high-level examples with more detailed discussions on building a thread scheduler in Java, dealing with advanced synchronization issues, and handling exceptions. O'Reilly 20-01-1999 07-02-2007 Scott Oaks/ Henry Wong 336 +book 9780312275631 0312275633 us Awareness: The Key to Living in Balance Awareness: The Key to Living in Balance Paperback Meditation/ Mysticism/ Self-Help/ Spiritualism/ Osho/ Personal Transformation $11.95 $7.61 4.5 Underlying all meditation techniques, including martial arts-and in fact underlying all great athletic performances-is a quality of being awake and present to the moment, a quality that Osho calls awareness. Once we can identify and understand what this quality of awareness is, we have the key to self-mastery in virtually every area of our lives.According to great masters like Lao Tzu or Buddha, most of us move through our lives like sleepwalkers. Never really present in what we are doing, never fully alert to our environment, and not even aware of what motivates us to do and say the things we do.At the same time, all of us have experienced moments of awareness-or awakening, to use another-in extraordinary circumstances. 
On the road, in a sudden and unexpected accident, time seems to stop and one is suddenly aware of every movement, every sound, every thought. Or in moments that touch us deeply-welcoming a new baby into the world for the first time, or being with someone at the moment of death.Awareness, says Osho, is the key to being self-directed, centered, and free in every aspect of our lives. In this book, Osho teaches how to live life more attentively, mindfully, and meditatively, with love, caring and consciousness.OSHO challenges readers to examine and break free of the conditioned belief systems and prejudices that limit their capacity to life in all its richness. He has been described by the Sunday Times of London as one of the "1000 Makers of the 20th Century" and by Sunday Mid-Day (India) as one of the ten people-along with Gandhi, Nehru, and Buddha-who have changed the destiny of India. More than a decade after his death in 1990, the influence of his teachings continues to expand, reaching seekers of all ages in virtually every country of the world. St. Martin's Griffin 10-12-2001 03-02-2007 Osho 192 +book 9781580632256 1580632254 us Tao: The Pathless Path Tao: The Pathless Path Paperback Taoism/ Osho $12.95 $7.61 5 In his commentaries on five parables from the Leih Tzu, Osho brings a fresh and contemporary interpretation to the ancient wisdom of Tao. 
Leih Tzu was a well-known Taoist master in the fourth century B.C., and his sly critiques of a Confucius provide abundant opportunities for the reader to explore the contrasts between the rational and irrational, the male and female, the structured and the spontaneous."Who Is Really Happy" uses the discovery of a human skull on the roadside to probe into the question of immortality and how misery arises out of the existence of the ego."A Man Who Knows How to Console Himself" looks beneath the apparent cheerfulness of a wandering monk and asks if there is really a happiness that endures through life's ups and downs."No Regrets" is a parable about the difference between the knowledge that is gathered from the outside and the "knowing" that arises from within."No Rest for the Living" uses a dialogue between a despondent seeker and his master to reveal the limits of philosophy and the crippling consequences of living for the sake of some future goal. "Best Be Still, Best Be Empty" discusses the difference between the path of the will, the via affirmitiva of Christianity, Judaism, and Islam, versus the path of the mystic, the via negativa of Buddha and Lao Tzu.A Q&A section addresses how Taoist understanding applies to everyday life in concrete, practical terms. Renaissance Books 23-02-2002 11-02-2007 Osho 192 +book 9780865681729 0865681724 us Fundamentals of Pa Kua Chan, Vol. 1 Fundamentals of Pa Kua Chan, Vol. 1 Paperback $19.95 $9.00 4.5 Unique Publications 01-02-1999 11-02-2007 Bok Nam Park/ Dan Miller 204 +book B000AMLXHM us THE EMPEROR'S NEW MIND: CONCERNING COMPUTERS, MINDS, AND THE LAWS OF PHYSICS..."Ranks among the most innovative and exciting science books to be published in the last forty years." THE EMPEROR'S NEW MIND: CONCERNING COMPUTERS, MINDS, AND THE LAWS OF PHYSICS..."Ranks among the most innovative and exciting science books to be published in the last forty years." 
Paperback $1.89 PENGUIN BOOKS 08-02-1990 08-02-2007 ROGER PENROSE +book 9780936085241 093608524X us The Fine Art of Technical Writing The Fine Art of Technical Writing Paperback Technical $9.95 $0.95 4.5 This slender volume for the beginning technical writer doesn't delve very deeply into its subject, but The Fine Art of Technical Writing does make some nice points. Most appealing and useful is the book's premise: though its subject matter can be dry, "technical writing is a creative act." Author Carol Rosenblum Perry likens the technical writer to a ceramist, recommending that he or she get as much down on paper (or computer) as possible for the first draft, then think of that "rough text as a big, shapeless lump of clay" to be sculpted. Next is a more oblique analogy to figurative drawing. Perry urges the technical writer to consider the writing's "skeleton" (order), "body mass" (conciseness), and "muscle tone" (vigor). Finally, technical writing is compared to making music: "Writing, like music, depends on its dynamics ... varying degrees of 'loudness' and 'softness.'" Blue Heron Publishing 11-02-1991 11-02-2007 Carol Rosenblum Perry 111 +book 9780596004484 0596004486 us Version Control with Subversion Version Control with Subversion Paperback/ Illustrated Software Development $34.95 $18.69 4.5 One of the greatest frustrations in most software projects is version control: the art of managing changes to information. Today's increasingly fast pace of software development--as programmers make small changes to software one day only to undo them the next--has only heightened the problem; consecutive work on code or single-programmer software is a rare sight these days. Without careful attention to version control, concurrent and collaborative work can create more headaches than it solves. This is where Subversion comes into play. 
Written by members of the Subversion open source development team, Version Control with Subversion introduces the powerful new versioning tool designed to be the successor to the Concurrent Version System or CVS. CVS users will find the "look and feel" Subversion comfortably familiar, but under the surface it's far more flexible, robust, and usable, and more importantly, it improves on CVS's more notable flaws. The book begins with a general introduction to Subversion, the basic concepts behind version control, and a guided tour of Subversion's capabilities and structure. With thorough attention to detail, the authors cover every aspect of installing and configuring Subversion for managing a programming project, documentation, or any other team-based endeavor. Later chapters cover the more complex topics of branching, repository administration, and other advanced features such as properties, externals, and access control. The book ends with reference material and appendices covering a number of useful topics such as a Subversion complete reference and troubleshooting guide. Version Control with Subversion aims to be useful to readers of widely different backgrounds, from those with no previous experience in version control to experienced sysadmins. If you've never used version control, you'll find everything you need to get started in this book. And if you're a seasoned CVS pro, this book will help you make a painless leap into Subversion. O'Reilly Media 22-06-2004 07-02-2007 C. Michael Pilato/ Ben Collins-Sussman/ Brian W. Fitzpatrick 304 +book 9780060958404 0060958405 us Words and Rules: The Ingredients of Language Words and Rules: The Ingredients of Language Paperback Logic & Language/ Grammar/ Linguistics/ Neuroscience/ Cognitive $15.00 $3.50 4 Human languages are capable of expressing a literally endless number of different ideas. How do we manage it--so effortlessly that we scarcely ever stop to think about it? 
In Words and Rules: The Ingredients of Language, a look at the simple concepts that we use to devise works as complex as love sonnets and tax laws, renowned neuroscientist and linguist Steven Pinker shows us how. The latest linguistic research suggests that each of us stores a limited (though large) number of words and word-parts in memory and manipulates them with a much smaller number of rules to produce every writing and utterance, and Pinker explains every step of the way with engaging good humor./ Pinker's enthusiasm for the subject infects the reader, particularly as he emphasizes the relation between how we communicate and how we think. What does it mean that a small child who has never heard the word wug can tell a researcher that when one wug meets another, there are two wugs? Some rule must be telling the child that English plurals end in -s, which also explains mistakes like mouses. Is our communication linked inextricably with our thinking? Pinker says yes, and it's hard to disagree. Words and Rules is an excellent introduction to and overview of current thinking about language, and will greatly reward the careful reader with new ways of thinking about how we think, talk, and write. --Rob Lightner/ Harper Perennial 15-01-2000 03-02-2007 Steven Pinker 368 +book 9781556434303 1556434308 us Healing With Whole Foods: Asian Traditions and Modern Nutrition Healing With Whole Foods: Asian Traditions and Modern Nutrition (3rd Edition) 3rd Paperback Healthy/ Macrobiotics/ Healing/ Naturopathy/ Family Health/ Diet Therapy/ Whole Foods/ Chinese Medicine/ Healthy Living $35.00 $22.00 5 Used as a reference by students of acupuncture, this is a hefty, truly comprehensive guide to the theory and healing power of Chinese medicine. 
It's also a primer on nutrition--including facts about green foods, such as spirulina and blue-green algae, and the "regeneration diets" used by cancer patients and arthritics--along with an inspiring cookbook with more than 300 mostly vegetarian, nutrient-packed recipes./ The information on Chinese medicine is useful for helping to diagnose health imbalances, especially nascent illnesses. It's smartly paired with the whole-foods program because the Chinese have attributed various health-balancing properties to foods, so you can tailor your diet to help alleviate symptoms of illness. For example, Chinese medicine dictates that someone with low energy and a pale complexion (a yin deficiency) would benefit from avoiding bitter foods and increasing "sweet" foods such as soy, black sesame seeds, parsnips, rice, and oats. (Note that the Chinese definition of sweet foods is much different from the American one!)/ Pitchford says in his dedication that he hopes the reader finds "healing, awareness, and peace" from following his program. The diet is certainly acetic by American standards (no alcohol, caffeine, white flour, fried foods, or sugar, and a minimum of eggs and dairy) but the reasons he gives for avoiding these "negative energy" foods are compelling. From the adrenal damage imparted by coffee to immune dysfunction brought on by excess refined sugar, Pitchford spurs you to rethink every dietary choice and its ultimate influence on your health. Without being alarmist, he adds dietary tips for protecting yourself against the dangers of modern life, including neutralizing damage from water fluoridation (thyroid and immune-system problems may result; fluoride is a carcinogen). There's further reading on food combining, female health, heart disease, pregnancy, fasting, and weight loss. Overall, this is a wonderful book for anyone who's serious about strengthening his or her body from the inside out. 
--Erica Jorgensen/ North Atlantic Books 12-02-2002 11-02-2007 Paul Pitchford 750 +book 9780596002633 0596002637 us Practical RDF Practical RDF Paperback/ Illustrated XML/ Web Programming/ Qualifying Textbooks - Winter 2007 $39.95 $25.99 3.5 The Resource Description Framework (RDF) is a structure for describing and interchanging metadata on the Web--anything from library catalogs and worldwide directories to bioinformatics, Mozilla internal data structures, and knowledge bases for artificial intelligence projects. RDF provides a consistent framework and syntax for describing and querying data, making it possible to share website descriptions more easily. RDF's capabilities, however, have long been shrouded by its reputation for complexity and a difficult family of specifications. Practical RDF breaks through this reputation with immediate and solvable problems to help you understand, master, and implement RDF solutions. Practical RDF explains RDF from the ground up, providing real-world examples and descriptions of how the technology is being used in applications like Mozilla, FOAF, and Chandler, as well as infrastructure you can use to build your own applications. This book cuts to the heart of the W3C's often obscure specifications, giving you tools to apply RDF successfully in your own projects. The first part of the book focuses on the RDF specifications. After an introduction to RDF, the book covers the RDF specification documents themselves, including RDF Semantics and Concepts and Abstract Model specifications, RDF constructs, and the RDF Schema. The second section focuses on programming language support, and the tools and utilities that allow developers to review, edit, parse, store, and manipulate RDF//XML. Subsequent sections focus on RDF's data roots, programming and framework support, and practical implementation and use of RDF and RDF//XML. If you want to know how to apply RDF to information processing, Practical RDF is for you. 
Whether your interests lie in large-scale information aggregation and analysis or in smaller-scale projects like weblog syndication, this book will provide you with a solid foundation for working with RDF. O'Reilly Media 07-02-2003 07-02-2007 Shelley Powers 331 +book 9781556433023 1556433026 us Cheng Hsin: The Principles of Effortless Power Cheng Hsin: The Principles of Effortless Power Paperback New Age $16.95 $11.44 4.5 The basic text of one of the geniuses of martial arts in America. North Atlantic Books 01-02-1999 06-02-2007 Peter Ralston 184 +book 9781583941591 1583941592 us Zen Body-Being: An Enlightened Approach to Physical Skill, Grace, and Power Zen Body-Being: An Enlightened Approach to Physical Skill, Grace, and Power Paperback Meditation/ Personal Transformation/ Zen/ Zen Philosophy/ Physical Education $16.95 $10.51 5 In this inspiring guide, Peter Ralston presents a program of "physical education" for anyone interested in body improvement. Using simple, clear language to demystify the Zen mindset, he draws on more than three decades of experience teaching students and apprentices worldwide who have applied his body-being approach. More of a transformative guide than a specific list of exercises devoted to any particular physical approach, Zen Body-Being explains how to create a state of mental control, enhanced feeling-awareness, correct structural alignment, increased spatial acuity, and even a greater interactive presence. Exercises are simple, often involving feeling-imagery or a kind of meditative awareness that has a profound and sometimes instant effect. Where similar guides teach readers what to do, this challenging book by the man whose insights Dan Millman has said “speak to us all,” teaches readers how to be./ North Atlantic Books, Frog Ltd. 
27-07-2006 18-02-2007 Peter Ralston/ Laura Ralston 200 +book 0639785334866 0071377646 us Schaum's Outline of Chinese Grammar Schaum's Outline of Chinese Grammar Paperback Chinese/ Grammar/ Vocabulary/ Study Guides/ Reference/ Schaum's Outlines $17.95 $10.15 4.5 Schaum's Outline of Chinese Grammar is designed to assist beginning and intermediate students of Mandarin Chinese develop and enhance their knowledge of Chinese grammar. Chinese morphology can be intimidating to students. By simplifying the learning process, this practical book enriches the student's understanding of the Chinese language./ The accessible summary of the major features of Chinese grammar complete with clear explanations of terms and usage is especially helpful to students. The book features 200 sets of practice exercises as well as Chinese-English and English-Chinese glossaries. It serves as a much-needed supplement to textbooks and class materials currently being used in first-and second-year college-level courses./ McGraw-Hill 13-02-2004 11-02-2007 Claudia Ross 304 +book 9780439784542 0439784549 us Harry Potter and the Half-Blood Prince Harry Potter and the Half-Blood Prince (Book 6) 6 Hardcover/ Unabridged Humorous/ Science Fiction, Fantasy, & Magic/ Contemporary/ Hardcover/ School $29.99 $3.45 4.5 The long-awaited, eagerly anticipated, arguably over-hyped Harry Potter and the Half-Blood Prince has arrived, and the question on the minds of kids, adults, fans, and skeptics alike is, "Is it worth the hype?" The answer, luckily, is simple: yep. A magnificent spectacle more than worth the price of admission, Harry Potter and the Half-Blood Prince will blow you away. However, given that so much has gone into protecting the secrets of the book (including armored trucks and injunctions), don't expect any spoilers in this review. It's much more fun not knowing what's coming--and in the case of Rowling's delicious sixth book, you don't want to know. 
Just sit tight, despite the earth-shattering revelations that will have your head in your hands as you hope the words will rearrange themselves into a different story. But take one warning to heart: do not open Harry Potter and the Half-Blood Prince until you have first found a secluded spot, safe from curious eyes, where you can tuck in for a good long read. Because once you start, you won't stop until you reach the very last page./ A darker book than any in the series thus far with a level of sophistication belying its genre, Harry Potter and the Half-Blood Prince moves the series into murkier waters and marks the arrival of Rowling onto the adult literary scene. While she has long been praised for her cleverness and wit, the strength of Book 6 lies in her subtle development of key characters, as well as her carefully nuanced depiction of a community at war. In Harry Potter and the Half-Blood Prince, no one and nothing is safe, including preconceived notions of good and evil and of right and wrong. With each book in her increasingly remarkable series, fans have nervously watched J.K. Rowling raise the stakes; gone are the simple delights of butterbeer and enchanted candy, and days when the worst ailment could be cured by a bite of chocolate. A series that began as a colorful lark full of magic and discovery has become a dark and deadly war zone. But this should not come as a shock to loyal readers. Rowling readied fans with Harry Potter and the Goblet of Fire and Harry Potter and the Order of the Phoenix by killing off popular characters and engaging the young students in battle. Still, there is an unexpected bleakness from the start of Book 6 that casts a mean shadow over Quidditch games, silly flirtations, and mountains of homework. Ready or not, the tremendous ending of Harry Potter and the Half-Blood Prince will leave stunned fans wondering what great and terrible events await in Book 7 if this sinister darkness is meant to light the way. 
--Daphne Durham/ Visit the Harry Potter Store/ / Our Harry Potter Store features all things Harry, including books (box sets and collector's editions), audio CDs and cassettes, DVDs, soundtracks, games, and more. 

Begin at the Beginning/ Harry Potter and the Sorcerer's Stone/ / Hardcover / Paperback/ Harry Potter and the Chamber of Secrets/ / Hardcover/ Paperback/ Harry Potter and the Prisoner of Azkaban/ / Hardcover/ Paperback/ Harry Potter and the Goblet of Fire/ / Hardcover/ Paperback/ Harry Potter and the Order of the Phoenix/ / Hardcover/ Paperback/ 
Why We Love Harry
Favorite Moments from the Series
There are plenty of reasons to love Rowling's wildly popular series--no doubt you have several dozen of your own. Our list features favorite moments, characters, and artifacts from the first five books. Keep in mind that this list is by no means exhaustive (what we love about Harry could fill ten books!) and does not include any of the spectacular revelatory moments that would spoil the books for those (few) who have not read them. Enjoy./ Harry Potter and the Sorcerer's Stone/ / * Harry's first trip to the zoo with the Dursleys, when a boa constrictor winks at him. / * When the Dursleys' house is suddenly besieged by letters for Harry from Hogwarts. Readers learn how much the Dursleys have been keeping from Harry. Rowling does a wonderful job in displaying the lengths to which Uncle Vernon will go to deny that magic exists. / * Harry's first visit to Diagon Alley with Hagrid. Full of curiosities and rich with magic and marvel, Harry's first trip includes a trip to Gringotts and Ollivanders, where Harry gets his wand (holly and phoenix feather) and discovers yet another connection to He-Who-Must-No-Be-Named. This moment is the reader's first full introduction to Rowling's world of witchcraft and wizards./ * Harry's experience with the Sorting Hat./ Harry Potter and the Chamber of Secrets/ / * The de-gnoming of the Weasleys' garden. Harry discovers that even wizards have chores--gnomes must be grabbed (ignoring angry protests "Gerroff me! Gerroff me!"), swung about (to make them too dizzy to come back), and tossed out of the garden--this delightful scene highlights Rowling's clever and witty genius. / * Harry's first experience with a Howler, sent to Ron by his mother. / * The Dueling Club battle between Harry and Malfoy. Gilderoy Lockhart starts the Dueling Club to help students practice spells on each other, but he is not prepared for the intensity of the animosity between Harry and Draco. 
Since they are still young, their minibattle is innocent enough, including tickling and dancing charms./ Harry Potter and the Prisoner of Azkaban/ / * Ron's attempt to use a telephone to call Harry at the Dursleys'. / * Harry's first encounter with a Dementor on the train (and just about any other encounter with Dementors). Harry's brush with the Dementors is terrifying and prepares Potter fans for a darker, scarier book. / * Harry, Ron, and Hermione's behavior in Professor Trelawney's Divination class. Some of the best moments in Rowling's books occur when she reminds us that the wizards-in-training at Hogwarts are, after all, just children. Clearly, even at a school of witchcraft and wizardry, classes can be boring and seem pointless to children. / * The Boggart lesson in Professor Lupin's classroom. / * Harry, Ron, and Hermione's knock-down confrontation with Snape./ Harry Potter and the Goblet of Fire/ / * Hermione's disgust at the reception for the veela (Bulgarian National Team Mascots) at the Quidditch World Cup. Rowling's fourth book addresses issues about growing up--the dynamic between the boys and girls at Hogwarts starts to change. Nowhere is this more plain than the hilarious scene in which magical cheerleaders nearly convince Harry and Ron to jump from the stands to impress them. / * Viktor Krum's crush on Hermione--and Ron's objection to it. / * Malfoy's "Potter Stinks" badge. / * Hermione's creation of S.P.E.W., the intolerant bigotry of the Death Eaters, and the danger of the Triwizard Tournament. Add in the changing dynamics between girls and boys at Hogwarts, and suddenly Rowling's fourth book has a weight and seriousness not as present in early books in the series. Candy and tickle spells are left behind as the students tackle darker, more serious issues and take on larger responsibilities, including the knowledge of illegal curses./ Harry Potter and the Order of the Phoenix/ / / * Harry's outburst to his friends at No. 12 Grimmauld Place. 
A combination of frustration over being kept in the dark and fear that he will be expelled fuels much of Harry's anger, and it all comes out at once, directly aimed at Ron and Hermione. Rowling perfectly portrays Harry's frustration at being too old to shirk responsibility, but too young to be accepted as part of the fight that he knows is coming. / * Harry's detention with Professor Umbridge. Rowling shows her darker side, leading readers to believe that Hogwarts is no longer a safe haven for young wizards. Dolores represents a bureaucratic tyrant capable of real evil, and Harry is forced to endure their private battle of wills alone. / * Harry and Cho's painfully awkward interactions. Rowling clearly remembers what it was like to be a teenager. / * Harry's Occlumency lessons with Snape. / * Dumbledore's confession to Harry./ / Magic, Mystery, and Mayhem: A Conversation with J.K. Rowling
/ / "I am an extraordinarily lucky person, doing what I love best in the world. I'm sure that I will always be a writer. It was wonderful enough just to be published. The greatest reward is the enthusiasm of the readers."--J.K. Rowling/ Find out more about Harry's creator in our exclusive interview with J.K. Rowling./ 

Did You Know?/ / The Little White Horse was J.K. Rowling's favorite book as a child./ / Jane Austen is Rowling's favorite author./ / Roddy Doyle is Rowling's favorite living writer./ A Few Words from Mary GrandPré
/ / "When I illustrate a cover or a book, I draw upon what the author tells me; that's how I see my responsibility as an illustrator. J.K. Rowling is very descriptive in her writing--she gives an illustrator a lot to work with. Each story is packed full of rich visual descriptions of the atmosphere, the mood, the setting, and all the different creatures and people. She makes it easy for me. The images just develop as I sketch and retrace until it feels right and matches her vision." Check out more Harry Potter art from illustrator Mary GrandPré. 
/ Scholastic, Inc. 16-07-2005 18-02-2007 J. K. Rowling 672 +book 9780767900027 0767900022 us The Illuminated Rumi The Illuminated Rumi Hardcover Middle Eastern/ Inspirational & Religious/ Eastern European/ Mysticism/ Folk Art/ Ancient, Classical & Medieval $30.00 $13.99 4.5 Rise up nimbly and go on your strange journey to the ocean of meanings.../ / In the mid-thirteenth century, in a dusty marketplace in Konya, Turkey, a city where Muslim, Christian, Hindu, and Buddhist travelers mingled, Jelaluddin Rumi, a popular philosopher and scholar, met Shams of Tabriz, a wandering dervish.  Their meeting forever altered the course of Rumi's life and influenced the mystical evolution of the planet.  The bond they formed was everlasting--a powerful transcendent friendship that would flow through Rumi as some of the world's best-loved ecstatic poetry./ / Rumi's passionate, playful poems find and celebrate sacred life in everyday existence.  They speak across all traditions, to all peoples, and today his relevance and popularity continue to grow.  In The Illuminated Rumi, Coleman Barks, widely regarded as the world's premier translator of Rumi's writings, presents some of his most brilliant work, including many new translations.  To complement Rumi's universal vision, Michael Green has worked the ancient art of illumination into a new, visually stunning form that joins typography, original art, old masters, photographs, and prints with sacred images from around the world./ / The Illuminated Rumi is a truly groundbreaking collaboration that interweaves word and image: a magnificent meeting of ancient tradition and modern interpretation that uniquely captures the spiritual wealth of Rumi's teachings.  Coleman Barks's wise and witty commentary, together with Michael Green's art, makes this a classic guide to the life of the soul for a whole new generation of seekers. 
Broadway 13-10-1997 11-02-2007 Jalal Al-Din Rumi 128 +book 9780140195682 0140195688 us Sitting: A Guide to Buddhist Meditation Sitting: A Guide to Buddhist Meditation Paperback Rituals & Practice $12.00 $2.90 4.5 Penguin (Non-Classics) 01-05-1998 03-02-2007 Diana St. Ruth 96 +book 9780060970796 0060970790 us The Man Who Mistook His Wife for a Hat: And Other Clinical Tales The Man Who Mistook His Wife for a Hat: And Other Clinical Tales Paperback Doctors & Medicine/ Self-Help & Psychology/ Clinical Psychology/ Neuropsychology/ Mental Illness $13.00 $0.95 4.5 In his most extraordinary book, "one of the great clinical writers of the 20th century"(The New York Times) recounts the case histories of patients lost in the bizarre, apparently inescapable world of neurological disorders. Oliver Sacks's The Man Who Mistook His Wife for a Hat tells the stories of individuals afflicted with fantastic perceptual and intellectual aberrations: patients who have lost their memories and with them the greater part of their pasts; who are no longer able to recognize people and common objects; who are stricken with violent tics and grimaces or who shout involuntary obscenities; whose limbs have become alien; who have been dismissed as retarded yet are gifted with uncanny artistic or mathematical talents./ If inconceivably strange, these brilliant tales remain, in Dr. Sacks's splendid and sympathetic telling, deeply human. They are studies of life struggling against incredible adversity, and they enable us to enter the world of the neurologically impaired, to imagine with our hearts what it must be to live and feel as they do. A great healer, Sacks never loses sight of medicine's ultimate responsibility: "the suffering, afflicted, fighting human subject."/ / Harpercollins 01-02-1987 11-02-2007 Oliver W. 
Sacks 256 +book 9781590591253 1590591259 us Enterprise Java Development on a Budget: Leveraging Java Open Source Technologies Enterprise Java Development on a Budget: Leveraging Java Open Source Technologies Paperback Software Development/ Logic $49.99 $7.43 4.5 Open source has had a profound effect on the Java community. Many Java open source projects have even become de-facto standards. The principal purpose of Enterprise Java Development on a Budget is to guide you through the development of a real enterprise Java application using nothing but open source Java tools, projects, and frameworks./ This book is organized by activities and by particular open source projects that can help you take on the challenges of building the different tiers of your applications. The authors also present a realistic example application that covers most areas of enterprise application development. You'll find information on how to use and configure JBoss, Ant, XDoclet, Struts, ArgoUML, OJB, Hibernate, JUnit, SWT//JFace, and others. Not only will you learn how to use each individual tool, but you'll also understand how to use them in synergy to create robust enterprise Java applications within your budget./ Enterprise Java Development on a Budget combines coverage of best practices with information on the right open source Java tools and technologies, all of which will help support your Java development budget and goals./ Apress 10-11-2003 08-02-2007 5 1 Brian Sam-Bodden/ Christopher M. Judd 656 +book 9780976694069 0976694069 us Enterprise Integration with Ruby Enterprise Integration with Ruby Paperback/ Illustrated Assembly Language Programming/ Object-Oriented Design $32.95 $15.50 4.5 Typical enterprises use dozens, hundreds, and sometimes even thousands of applications, components, services, and databases. 
Many of them are custom built in-house or by third parties, some are bought, others are based on open source projects, and the origin of a few--usually the most critical ones--is completely unknown./ A lot of applications are very old, some are fairly new, and seemingly no two of them were written using the same tools. They run on heterogeneous operating systems and hardware, use databases and messaging systems from various vendors, and were written in different programming languages./ See how to glue these disparate applications together using popular technologies such as:/ • LDAP, Oracle, and MySQL/ • XML Documents and DTDs/ • Sockets, HTTP, and REST/ • XML//RPC, SOAP, and others/ • ...and more./ If you're on the hook to integrate enterprise-class systems together, the tips and techniques in this book will help./ Pragmatic Bookshelf 01-04-2006 08-02-2007 Maik Schmidt 330 +book 0639785413059 0071419837 us Teach Yourself Beginner's Chinese Script Teach Yourself Beginner's Chinese Script Paperback Chinese/ Phrasebooks - General/ Alphabet/ Reading Skills/ Writing Skills $10.95 $4.50 3 Now learning non-Roman-alphabet languages is as easy as A-B-C!/ Readers wanting to learn the basics of reading and writing a new language that employs script will find all they need in the Teach Yourself Beginner's Script series. Each book includes a step-by-step introduction to reading and writing in a new language as well as tips and practice exercises to build learners' skills. Thanks to the experts at Teach Yourself, script will no longer be all "Greek" to language learners--unless of course, it is Greek script! 
Teach Yourself Beginner's Script series books feature:/ • Origins of the language/ • A systematic approach to mastering the script/ • Lots of "hands-on" exercises and activities/ • Practical examples from real-life situations/ McGraw-Hill 06-06-2003 11-02-2007 Elizabeth Scurfield/ Song Lianyi 192 +book 9781590302835 1590302834 us Zen Training: Methods and Philosophy (Shambhala Classics) Zen Training: Methods and Philosophy (Shambhala Classics) Paperback Zen/ Meditation/ Buddha $16.95 $9.95 5 Zen Training is a comprehensive handbook for zazen, seated meditation practice, and an authoritative presentation of the Zen path. The book marked a turning point in Zen literature in its critical reevaluation of the enlightenment experience, which the author believes has often been emphasized at the expense of other important aspects of Zen training. In addition, Zen Training goes beyond the first flashes of enlightenment to explore how one lives as well as trains in Zen. The author also draws many significant parallels between Zen and Western philosophy and psychology, comparing traditional Zen concepts with the theories of being and cognition of such thinkers as Heidegger and Husserl. Shambhala 13-09-2005 06-02-2007 Katsuki Sekida 264 +book 9781583941454 1583941452 us Combat Techniques of Taiji, Xingyi, and Bagua: Principles and Practices of Internal Martial Arts Combat Techniques of Taiji, Xingyi, and Bagua: Principles and Practices of Internal Martial Arts Paperback Taichi $22.95 $13.20 3.5 The combat techniques of Tai Ji, Ba Gua, and Xing Yi were forbidden during China's Cultural Revolution, but the teachings of grandmaster Wang Pei Shing have survived. This comprehensive guide, written by one of his students, selects core movements from each practice and gives the student powerful tools to recognize the unique strategies and skills, and to develop a deeper understanding, of each style. 
It contains complete instructions for a 16-posture form to gain mastery of combat techniques. The book helps practitioners achieve a new level of practice, where deeply ingrained skills are brought forth in a more fluid, intuitive, and fast-paced fashion. Blue Snake Books//Frog, Ltd. 09-02-2006 11-02-2007 Lu Shengli 400 +book 0752063324547 0672324547 us HTTP Developer's Handbook HTTP Developer's Handbook Paperback HTML - General $39.99 $24.97 4.5 HTTP is the protocol that powers the Web. As Web applications become more sophisticated, and as emerging technologies continue to rely heavily on HTTP, understanding this protocol is becoming more and more essential for professional Web developers. By learning HTTP protocol, Web developers gain a deeper understanding of the Web's architecture and can create even better Web applications that are more reliable, faster, and more secure./ The HTTP Developer's Handbook is written specifically for Web developers. It begins by introducing the protocol and explaining it in a straightforward manner. It then illustrates how to leverage this information to improve applications. Extensive information and examples are given covering a wide variety of issues, such as state and session management, caching, SSL, software architecture, and application security./ Sams 19-03-2003 07-02-2007 Chris Shiflett 312 +book 9780517887943 0517887940 us Feng Shui Step by Step : Arranging Your Home for Health and Happiness--with Personalized Astrological Charts Feng Shui Step by Step : Arranging Your Home for Health and Happiness--with Personalized Astrological Charts Paperback Household Hints/ Psychology & Counseling/ Parapsychology/ Feng Shui $20.00 $0.01 4 Simons, a feng shui master and astrologer, teaches readers how to feng shui their homes in a clear, step-by-step fashion and gives personalized advice based on readers' dates of birth. 
Simons presents not only the popular eight-point method but also divining techniques and other authentic Chinese methods that make analysis more complete. Illustrations. Three Rivers Press 12-11-1996 07-02-2007 T. Raphael Simons 256 +book 9780380788620 0380788624 us Cryptonomicon Cryptonomicon Paperback United States/ Contemporary/ Literary/ Historical/ Spy Stories & Tales of Intrigue/ Technothrillers/ High Tech/ Paperback/ Action & Adventure $16.00 $1.95 4 Neal Stephenson enjoys cult status among science fiction fans and techie types thanks to Snow Crash, which so completely redefined conventional notions of the high-tech future that it became a self-fulfilling prophecy. But if his cyberpunk classic was big, Cryptonomicon is huge... gargantuan... massive, not just in size (a hefty 918 pages including appendices) but in scope and appeal. It's the hip, readable heir to Gravity's Rainbow and the Illuminatus trilogy. And it's only the first of a proposed series--for more information, read our interview with Stephenson./ Cryptonomicon zooms all over the world, careening conspiratorially back and forth between two time periods--World War II and the present. Our 1940s heroes are the brilliant mathematician Lawrence Waterhouse, cryptanalyst extraordinaire, and gung ho, morphine-addicted marine Bobby Shaftoe. They're part of Detachment 2702, an Allied group trying to break Axis communication codes while simultaneously preventing the enemy from figuring out that their codes have been broken. Their job boils down to layer upon layer of deception. Dr. Alan Turing is also a member of 2702, and he explains the unit's strange workings to Waterhouse. "When we want to sink a convoy, we send out an observation plane first.... Of course, to observe is not its real duty--we already know exactly where the convoy is. Its real duty is to be observed.... 
Then, when we come round and sink them, the Germans will not find it suspicious."/ All of this secrecy resonates in the present-day story line, in which the grandchildren of the WWII heroes--inimitable programming geek Randy Waterhouse and the lovely and powerful Amy Shaftoe--team up to help create an offshore data haven in Southeast Asia and maybe uncover some gold once destined for Nazi coffers. To top off the paranoiac tone of the book, the mysterious Enoch Root, key member of Detachment 2702 and the Societas Eruditorum, pops up with an unbreakable encryption scheme left over from WWII to befuddle the 1990s protagonists with conspiratorial ties./ Cryptonomicon is vintage Stephenson from start to finish: short on plot, but long on detail so precise it's exhausting. Every page has a math problem, a quotable in-joke, an amazing idea, or a bit of sharp prose. Cryptonomicon is also packed with truly weird characters, funky tech, and crypto--all the crypto you'll ever need, in fact, not to mention all the computer jargon of the moment. A word to the wise: if you read this book in one sitting, you may die of information overload (and starvation). --Therese Littleton/ Harper Perennial 02-05-2000 07-02-2007 Neal Stephenson 928 +book 9780887100260 0887100260 us Fifty-Five T'ang Poems: A Text in the Reading and Understanding of T'Ang Poetry (Far Eastern Publications Series) Fifty-Five T'ang Poems: A Text in the Reading and Understanding of T'Ang Poetry (Far Eastern Publications Series) Paperback Movements & Periods/ Anthologies/ Chinese $26.00 $24.95 3 Four masters of the shi form of Chinese poetry, who are generally considered to be giants in the entire history of Chinese literature, are represented in this book: three from the eighth century, and one from the ninth. A few works by other well-known poets are also included. 
The author provides a general background sketch, instruction to the student, a description of the phonological system and the spelling used, as well as an outline of the grammar of T'ang shi, insofar as it is known. Yale University Press 15-08-2006 11-02-2007 Hugh M. Stimson 256 +book 9780865681859 0865681856 us Xing Yi Quan Xue: The Study of Form-Mind Boxing Xing Yi Quan Xue: The Study of Form-Mind Boxing Paperback $21.95 $15.22 4.5 This is the first English language edition of Sun Lu Tang's 1915 classic on xing yi (hsing yi). In addition to the original text and photographs by Sun Lu Tang, a complete biography and additional photos of Master Sun have been added. Unique Publications 06-02-2000 06-02-2007 Sun Lu Tang 312 +book 9780596003425 0596003420 us Learning Unix for Mac OS X Learning Unix for Mac OS X Paperback MacOS/ Macintosh/ Macs/ Linux/ X Windows & Motif $19.95 $0.01 3 The success of Apple's operating system, Mac OS X, and its Unix roots has brought many new potential Unix users searching for information. The Terminal application and that empty command line can be daunting at first, but users understand it can bring them power and flexibility. Learning Unix for Mac OS X is a concise introduction to just what a reader needs to know to get started with Unix on Mac OS X. Many Mac users are familiar and comfortable with the easy-to-use elegance of the GUI. With Mac OS X, they now have the ability to not only continue to use their preferred platform, but to explore the powerful capabilities of Unix. Learning Unix for Mac OS X gives the reader information on how to use the Terminal application, become functional with the command interface, explore many Unix applications, and learn how to take advantage of the strengths of both interfaces./ The reader will find all the common commands simply explained with accompanying examples, exercises, and opportunities for experimentation. 
The book even includes problem checklists along the way to help the reader if they get stuck. The book begins with an introduction to the Unix environment to encourage the reader to get comfortable with the command line. The coverage then expands to launching and configuring the Terminal application--the heart of the Unix interface for the Mac OS X user. The text also introduces how to manage, create, edit, and transfer files. Most everyone using a computer today knows the importance of the internet. And Learning Unix for Mac OS X provides instruction on how to use functions such as mail, chat, and web browsing from the command line. A unique challenge for Mac OS X users is printing from the command line. The book contains an entire chapter on how to configure and utilize the various print functions./ The book has been reviewed by Apple for technological accuracy and brandishes the Apple Development Connection (ADC) logo./ O'Reilly 05-02-2002 07-02-2007 Dave Taylor/ Jerry Peek 156 +book 9780974514055 0974514055 us Programming Ruby: The Pragmatic Programmers' Guide, Second Edition Programming Ruby: The Pragmatic Programmers' Guide, Second Edition Paperback/ Illustrated Object-Oriented Design/ Qualifying Textbooks - Winter 2007 $44.95 $20.53 4.5 Ruby is an increasingly popular, fully object-oriented dynamic programming language, hailed by many practitioners as the finest and most useful language available today. When Ruby first burst onto the scene in the Western world, the Pragmatic Programmers were there with the definitive reference manual, Programming Ruby: The Pragmatic Programmer's Guide. Now in its second edition, author Dave Thomas has expanded the famous Pickaxe book with over 200 pages of new content, covering all the new and improved language features of Ruby 1.8 and standard library modules. 
The Pickaxe contains four major sections:/ • An acclaimed tutorial on using Ruby./ • The definitive reference to the language./ • Complete documentation on all built-in classes, modules, and methods/ • Complete descriptions of all 98 standard libraries./ If you enjoyed the First Edition, you'll appreciate the new and expanded content, including: enhanced coverage of installation, packaging, documenting Ruby source code, threading and synchronization, and enhancing Ruby's capabilities using C-language extensions. Programming for the world-wide web is easy in Ruby, with new chapters on XML//RPC, SOAP, distributed Ruby, templating systems and other web services. There's even a new chapter on unit testing. This is the definitive reference manual for Ruby, including a description of all the standard library modules, a complete reference to all built-in classes and modules (including more than 250 significant changes since the First Edition). Coverage of other features has grown tremendously, including details on how to harness the sophisticated capabilities of irb, so you can dynamically examine and experiment with your running code. "Ruby is a wonderfully powerful and useful language, and whenever I'm working with it this book is at my side" --Martin Fowler, Chief Scientist, ThoughtWorks Pragmatic Bookshelf 01-10-2004 07-02-2007 Dave Thomas/ Chad Fowler/ Andy Hunt 828 +book 9780974514000 0974514004 us Pragmatic Version Control Using CVS Pragmatic Version Control Using CVS Paperback/ Illustrated Software Development/ Information Systems/ Information Theory $29.95 $14.35 4 This book is a recipe-based approach to using the CVS Version Control system that will get you up and running quickly--and correctly. All projects need version control: it's a foundational piece of any project's infrastructure. Yet half of all project teams in the U.S. don't use any version control at all. Many others don't use it well, and end up experiencing time-consuming problems. 
Version Control, done well, is your "undo" button for the project: nothing is final, and mistakes are easily rolled back. With version control, you'll never again lose a good idea because someone overwrote your file edits. You can always find out easily who made what changes to the source code--and why. Version control is a project-wide time machine. Dial in a date and see exactly what the entire project looked like yesterday, last Tuesday, or even last year. This book describes a practical, easy-to-follow way of using CVS, the most commonly used version control system in the world (and it's available for free). Instead of presenting the grand Theory of Version Control and describing every possible option (whether you'd ever use it or not), this book focuses on the practical application of CVS. It builds a set of examples of use that parallel the life of typical projects, showing you how to adopt and then enhance your pragmatic use of CVS. With this book, you can:/ • Keep all project assets (not just source code) safe, and never run the risk of losing a great idea/ • Know how to undo bad decisions--no matter when they were made/ • Learn how to share code safely, and work in parallel for maximum efficiency/ • See how to avoid costly code freezes/ • Manage 3rd party code/ Now there's no excuse not to use professional-grade version control. The Pragmatic Programmers 09-02-2003 07-02-2007 David Thomas/ Andrew Hunt 161 +book 7805961006053 0596100604 us Astronomy Hacks Astronomy Hacks Paperback/ Illustrated Astronomy/ Star-Gazing $24.95 $13.93 5 Why use the traditional approach to study the stars when you can turn computers, handheld devices, and telescopes into out-of-this-world stargazing tools? Whether you're a first timer or an advanced hobbyist, you'll find Astronomy Hacks both useful and fun. From upgrading your optical finder to photographing stars, this book is the perfect cosmic companion. 
This handy field guide covers the basics of observing, and what you need to know about tweaking, tuning, adjusting, and tricking out a 'scope. Expect priceless tips and tools for using a Dobsonian Telescope, the large-aperture telescope you can inexpensively build in your garage. Get advice on protocols involved with using electronics including in dark places without ruining the party. Astronomy Hacks begins the space exploration by getting you set up with the right equipment for observing and admiring the stars in an urban setting. Along for the trip are first rate tips for making most of observations. The hacks show you how to:/ • Dark-Adapt Your Notebook Computer/ • Choose the Best Binocular/ • Clean Your Eyepieces and Lenses Safely/ • Upgrade Your Optical Finder/ • Photograph the Stars with Basic Equipment/ The O'Reilly Hacks series has reclaimed the term "hacking" to mean innovating, unearthing, and creating shortcuts, gizmos, and gears. With these hacks, you don't dream it-you do it--and Astronomy Hacks brings space dreams to life. The book is essential for anyone who wants to get the most out of an evening under the stars and have memorable celestial adventures. O'Reilly Media 17-06-2005 08-02-2007 Robert Bruce Thompson/ Barbara Fritchman Thompson 388 +book 9780877736752 0877736758 us The Tibetan Book of the Dead (Shambala Pocket Classics) The Tibetan Book of the Dead (Shambala Pocket Classics) Paperback Motivational/ Book of the Dead (Tibetan)/ Mysticism/ Eastern Philosophy/ Death/ Rituals & Practice $7.00 $2.56 4.5 In this classic scripture of Tibetan Buddhism—traditionally read aloud to the dying to help them attain liberation—death and rebirth are seen as a process that provides an opportunity to recognize the true nature of mind. This unabridged translation of The Tibetan Book of the Dead emphasizes the practical advice that the book offers to the living. 
The insightful commentary by Chögyam Trungpa, written in clear, concise language, explains what the text teaches us about human psychology. This book will be of interest to people concerned with death and dying, as well as those who seek greater spiritual understanding in everyday life. Shambhala 13-10-1992 11-02-2007 Chogyam Trungpa 236 +book 9781592400874 1592400876 us Eats, Shoots & Leaves: The Zero Tolerance Approach to Punctuation Eats, Shoots & Leaves: The Zero Tolerance Approach to Punctuation Hardcover Essays/ Grammar/ Reference/ Writing Skills $19.95 $1.84 4 A bona fide publishing phenomenon, Lynne Truss's now classic #1 New York Times bestseller Eats, Shoots & Leaves makes its paperback debut after selling over 3 million copies worldwide in hardcover./ We all know the basics of punctuation. Or do we? A look at most neighborhood signage tells a different story. Through sloppy usage and low standards on the Internet, in e-mail, and now text messages, we have made proper punctuation an endangered species./ In Eats, Shoots & Leaves, former editor Truss dares to say, in her delightfully urbane, witty, and very English way, that it is time to look at our commas and semicolons and see them as the wonderful and necessary things they are. This is a book for people who love punctuation and get upset when it is mishandled. From the invention of the question mark in the time of Charlemagne to George Orwell shunning the semicolon, this lively history makes a powerful case for the preservation of a system of printing conventions that is much too subtle to be mucked about with. BACKCOVER: Praise for Lynne Truss and Eats, Shoots & Leaves:

Eats, Shoots & Leaves “makes correct usage so cool that you have to admire Ms. Truss.”
—Janet Maslin, The New York Times

“Witty, smart, passionate.”
—Los Angeles Times Book Review, Best Books Of 2004: Nonfiction 

“Who knew grammar could be so much fun?”
—Newsweek

“Witty and instructive. . . . Truss is an entertaining, well-read scold in a culture that could use more scolding.”
—USA Today

“Truss is William Safire crossed with John Cleese's Basil Fawlty.”
—Entertainment Weekly

“Lynne Truss has done the English-speaking world a huge service.”
—The Christian Science Monitor

“This book changed my life in small, perfect ways like learning how to make better coffee or fold an omelet. It's the perfect gift for anyone who cares about grammar and a gentle introduction for those who don't care enough.”
—The Boston Sunday Globe

“Lynne Truss makes [punctuation] a joy to contemplate.”
—Elle

“If Lynne Truss were Roman Catholic I'd nominate her for sainthood.” —Frank McCourt, author of Angela's Ashes

“Truss's scholarship is impressive and never dry.”
—Edmund Morris, The New York Times Book Review/ Gotham 12-04-2004 03-02-2007 Lynne Truss 209 +book 9780679724346 0679724346 us Tao Te Ching [Text Only] Tao Te Ching [Text Only] Paperback Taoism/ Tao Te Ching/ Chinese/ Bible & Other Sacred Texts $9.95 $2.88 4.5 Available for the first time in a handy, easy-to-use size, here is the most accessible and authoritative modern English translation of the ancient Chinese classic. This new Vintage edition includes an introduction and notes by the well-known writer and scholar of philosophy and comparative religion, Jacob Needleman. Vintage 28-08-1989 11-02-2007 Lao Tsu/ Jane English/ Jacob Needleman 144 +book 9780844285269 0844285269 us Easy Chinese Phrasebook & Dictionary Easy Chinese Phrasebook & Dictionary Paperback English (All)/ English (British)/ Chinese/ Conversation/ Phrasebooks - General/ Southeast Asian/ Linguistics $12.95 $3.00 4.5 Two books in one—a practical phrasebook of essential Chinese vocabulary and expressions, plus a 3,500 English//Chinese dictionary. McGraw-Hill 07-12-1990 11-02-2007 Yeou-Koung Tung 272 +book 9780877738510 0877738513 us Art of Peace (Shambhala Pocket Classics) Art of Peace (Shambhala Pocket Classics) Paperback New Age/ Eastern Philosophy/ Aikido/ Martial Arts $6.95 $2.59 4.5 These inspirational teachings show that the real way of the warrior is based on compassion, wisdom, fearlessness, and love of nature. Drawn from the talks and writings of Morihei Ueshiba, founder of the popular Japanese martial art known as Aikido, The Art of Peace, presented here in a pocket-sized edition, offers a nonviolent way to victory and a convincing counterpoint to such classics as Musashi's Book of Five Rings and Sun Tzu's Art of War. 
Shambhala 10-11-1992 11-02-2007 Morihei Ueshiba/ John Stevens 126 +book 9780912111476 091211147X us Learn to Read Chinese: An Introduction to the Language and Concepts of Current Zhongyi Literature (Learn to Read Chinese) Learn to Read Chinese: An Introduction to the Language and Concepts of Current Zhongyi Literature (Learn to Read Chinese) Paperback Acupuncture & Acupressure/ Chinese/ Southeast Asian/ Linguistics/ Chinese Medicine $30.00 $17.50 These two volumes teach the language of contemporary Chinese technical literature. The subject matter is Chinese medicine, making these texts ideal for those who wish to learn Chinese from real-world sources. All 128 of the texts chosen are excerpted from the introduction to Chinese medicine written by Qin Bowei, one of the founders of TCM and a medical writer known for his clear, precise and detailed clinical expression. The work is thus a superb supplement for students of Chinese and an effective course of study for clinicians or scholars who wish to read Chinese technical periodicals, papers and texts./ The first volume teaches vocabulary. Each text is an exercise; readers transliterate, then translate a passage based on the simplified character vocabulary provided with each passage and its preceding passages. A completed transliteration in Pinyin and a finished English translation accompany the Chinese. The subject matter provides an exposure to authentic contemporary discussions of the fundamental principles of Chinese medicine./ The second volume teaches analysis of Chinese texts through the principles of Natural Language development. By showing how to identify the basic statement in a sentence and the adjunct statements that complete its meaning, just as children learn to read their native language, the reader is given access to Chinese texts as quickly as is possible. 
When the course is completed, users are working with typical modern Chinese medical sources./ Paradigm Publications (MA) 09-02-1994 11-02-2007 Paul U. Unschuld 2 +book 9780521777681 0521777682 us The Elements of Java Style The Elements of Java Style Paperback Object-Oriented Design/ Software Development/ Computers & Internet $14.99 $2.98 4 The Elements of Java Style, written by renowned author Scott Ambler, Alan Vermeulen, and a team of programmers from Rogue Wave Software, is directed at anyone who writes Java code. Many books explain the syntax and basic use of Java; however, this essential guide explains not only what you can do with the syntax, but what you ought to do. Just as Strunk and White's The Elements of Style provides rules of usage for the English language, this text furnishes a set of rules for Java practitioners. While illustrating these rules with parallel examples of correct and incorrect usage, the authors offer a collection of standards, conventions, and guidelines for writing solid Java code that will be easy to understand, maintain, and enhance. Java developers and programmers who read this book will write better Java code, and become more productive as well. Indeed, anyone who writes Java code or plans to learn how to write Java code should have this book next to his//her computer. Cambridge University Press 01-02-2000 07-02-2007 Allan Vermeulen/ Scott W. Ambler/ Greg Bumgardner/ Eldon Metz/ Trevor Misfeldt/ Jim Shur/ Alan Vermeulen/ Patrick Thompson 125 +book 0723812607006 0471463620 us Java Open Source Programming: with XDoclet, JUnit, WebWork, Hibernate Java Open Source Programming: with XDoclet, JUnit, WebWork, Hibernate Paperback Software Development/ Computers & Internet $45.00 $1.65 4 The Java language itself is not strictly open-source (Sun has held onto control, albeit with lots of public input). There is, however, a large open-source development community around this highly capable language. 
Java Open Source Programming describes and provides tutorials on some of the most interesting public Java projects, and is designed to enable a Java programmer (who's worked through the basic language's initial learning curve) to take on more ambitious assignments. The authors generally treat the covered open-source packages as resources to be used, rather than projects to be contributed to, and so it's fair to think of this volume as the "missing manual" for downloaded code. In that spirit, the authors devote many sections to "how to" subjects (addressing, for example, a good way to retrieve stored objects from a database and the procedure for calling an action in XWork)./ Java Open Source Programming takes a bit of a risk by devoting a lot of space to the development of a complex application (an online pet shop), as such a didactic strategy can be hard to follow. The authors pull it off, though, and manage to show that their covered technologies can be used to create a feature-rich and robust application that uses the versatile model-view-controller (MVC) pattern. This book will suit you well if you're planning an MVC Java project and want to take advantage of open-source packages. --David Wall/ Topics covered: The most popular open-source Java packages, particularly those concerned with Web applications and the model-view-controller (MVC) pattern. Specific packages covered include JUnit and Mocks (code testing), Hibernate (persistent storage of objects in databases), WebWork (MVC), SiteMesh (Web page layout), Lucene (site searching), and WebDoclet (configuration file generation)./ Wiley 28-11-2003 07-02-2007 Joseph Walnes/ Ara Abrahamian/ Mike Cannon-Brookes/ Patrick A. 
Lightbody 480 +book 9780201082593 0201082594 us Artificial Intelligence Edition (Addison-Wesley series in computer science) Artificial Intelligence Edition (Addison-Wesley series in computer science) Paperback Questions & Answers/ Information Systems $32.61 $0.01 3.5 This book is one of the oldest and most popular introductions to artificial intelligence. An accomplished artificial intelligence (AI) scientist, Winston heads MIT's Artificial Intelligence Laboratory, and his hands-on AI research experience lends authority to what he writes. Winston provides detailed pseudo-code for most of the algorithms discussed, so you will be able to implement and test the algorithms immediately. The book contains exercises to test your knowledge of the subject and helpful introductions and summaries to guide you through the material. Addison-wesley 04-02-1984 08-02-2007 Patrick Henry Winston 527 +book 9781558605701 1558605703 us Managing Gigabytes: Compressing and Indexing Documents and Images (The Morgan Kaufmann Series in Multimedia and Information Systems) Managing Gigabytes: Compressing and Indexing Documents and Images (The Morgan Kaufmann Series in Multimedia and Information Systems) Hardcover Storage/ Compression/ Software Development/ Software Project Management/ Electronic Documents/ General & Reference/ Engineering/ Peripherals/ Digital Image Processing/ Qualifying Textbooks - Winter 2007 $74.95 $49.98 4.5 Of all the tasks programmers are asked to perform, storing, compressing, and retrieving information are some of the most challenging--and critical to many applications. Managing Gigabytes: Compressing and Indexing Documents and Images is a treasure trove of theory, practical illustration, and general discussion in this fascinating technical subject./ Ian Witten, Alistair Moffat, and Timothy Bell have updated their original work with this even more impressive second edition. 
This version adds recent techniques such as block-sorting, new indexing techniques, new lossless compression strategies, and many other elements to the mix. In short, this work is a comprehensive summary of text and image compression, indexing, and querying techniques. The history of relevant algorithm development is woven well with a practical discussion of challenges, pitfalls, and specific solutions./ This title is a textbook-style exposition on the topic, with its information organized very clearly into topics such as compression, indexing, and so forth. In addition to diagrams and example text transformations, the authors use "pseudo-code" to present algorithms in a language-independent manner wherever possible. They also supplement the reading with mg--their own implementation of the techniques. The mg C language source code is freely available on the Web./ Alone, this book is an impressive collection of information. Nevertheless, the authors list numerous titles for further reading in selected topics. Whether you're in the midst of application development and need solutions fast or are merely curious about how top-notch information management is done, this hardcover is an excellent investment. --Stephen W. Plain/ Topics covered: Text compression models, including Huffman, LZW, and their variants; trends in information management; index creation and compression; image compression; performance issues; and overall system implementation./ Morgan Kaufmann 15-05-1999 07-02-2007 Ian H. Witten/ Alistair Moffat/ Timothy C. Bell 519 +book 9780596101190 0596101198 us Open Source for the Enterprise: Managing Risks Reaping Rewards Open Source for the Enterprise: Managing Risks Reaping Rewards Paperback/ Illustrated Technical Support/ Programming/ Risks/ Linux $22.95 $12.50 5 Open source software is changing the world of Information Technology. But making it work for your company is far more complicated than simply installing a copy of Linux. 
If you are serious about using open source to cut costs, accelerate development, and reduce vendor lock-in, you must institutionalize skills and create new ways of working. You must understand how open source is different from commercial software and what responsibilities and risks it brings. Open Source for the Enterprise is a sober guide to putting open source to work in the modern IT department./ Open source software is software whose code is freely available to anyone who wants to change and redistribute it. New commercial support services, smaller licensing fees, increased collaboration, and a friendlier platform to sell products and services are just a few of the reasons open source is so attractive to IT departments. Some of the open source projects that are in current, widespread use in businesses large and small include Linux, FreeBSD, Apache, MySQL, PostgreSQL, JBOSS, and Perl. These have been used to such great effect by Google, Amazon, Yahoo!, and major commercial and financial firms, that a wave of publicity has resulted in recent years, bordering on hype. Large vendors such as IBM, Novell, and Hewlett Packard have made open source a lynchpin of their offerings. Open source has entered a new area where it is being used as a marketing device, a collaborative software development methodology, and a business model./ This book provides something far more valuable than either the cheerleading or the fear-mongering one hears about open source. The authors are Dan Woods, former CTO of TheStreet.com and a consultant and author of several books about IT, and Gautam Guliani, Director of Software Architecture at Kaplan Test Prep & Admissions. Each has used open source software for some 15 years at IT departments large and small. 
They have collected the wisdom of a host of experts from IT departments, open source communities, and software companies./ Open Source for the Enterprise provides a top to bottom view not only of the technology, but of the skills required to manage it and the organizational issues that must be addressed. Here are the sorts of questions answered in the book:/ • Why is there a "productization gap" in most open source projects?/ • How can the maturity of open source be evaluated?/ • How can the ROI of open source be calculated?/ • What skills are needed to use open source?/ • What sorts of open source projects are appropriate for IT departments at the beginner, intermediate, advanced, and expert levels?/ • What questions need to be answered by an open source strategy?/ • What policies for governance can be instituted to control the adoption of open source?/ • What new commercial services can help manage the risks of open source?/ • Do differences in open source licenses matter?/ • How will using open source transform an IT department?/ Praise for Open Source for the Enterprise: "Open Source has become a strategic business issue; decisions on how and where to choose to use Open Source now have a major impact on the overall direction of IT abilities to support the business both with capabilities and by controlling costs. This is a new game and one generally not covered in existing books on Open Source which continue to assume that the readers are 'deep dive' technologists, Open Source for the Enterprise provides everyone from business managers to technologists with the balanced view that has been missing. Well worth the time to read, and also worth encouraging others in your enterprise to read as well." ----Andy Mulholland - Global CTO Capgemini/ "Open Source for the Enterprise is required reading for anyone working with or looking to adopt open source technologies in a corporate environment. 
Its practical, no-BS approach will make sure you're armed with the information you need to deploy applications successfully (as well as helping you know when to say "no"). If you're trying to sell open source to management, this book will give you the ammunition you need. If you're a manager trying to drive down cost using open source, this book will tell you what questions to ask your staff. In short, it's a clear, concise explanation of how to successfully leverage open source without making the big mistakes that can get you fired." ----Kevin Bedell - founding editor of LinuxWorld Magazine/ O'Reilly Media 27-07-2005 08-02-2007 Dan Woods/ Gautam Guliani 217 +movie 0786936259223 B00030590I us Hero Hero DVD China/ Leung Chiu Wai, Tony/ Jet Li $19.99 $5.49 4 Master filmmaker Quentin Tarantino presents HERO -- starring martial arts legend Jet Li in a visually stunning martial arts epic where a fearless warrior rises up to defy an empire and unite a nation! With supernatural skill ... and no fear ... a nameless soldier (Jet Li) embarks on a mission of revenge against the fearsome army that massacred his people. Now, to achieve the justice he seeks, he must take on the empire's most ruthless assassins and reach the enemy he has sworn to defeat! Acclaimed by critics and honored with numerous awards, HERO was nominated for both an Oscar® (2002 Best Foreign Language Film)and Golden Globe! 
Miramax 30-11-2004 07-02-2007 5 Yimou Zhang Jet Li/ Tony Leung Chiu Wai/ Maggie Cheung/ Ziyi Zhang/ Daoming Chen/ Donnie Yen/ Liu Zhong Yuan/ Zheng Tia Yong/ Yan Qin/ Chang Xiao Yang/ Zhang Ya Kun/ Ma Wen Hua/ Jin Ming/ Xu Kuang Hua/ Wang Shou Xin/ Hei Zi/ Cao Hua/ Li Lei/ Xia Bin/ Peng Qiang/ Zhang Yimou Closed-captioned/ Color/ Dolby/ Dubbed/ Subtitled/ Widescreen/ NTSC/ 2.35:1 PG-13 27-08-2004 99 +book 0752064712350 0735712352 us Cocoon: Building XML Applications Cocoon: Building XML Applications Paperback Software Development/ HTML - General/ XML/ Combinatorics/ jp-unknown1 $39.99 $2.30 4 Cocoon: Building XML Applications is the guide to the Apache Cocoon project. The book contains the much needed documentation on the Cocoon project, but it does not limit itself to just being a developer's handbook. The book motivates the use of XML and XML software (in particular open source software). It contains everything a beginner needs to get going with Cocoon as well as the detailed information a developer needs to develop new and exciting components to extend the XML publishing framework. Although each chapter builds upon the previous ones, the book is designed so that the chapters can also be read as individual guides to the topics they discuss. Varied "hands-on" examples are used to make the underlying concepts and technologies absolutely clear to anyone starting out with Cocoon. Chapters that detail the author's experience in building Internet applications are used to embed Cocoon into the "real world" and complete the picture. 
[md]Matthew Langham and Carsten Ziegeler Sams 24-07-2002 07-02-2007 Carsten Ziegeler/ Matthew Langham 504 \ No newline at end of file diff --git a/solr/client/ruby/solr-ruby/examples/marc/marc_importer.rb b/solr/client/ruby/solr-ruby/examples/marc/marc_importer.rb new file mode 100755 index 00000000000..f4f7376d6f8 --- /dev/null +++ b/solr/client/ruby/solr-ruby/examples/marc/marc_importer.rb @@ -0,0 +1,106 @@ +#!/usr/bin/env ruby +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
require 'marc'
require 'solr'

# Imports a file of MARC bibliographic records into a Solr index.
#
# Usage: marc_importer.rb <marc_file[.gz]> [-debug]
#
# With -debug, documents are printed to stdout instead of being sent
# to Solr.

solr_url = ENV["SOLR_URL"] || "http://localhost:8983/solr"
marc_filename = ARGV[0]
file_number = marc_filename.scan(/\d\d/)  # all two-digit runs in the filename; used as a facet and temp-file tag
debug = ARGV[1] == "-debug"

$KCODE = 'UTF8'  # Ruby 1.8: treat strings as UTF-8

mapping = {
  # :solr_field_name => String
  # :solr_field_name => Array of Strings
  # :solr_field_name => Proc [Proc operates on record]
  # String = 3 digit control field number or 3 digit data field number + subfield letter

  :id => '001',
  :subject_genre_facet => ['600v', '610v', '611v', '650v', '651v', '655a'],
  :subject_era_facet => ['650d', '650y', '651y', '655y'],
  :subject_topic_facet => ['650a', '650b', '650x'],
  :subject_geographic_facet => ['650c', '650z', '651a', '651x', '651z', '655z'],
  # publication year(s): every 4-digit run found in MARC 260$c
  :year_facet => Proc.new do |r|
    extract_record_data(r, '260c').collect {|f| f.scan(/\d\d\d\d/)}.flatten
  end,
  :title_text => '245a',
  :author_text => '100a',
  :call_number_text => '050a',
  # NOTE(review): MARC 010$a is the LCCN; the ISBN normally lives in
  # 020$a.  Left as-is to preserve existing index contents -- confirm intent.
  :isbn_text => '010a',
  :filename_facet => Proc.new {|r| file_number},
}

connection = Solr::Connection.new(solr_url)

# Transparently handle gzipped input: copy aside and gunzip in /tmp.
# (Bug fix: this previously tested /.gz$/ -- the unescaped dot matched
# any character, so e.g. "data.tgz" or "foogz" were treated as gzip.)
if marc_filename =~ /\.gz$/
  puts "Unzipping data file..."
  temp_filename = "/tmp/marc_data_#{file_number}.mrc"
  system("cp #{marc_filename} #{temp_filename}.gz")
  system("gunzip #{temp_filename}")
  marc_filename = temp_filename
end

reader = MARC::Reader.new(marc_filename)
count = 0

# Pulls raw values out of a MARC record.
#
# record - a MARC::Record
# fields - array of specs: a 3-digit control-field tag ("001") or a
#          3-digit data-field tag plus a subfield letter ("245a")
#
# Returns a de-duplicated array of the extracted, non-nil values.
def extract_record_data(record, fields)
  extracted_data = []

  fields.each do |field|
    tag = field[0,3]

    extracted_fields = record.find_all {|f| f.tag === tag}

    extracted_fields.each do |field_instance|
      if tag < '010' # tags below "010" are control fields (no subfields)
        extracted_data << field_instance.value rescue nil
      else # data field: pick out the requested subfield
        subfield = field[3].chr
        extracted_data << field_instance[subfield] rescue nil
      end
    end
  end

  extracted_data.compact.uniq
end

puts "Indexing #{marc_filename}..."
for record in reader
  doc = {}
  mapping.each do |key, value|
    data = nil
    case value
    when Proc
      data = value.call(record)

    when String, Array
      data = extract_record_data(record, value)
      data = nil if data.empty?
    end

    doc[key] = data if data
  end

  puts doc.inspect, "------" if debug

  connection.send(Solr::Request::AddDocument.new(doc)) unless debug

  count += 1

  puts count if count % 100 == 0
end

connection.send(Solr::Request::Commit.new) unless debug
puts "Done"
require 'hpricot'
require 'solr'

# Imports the Tang poems etext (an HTML file of <blockquote> runs)
# into Solr.
#
# Usage: tang_importer.rb <html_file> [-debug]

solr_url = ENV["SOLR_URL"] || "http://localhost:8983/solr"
debug = ARGV[1] == "-debug"

solr = Solr::Connection.new(solr_url)

html = Hpricot(open(ARGV[0]))
max = 320  # number of numbered sections expected in the source document

# Walks forward through siblings to the next <blockquote> element.
# Raises a descriptive error if the sibling chain ends first
# (bug fix: this previously crashed with NoMethodError on nil).
def next_blockquote(elem)
  elem = elem.next_sibling
  until elem.nil? || elem.name == "blockquote"
    elem = elem.next_sibling
  end
  raise "no following <blockquote> sibling found" if elem.nil?

  elem
end

for current_index in (1..max) do
  # Each poem is eight consecutive blockquotes after a zero-padded
  # section-number blockquote: zh type/author/title/body, then the
  # same four fields in English.
  section_start = html.at("//blockquote[text()='#{format('%03d',current_index)}']")
  type_zh = next_blockquote(section_start)
  author_zh = next_blockquote(type_zh)
  title_zh = next_blockquote(author_zh)
  body_zh = next_blockquote(title_zh)

  type_en = next_blockquote(body_zh)
  author_en = next_blockquote(type_en)
  title_en = next_blockquote(author_en)
  body_en = next_blockquote(title_en)
  doc = {:type_zh_facet => type_zh, :author_zh_facet => author_zh, :title_zh_text => title_zh, :body_zh_text => body_zh,
         :type_en_facet => type_en, :author_en_facet => author_en, :title_en_text => title_en, :body_en_text => body_en
  }
  doc.each {|k,v| doc[k] = v.inner_text}  # flatten elements to their text content
  doc[:id] = current_index # TODO: namespace the id, something like "etext_tang:#{current_index}"
  doc[:source_facet] = 'etext_tang'
  doc[:language_facet] = ['chi','eng']

  puts "----",doc[:id],doc[:title_en_text],doc[:author_en_facet],doc[:type_en_facet]
#  puts doc.inspect if debug
  solr.add doc unless debug
end

solr.commit unless debug
#solr.optimize unless debug
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +module Solr; end +require 'solr/exception' +require 'solr/request' +require 'solr/connection' +require 'solr/response' +require 'solr/util' +require 'solr/xml' +require 'solr/importer' +require 'solr/indexer' diff --git a/solr/client/ruby/solr-ruby/lib/solr/connection.rb b/solr/client/ruby/solr-ruby/lib/solr/connection.rb new file mode 100755 index 00000000000..6e1aadcd1ea --- /dev/null +++ b/solr/client/ruby/solr-ruby/lib/solr/connection.rb @@ -0,0 +1,179 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
require 'net/http'

# TODO: add a convenience method to POST a Solr .xml file, like Solr's example post.sh

# An HTTP connection to a Solr instance.  Wraps a persistent
# Net::HTTP object and knows how to POST Solr::Request objects and
# wrap the replies in Solr::Response objects.
class Solr::Connection
  attr_reader :url, :autocommit, :connection

  # create a connection to a solr instance using the url for the solr
  # application context:
  #
  #   conn = Solr::Connection.new("http://example.com:8080/solr")
  #
  # if you would prefer to have all adds/updates autocommitted,
  # use :autocommit => :on
  #
  #   conn = Solr::Connection.new('http://example.com:8080/solr',
  #     :autocommit => :on)
  #
  # an optional :timeout (seconds) sets the HTTP read timeout.
  def initialize(url="http://localhost:8983/solr", opts={})
    @url = URI.parse(url)
    unless @url.kind_of? URI::HTTP
      raise "invalid http url: #{url}"
    end

    # TODO: Autocommit seems nice at one level, but it currently is confusing because
    # only calls to Connection#add/#update/#delete autocommit, though a
    # Connection#send(AddDocument.new(...)) does not.  Maybe #send should check for the
    # request types that require a commit and commit in #send instead of the individual methods?
    @autocommit = opts[:autocommit] == :on

    # Not actually opening the connection yet, just setting up the persistent connection.
    @connection = Net::HTTP.new(@url.host, @url.port)

    @connection.read_timeout = opts[:timeout] if opts[:timeout]
  end

  # add a document to the index. you can pass in either a hash
  #
  #   conn.add(:id => 123, :title => 'Tlon, Uqbar, Orbis Tertius')
  #
  # or a Solr::Document
  #
  #   conn.add(Solr::Document.new(:id => 123, :title => 'On Writing'))
  #
  # true/false will be returned to designate success/failure
  def add(doc)
    request = Solr::Request::AddDocument.new(doc)
    response = send(request)
    commit if @autocommit
    return response.ok?
  end

  # update a document in the index (really just an alias to add)
  def update(doc)
    return add(doc)
  end

  # performs a standard query and returns a Solr::Response::Standard
  #
  #   response = conn.query('borges')
  #
  # alternatively you can pass in a block and iterate over hits
  #
  #   conn.query('borges') do |hit|
  #     puts hit
  #   end
  #
  # options include:
  #
  #   :sort, :default_field, :rows, :filter_queries, :debug_query,
  #   :explain_other, :facets, :highlighting, :mlt,
  #   :operator    => :or / :and
  #   :start       => defaults to 0
  #   :field_list  => array, defaults to ["*", "score"]
  def query(query, options={}, &action)
    # TODO: Shouldn't this return an exception if the Solr status is not ok?  (rather than true/false).
    # Bug fix: use non-destructive merge so the caller's options hash is
    # not mutated (options.update would add :query to it as a side effect).
    create_and_send_query(Solr::Request::Standard, options.merge(:query => query), &action)
  end

  # performs a dismax search and returns a Solr::Response::Standard
  #
  #   response = conn.search('borges')
  #
  # options are same as query, but also include:
  #
  #   :tie_breaker, :query_fields, :minimum_match, :phrase_fields,
  #   :phrase_slop, :boost_query, :boost_functions
  def search(query, options={}, &action)
    # Bug fix: merge, not update -- see #query.
    create_and_send_query(Solr::Request::Dismax, options.merge(:query => query), &action)
  end

  # sends a commit message to the server
  def commit(options={})
    response = send(Solr::Request::Commit.new(options))
    return response.ok?
  end

  # sends an optimize message to the server
  def optimize
    response = send(Solr::Request::Optimize.new)
    return response.ok?
  end

  # pings the connection and returns true/false if it is alive or not
  def ping
    begin
      response = send(Solr::Request::Ping.new)
      return response.ok?
    rescue
      return false
    end
  end

  # delete a document from the index using the document id
  def delete(document_id)
    response = send(Solr::Request::Delete.new(:id => document_id))
    commit if @autocommit
    response.ok?
  end

  # delete using a query
  def delete_by_query(query)
    response = send(Solr::Request::Delete.new(:query => query))
    commit if @autocommit
    response.ok?
  end

  # returns index information (Solr::Request::IndexInfo response)
  def info
    send(Solr::Request::IndexInfo.new)
  end

  # send a given Solr::Request and return a RubyResponse or XmlResponse
  # depending on the type of request
  # NOTE(review): this shadows Object#send for non-Request arguments too.
  def send(request)
    data = post(request)
    Solr::Response::Base.make_response(request, data)
  end

  # send the http post request to solr; for convenience there are shortcuts
  # to some requests: add(), query(), commit(), delete() or send()
  def post(request)
    response = @connection.post(@url.path + "/" + request.handler,
                                request.to_s,
                                { "Content-Type" => request.content_type })

    case response
    when Net::HTTPSuccess then response.body
    else
      response.error!
    end

  end

private

  # Builds a request of the given class, sends it, and either returns
  # the response or yields each hit to the supplied block.
  def create_and_send_query(klass, options = {}, &action)
    request = klass.new(options)
    response = send(request)
    return response unless action
    response.each {|hit| action.call(hit)}
  end

end
require 'solr/xml'
require 'solr/field'

# A collection of Solr::Field objects bound for the index, with an
# optional document-level boost.
class Solr::Document
  include Enumerable
  attr_accessor :boost

  # Build a document, optionally seeding it from a hash of
  # field-name => value pairs:
  #
  #   doc = Solr::Document.new(:creator => 'Jorge Luis Borges')
  def initialize(hash={})
    @fields = []
    self << hash
  end

  # Append either a Solr::Field or a hash of field-name => value pairs:
  #
  #   doc << Solr::Field.new(:creator => 'Jorge Luis Borges')
  #   doc << {:creator => 'Jorge Luis Borges'}
  #
  # A non-String enumerable value expands into one field per element.
  def <<(fields)
    if fields.is_a?(Hash)
      fields.each do |field_name, field_value|
        if !field_value.is_a?(String) && field_value.respond_to?(:each)
          field_value.each {|item| @fields << Solr::Field.new(field_name => item)}
        else
          @fields << Solr::Field.new(field_name => field_value)
        end
      end
    elsif fields.is_a?(Solr::Field)
      @fields << fields
    else
      raise "must pass in Solr::Field or Hash"
    end
  end

  # Hash-style read access by field name:
  #   doc['name']
  # Returns the value of the first matching field, or nil.
  def [](name)
    wanted = name.to_s
    match = @fields.find {|f| f.name == wanted}
    if match
      return match.value
    end
    return nil
  end

  # Hash-style assignment.  Note this *appends* a new field; it does
  # not replace an existing field with the same name.
  def []=(name, value)
    @fields << Solr::Field.new(name => value)
  end

  # Render the document as a <doc> element (Solr::XML::Element),
  # carrying the boost attribute when one is set.
  def to_xml
    doc_elem = Solr::XML::Element.new 'doc'
    doc_elem.attributes['boost'] = @boost.to_s if @boost
    @fields.each {|f| doc_elem.add_element(f.to_xml)}
    return doc_elem
  end
end
# Base class for solr-ruby errors.
class Solr::Exception < Exception; end

require 'solr/xml'
require 'time'

# A single named field value plus an optional index-time boost.
class Solr::Field
  VALID_PARAMS = [:boost]
  attr_accessor :name
  attr_accessor :value
  attr_accessor :boost

  # Accepts a single name => value pair, plus an optional :boost
  # parameter used to boost the relevance of this particular field.
  def initialize(params)
    @boost = params[:boost]
    name_key = (params.keys - VALID_PARAMS).first
    @name = name_key.to_s
    raw_value = params[name_key]
    # Solr requires Time values in UTC / XML-schema format; anything
    # else is stringified.
    if raw_value.respond_to?(:utc)
      @value = raw_value.utc.xmlschema
    else
      @value = raw_value.to_s
    end
  end

  # Render as a <field name="..."> element (Solr::XML::Element),
  # carrying the boost attribute when one is set.
  def to_xml
    field_elem = Solr::XML::Element.new 'field'
    field_elem.attributes['name'] = @name
    field_elem.attributes['boost'] = @boost.to_s if @boost
    field_elem.text = @value
    return field_elem
  end

end
# Maps an array of source records using a parallel array of mappers,
# merging each per-record result hash into a single combined hash.
class Solr::Importer::ArrayMapper < Solr::Importer::Mapper
  # TODO document that initializer takes an array of Mappers [mapper1, mapper2, ... mapperN]

  # TODO: make merge conflict handling configurable. as is, the last map fields win.
  def map(orig_data_array)
    merged = {}
    orig_data_array.each_with_index do |record, index|
      merged.update(@mapping[index].map(record))
    end
    merged
  end
end
# Enumerates the rows of a delimited text file whose first line names
# the fields. Currently not designed for enormous files: every line is
# read into memory at once.
class Solr::Importer::DelimitedFileSource
  include Enumerable

  def initialize(filename, splitter=/\t/)
    @filename = filename
    @splitter = splitter
  end

  # Yields one row at a time as an array of [header, value] pairs,
  # augmented with a hash-like [] that looks values up by field name.
  def each
    rows = IO.readlines(@filename)
    headers = rows[0].split(@splitter).map { |h| h.chomp }

    rows[1..-1].each do |row|
      pairs = headers.zip(row.split(@splitter).map { |cell| cell.chomp })
      # Singleton accessor so callers can write row[:field_name].
      def pairs.[](key)
        self.assoc(key.to_s)[1]
      end

      yield(pairs)
    end
  end

end
# Translates one source record into a hash of Solr field name => value,
# driven by the mapping hash supplied at construction time.
class Solr::Importer::Mapper
  def initialize(mapping, options={})
    @mapping = mapping
    @options = options
  end

  # How a single named field is extracted from the source record;
  # subclasses override this for other record shapes (XML, HTML, ...).
  def field_data(orig_data, field_name)
    orig_data[field_name]
  end

  # Resolve one mapping entry against a record:
  #   String     -> used literally
  #   Proc       -> called with the record
  #   Symbol     -> looked up via field_data (stringified when the
  #                 :stringify_symbols option is set)
  #   Enumerable -> each entry resolved recursively, results flattened
  def mapped_field_value(orig_data, field_mapping)
    if field_mapping.is_a?(String)
      field_mapping
    elsif field_mapping.is_a?(Proc)
      field_mapping.call(orig_data) # TODO pass in more context, like self or a function for field_data, etc
    elsif field_mapping.is_a?(Symbol)
      lookup_key = @options[:stringify_symbols] ? field_mapping.to_s : field_mapping
      field_data(orig_data, lookup_key)
    elsif field_mapping.is_a?(Enumerable)
      field_mapping.map { |name| mapped_field_value(orig_data, name) }.flatten
    else
      raise "Unknown mapping for #{field_mapping}"
    end
  end

  # Build the mapped hash; entries whose resolved value is nil or false
  # are omitted.
  def map(orig_data)
    result = {}
    @mapping.each do |solr_name, field_mapping|
      value = mapped_field_value(orig_data, field_mapping)
      result[solr_name] = value if value
    end
    result
  end

end
require 'solr'

# Enumerates the documents matching a query against an existing Solr
# index, paging through the results page_size documents at a time.
class Solr::Importer::SolrSource
  def initialize(solr_url, query, filter_queries=nil, options={})
    @connection = Solr::Connection.new(solr_url)
    @query = query
    @filter_queries = filter_queries

    @page_size = options[:page_size] || 1000
    @field_list = options[:field_list] || ["*"]
  end

  # Yields each matching document hash in turn, issuing one request
  # per page until the total hit count is exhausted.
  def each
    offset = 0
    loop do
      # request the next page of documents from the current offset
      request = Solr::Request::Standard.new(:query => @query,
                                            :rows => @page_size,
                                            :start => offset,
                                            :field_list => @field_list,
                                            :filter_queries => @filter_queries)
      response = @connection.send(request)
      response.each do |doc|
        yield doc # TODO: perhaps convert to HashWithIndifferentAccess.new(doc), so stringify_keys isn't necessary
      end
      offset += @page_size
      break if offset >= response.total_hits
    end
  end
end
# Drives an import: pulls records from a data source, maps each record
# to a document hash, and posts the documents to Solr in batches.
class Solr::Indexer
  attr_reader :solr

  # TODO: document options!
  def initialize(data_source, mapper_or_mapping, options={})
    solr_url = options[:solr_url] || ENV["SOLR_URL"] || "http://localhost:8983/solr"
    @solr = Solr::Connection.new(solr_url, options) #TODO - these options contain the solr_url and debug keys also, so tidy up what gets passed

    @data_source = data_source
    if mapper_or_mapping.is_a?(Hash)
      @mapper = Solr::Importer::Mapper.new(mapper_or_mapping)
    else
      @mapper = mapper_or_mapping
    end

    @buffer_docs = options[:buffer_docs]
    @debug = options[:debug]
  end

  # Map and index every record, flushing a batch every @buffer_docs
  # documents (or per document when no buffering was requested), then
  # commit — unless running in debug mode.
  def index
    pending = []
    @data_source.each do |record|
      document = @mapper.map(record)

      # TODO: check arity of block, if 3, pass counter as 3rd argument
      yield(record, document) if block_given? # TODO check return of block, if not true then don't index, or perhaps if document.empty?

      pending << document

      if !@buffer_docs || pending.size == @buffer_docs
        add_docs(pending)
        pending.clear
      end
    end
    add_docs(pending) unless pending.empty?

    @solr.commit unless @debug
  end

  # Post one batch of documents; in debug mode only print them instead.
  def add_docs(documents)
    @solr.add(documents) unless @debug
    puts documents.inspect if @debug
  end
end
+ +module Solr; module Request; end; end +require 'solr/request/add_document' +require 'solr/request/modify_document' +require 'solr/request/base' +require 'solr/request/commit' +require 'solr/request/delete' +require 'solr/request/ping' +require 'solr/request/select' +require 'solr/request/standard' +require 'solr/request/spellcheck' +require 'solr/request/dismax' +require 'solr/request/update' +require 'solr/request/index_info' +require 'solr/request/optimize' diff --git a/solr/client/ruby/solr-ruby/lib/solr/request/add_document.rb b/solr/client/ruby/solr-ruby/lib/solr/request/add_document.rb new file mode 100644 index 00000000000..bb3d018f526 --- /dev/null +++ b/solr/client/ruby/solr-ruby/lib/solr/request/add_document.rb @@ -0,0 +1,63 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
require 'solr/xml'
require 'solr/request/base'
require 'solr/document'
require 'solr/request/update'

class Solr::Request::AddDocument < Solr::Request::Update

  # Create the request from a Solr::Document, a Hash, or an Array of
  # either (to add several documents in one request):
  #
  #   request = Solr::Request::AddDocument.new doc
  #   request = Solr::Request::AddDocument.new :creator => 'Jorge Luis Borges'
  #   request = Solr::Request::AddDocument.new([doc1, doc2, doc3])
  def initialize(doc={})
    @docs = []
    if doc.is_a?(Array)
      doc.each { |d| add_doc(d) }
    else
      add_doc(doc)
    end
  end

  # Serialize the request to an <add> XML string suitable for posting.
  def to_s
    add_element = Solr::XML::Element.new 'add'
    @docs.each { |doc| add_element.add_element doc.to_xml }
    add_element.to_s
  end

  private

  # Normalize a Hash into a Solr::Document; reject anything else.
  def add_doc(doc)
    if doc.is_a?(Solr::Document)
      @docs << doc
    elsif doc.is_a?(Hash)
      @docs << Solr::Document.new(doc)
    else
      raise "must pass in Solr::Document or Hash"
    end
  end

end
require 'solr/xml'

class Solr::Request::Commit < Solr::Request::Update

  # Accepts :wait_searcher and :wait_flush options, both defaulting to
  # true.
  #
  # BUGFIX: the original used `options[:wait_searcher] || true`, which
  # coerces an explicit `false` back to true — so waitSearcher="false"
  # and waitFlush="false" could never be produced. A nil-check is used
  # instead so a caller-supplied false is honored.
  def initialize(options={})
    @wait_searcher = options[:wait_searcher].nil? ? true : options[:wait_searcher]
    @wait_flush = options[:wait_flush].nil? ? true : options[:wait_flush]
  end

  # Serialize to a <commit waitSearcher="..." waitFlush="..."/> string.
  def to_s
    e = Solr::XML::Element.new('commit')
    e.attributes['waitSearcher'] = @wait_searcher ? 'true' : 'false'
    e.attributes['waitFlush'] = @wait_flush ? 'true' : 'false'

    e.to_s
  end

end
class Solr::Request::Dismax < Solr::Request::Standard

  # NOTE: this deliberately mutates Standard's VALID_PARAMS array in
  # place, so the superclass' parameter validation also accepts the
  # dismax-only keys below.
  VALID_PARAMS.replace(VALID_PARAMS + [:tie_breaker, :query_fields, :minimum_match, :phrase_fields, :phrase_slop,
    :boost_query, :boost_functions])

  def initialize(params)
    # Pull these keys out before calling super, since the superclass
    # would otherwise process (or reject) them.
    @alternate_query = params.delete(:alternate_query)
    @sort_values = params.delete(:sort)

    super

    @query_type = "dismax"
  end

  # Translate friendly option names into dismax request parameters
  # (tie, mm, qf, pf, ps, bq, bf, q.alt) on top of the superclass hash.
  def to_hash
    hash = super
    hash[:tie] = @params[:tie_breaker]
    hash[:mm] = @params[:minimum_match]
    hash[:qf] = @params[:query_fields]
    hash[:pf] = @params[:phrase_fields]
    hash[:ps] = @params[:phrase_slop]
    hash[:bq] = @params[:boost_query]
    hash[:bf] = @params[:boost_functions]
    hash["q.alt"] = @alternate_query
    # FIXME: 2007-02-13 -- This code is duplicated in
    # Solr::Request::Standard. It should be refactored into a single location.
    hash[:sort] = @sort_values.collect do |sort|
      key = sort.keys[0]
      "#{key.to_s} #{sort[key] == :descending ? 'desc' : 'asc'}"
    end.join(',') if @sort_values
    return hash
  end

end
require 'solr/xml'
require 'solr/request/base'
require 'solr/document'
require 'solr/request/update'

class Solr::Request::ModifyDocument < Solr::Request::Update

  # Example: ModifyDocument.new(:id => 10, :overwrite => {:field_name => "new value"})
  #
  # NOTE: destructively consumes update_data — each recognized mode key
  # is deleted as it is processed, and exactly one leftover key (the
  # document id) is assumed to remain at the end.
  def initialize(update_data)
    modes = []
    @doc = {}
    [:overwrite, :append, :distinct, :increment, :delete].each do |mode|
      field_data = update_data[mode]
      if field_data
        field_data.each do |field_name, field_value|
          # accumulate "field:MODE" entries for the update handler URL
          modes << "#{field_name}:#{mode.to_s.upcase}"
          @doc[field_name] = field_value if field_value # if value is nil, omit so it can be removed
        end
        update_data.delete mode
      end
    end
    @mode = modes.join(",")

    # only one key should be left over, the id
    @doc[update_data.keys[0].to_s] = update_data.values[0]
  end

  # returns the request as a string suitable for posting
  def to_s
    e = Solr::XML::Element.new 'add'
    e.add_element(Solr::Document.new(@doc).to_xml)
    return e.to_s
  end

  # Handler URL fragment carrying the accumulated field:MODE pairs.
  def handler
    "update?mode=#{@mode}"
  end

end
# TODO: Consider something lazy like this?
# Solr::Request::Ping = Solr::Request.simple_request :format=>:xml, :handler=>'admin/ping'
# class Solr::Request
#   def self.simple_request(options)
#     Class.new do
#       def response_format
#         options[:format]
#       end
#       def handler
#         options[:handler]
#       end
#     end
#   end
# end

# A trivial health-check request against Solr's admin ping handler.
class Solr::Request::Ping < Solr::Request::Base
  # Ping responses come back as XML (not the ruby eval format).
  def response_format
    :xml
  end

  def handler
    'admin/ping'
  end
end
require 'erb'

# "Abstract" base class, only useful with subclasses that add parameters
class Solr::Request::Select < Solr::Request::Base

  attr_reader :query_type

  def initialize(qt=nil, params={})
    @query_type = qt
    @select_params = params
  end

  # Responses are requested in Solr's ruby (eval-able) format.
  def response_format
    :ruby
  end

  def handler
    'select'
  end

  def content_type
    'application/x-www-form-urlencoded; charset=utf-8'
  end

  # Base parameter hash: handler name plus the ruby writer type, merged
  # with whatever the subclass supplied.
  def to_hash
    {:qt => query_type, :wt => 'ruby'}.merge(@select_params)
  end

  # URL-encode the parameter hash into a form-encoded query string.
  # Non-string enumerable values become one key=value pair per element;
  # nil values are dropped entirely.
  def to_s
    encoded_pairs = []
    self.to_hash.each do |key, value|
      if !value.is_a?(String) && value.respond_to?(:each)
        value.each { |v| encoded_pairs << "#{key}=#{ERB::Util::url_encode(v)}" unless v.nil? }
      else
        encoded_pairs << "#{key}=#{ERB::Util::url_encode(value)}" unless value.nil?
      end
    end

    encoded_pairs.join("&")
  end

end
class Solr::Request::Spellcheck < Solr::Request::Select

  def initialize(params)
    super('spellchecker')
    @params = params
  end

  # Translate friendly option names into the spellchecker handler's
  # request parameters on top of the superclass hash.
  def to_hash
    result = super
    result[:q] = @params[:query]
    result[:suggestionCount] = @params[:suggestion_count]
    result[:accuracy] = @params[:accuracy]
    result[:onlyMorePopular] = @params[:only_more_popular]
    result[:cmd] = @params[:command]
    result
  end

end
+ +class Solr::Request::Standard < Solr::Request::Select + + VALID_PARAMS = [:query, :sort, :default_field, :operator, :start, :rows, :shards, + :filter_queries, :field_list, :debug_query, :explain_other, :facets, :highlighting, :mlt] + + def initialize(params) + super('standard') + + raise "Invalid parameters: #{(params.keys - VALID_PARAMS).join(',')}" unless + (params.keys - VALID_PARAMS).empty? + + raise ":query parameter required" unless params[:query] + + @params = params.dup + + # Validate operator + if params[:operator] + raise "Only :and/:or operators allowed" unless + [:and, :or].include?(params[:operator]) + + @params[:operator] = params[:operator].to_s.upcase + end + + # Validate start, rows can be transformed to ints + @params[:start] = params[:start].to_i if params[:start] + @params[:rows] = params[:rows].to_i if params[:rows] + + @params[:field_list] ||= ["*","score"] + + @params[:shards] ||= [] + end + + def to_hash + hash = {} + + # standard request param processing + hash[:sort] = @params[:sort].collect do |sort| + key = sort.keys[0] + "#{key.to_s} #{sort[key] == :descending ? 'desc' : 'asc'}" + end.join(',') if @params[:sort] + hash[:q] = @params[:query] + hash["q.op"] = @params[:operator] + hash[:df] = @params[:default_field] + + # common parameter processing + hash[:start] = @params[:start] + hash[:rows] = @params[:rows] + hash[:fq] = @params[:filter_queries] + hash[:fl] = @params[:field_list].join(',') + hash[:debugQuery] = @params[:debug_query] + hash[:explainOther] = @params[:explain_other] + hash[:shards] = @params[:shards].join(',') unless @params[:shards].empty? 
+ + # facet parameter processing + if @params[:facets] + # TODO need validation of all that is under the :facets Hash too + hash[:facet] = true + hash["facet.field"] = [] + hash["facet.query"] = @params[:facets][:queries] + hash["facet.sort"] = (@params[:facets][:sort] == :count) if @params[:facets][:sort] + hash["facet.limit"] = @params[:facets][:limit] + hash["facet.missing"] = @params[:facets][:missing] + hash["facet.mincount"] = @params[:facets][:mincount] + hash["facet.prefix"] = @params[:facets][:prefix] + hash["facet.offset"] = @params[:facets][:offset] + hash["facet.method"] = @params[:facets][:method] if @params[:facets][:method] + if @params[:facets][:fields] # facet fields are optional (could be facet.query only) + @params[:facets][:fields].each do |f| + if f.kind_of? Hash + key = f.keys[0] + value = f[key] + hash["facet.field"] << key + hash["f.#{key}.facet.sort"] = (value[:sort] == :count) if value[:sort] + hash["f.#{key}.facet.limit"] = value[:limit] + hash["f.#{key}.facet.missing"] = value[:missing] + hash["f.#{key}.facet.mincount"] = value[:mincount] + hash["f.#{key}.facet.prefix"] = value[:prefix] + hash["f.#{key}.facet.offset"] = value[:offset] + else + hash["facet.field"] << f + end + end + end + end + + # highlighting parameter processing - http://wiki.apache.org/solr/HighlightingParameters + if @params[:highlighting] + hash[:hl] = true + hash["hl.fl"] = @params[:highlighting][:field_list].join(',') if @params[:highlighting][:field_list] + + snippets = @params[:highlighting][:max_snippets] + if snippets + if snippets.kind_of? Hash + if snippets[:default] + hash["hl.snippets"] = snippets[:default] + end + if snippets[:fields] + snippets[:fields].each do |k,v| + hash["f.#{k}.hl.snippets"] = v + end + end + else + hash["hl.snippets"] = snippets + end + end + + fragsize = @params[:highlighting][:fragment_size] + if fragsize + if fragsize.kind_of? 
Hash + if fragsize[:default] + hash["hl.fragsize"] = fragsize[:default] + end + if fragsize[:fields] + fragsize[:fields].each do |k,v| + hash["f.#{k}.hl.fragsize"] = v + end + end + else + hash["hl.fragsize"] = fragsize + end + end + + rfm = @params[:highlighting][:require_field_match] + if nil != rfm + if rfm.kind_of? Hash + if nil != rfm[:default] + hash["hl.requireFieldMatch"] = rfm[:default] + end + if rfm[:fields] + rfm[:fields].each do |k,v| + hash["f.#{k}.hl.requireFieldMatch"] = v + end + end + else + hash["hl.requireFieldMatch"] = rfm + end + end + + mac = @params[:highlighting][:max_analyzed_chars] + if mac + if mac.kind_of? Hash + if mac[:default] + hash["hl.maxAnalyzedChars"] = mac[:default] + end + if mac[:fields] + mac[:fields].each do |k,v| + hash["f.#{k}.hl.maxAnalyzedChars"] = v + end + end + else + hash["hl.maxAnalyzedChars"] = mac + end + end + + prefix = @params[:highlighting][:prefix] + if prefix + if prefix.kind_of? Hash + if prefix[:default] + hash["hl.simple.pre"] = prefix[:default] + end + if prefix[:fields] + prefix[:fields].each do |k,v| + hash["f.#{k}.hl.simple.pre"] = v + end + end + else + hash["hl.simple.pre"] = prefix + end + end + + suffix = @params[:highlighting][:suffix] + if suffix + if suffix.kind_of? Hash + if suffix[:default] + hash["hl.simple.post"] = suffix[:default] + end + if suffix[:fields] + suffix[:fields].each do |k,v| + hash["f.#{k}.hl.simple.post"] = v + end + end + else + hash["hl.simple.post"] = suffix + end + end + + formatter = @params[:highlighting][:formatter] + if formatter + if formatter.kind_of? Hash + if formatter[:default] + hash["hl.formatter"] = formatter[:default] + end + if formatter[:fields] + formatter[:fields].each do |k,v| + hash["f.#{k}.hl.formatter"] = v + end + end + else + hash["hl.formatter"] = formatter + end + end + + fragmenter = @params[:highlighting][:fragmenter] + if fragmenter + if fragmenter.kind_of? 
Hash + if fragmenter[:default] + hash["hl.fragmenter"] = fragmenter[:default] + end + if fragmenter[:fields] + fragmenter[:fields].each do |k,v| + hash["f.#{k}.hl.fragmenter"] = v + end + end + else + hash["hl.fragmenter"] = fragmenter + end + end + + merge_contiguous = @params[:highlighting][:merge_contiguous] + if nil != merge_contiguous + if merge_contiguous.kind_of? Hash + if nil != merge_contiguous[:default] + hash["hl.mergeContiguous"] = merge_contiguous[:default] + end + if merge_contiguous[:fields] + merge_contiguous[:fields].each do |k,v| + hash["f.#{k}.hl.mergeContiguous"] = v + end + end + else + hash["hl.mergeContiguous"] = merge_contiguous + end + end + + increment = @params[:highlighting][:increment] + if increment + if increment.kind_of? Hash + if increment[:default] + hash["hl.increment"] = increment[:default] + end + if increment[:fields] + increment[:fields].each do |k,v| + hash["f.#{k}.hl.increment"] = v + end + end + else + hash["hl.increment"] = increment + end + end + + # support "old style" + alternate_fields = @params[:highlighting][:alternate_fields] + if alternate_fields + alternate_fields.each do |f,v| + hash["f.#{f}.hl.alternateField"] = v + end + end + + alternate_field = @params[:highlighting][:alternate_field] + if alternate_field + if alternate_field.kind_of? Hash + if alternate_field[:default] + hash["hl.alternateField"] = alternate_field[:default] + end + if alternate_field[:fields] + alternate_field[:fields].each do |k,v| + hash["f.#{k}.hl.alternateField"] = v + end + end + else + hash["hl.alternateField"] = alternate_field + end + end + + mafl = @params[:highlighting][:max_alternate_field_length] + if mafl + if mafl.kind_of? 
Hash + if mafl[:default] + hash["hl.maxAlternateFieldLength"] = mafl[:default] + end + if mafl[:fields] + mafl[:fields].each do |k,v| + hash["f.#{k}.hl.maxAlternateFieldLength"] = v + end + else + # support "old style" + mafl.each do |k,v| + hash["f.#{k}.hl.maxAlternateFieldLength"] = v + end + end + else + hash["hl.maxAlternateFieldLength"] = mafl + end + end + + hash["hl.usePhraseHighlighter"] = @params[:highlighting][:use_phrase_highlighter] + + regex = @params[:highlighting][:regex] + if regex + if regex[:slop] + if regex[:slop].kind_of? Hash + if regex[:slop][:default] + hash["hl.regex.slop"] = regex[:slop][:default] + end + if regex[:slop][:fields] + regex[:slop][:fields].each do |k,v| + hash["f.#{k}.hl.regex.slop"] = v + end + end + else + hash["hl.regex.slop"] = regex[:slop] + end + end + if regex[:pattern] + if regex[:pattern].kind_of? Hash + if regex[:pattern][:default] + hash["hl.regex.pattern"] = regex[:pattern][:default] + end + if regex[:pattern][:fields] + regex[:pattern][:fields].each do |k,v| + hash["f.#{k}.hl.regex.pattern"] = v + end + end + else + hash["hl.regex.pattern"] = regex[:pattern] + end + end + if regex[:max_analyzed_chars] + if regex[:max_analyzed_chars].kind_of? 
Hash + if regex[:max_analyzed_chars][:default] + hash["hl.regex.maxAnalyzedChars"] = regex[:max_analyzed_chars][:default] + end + if regex[:max_analyzed_chars][:fields] + regex[:max_analyzed_chars][:fields].each do |k,v| + hash["f.#{k}.hl.regex.maxAnalyzedChars"] = v + end + end + else + hash["hl.regex.maxAnalyzedChars"] = regex[:max_analyzed_chars] + end + end + end + + end + + if @params[:mlt] + hash[:mlt] = true + hash["mlt.count"] = @params[:mlt][:count] + hash["mlt.fl"] = @params[:mlt][:field_list].join(',') + hash["mlt.mintf"] = @params[:mlt][:min_term_freq] + hash["mlt.mindf"] = @params[:mlt][:min_doc_freq] + hash["mlt.minwl"] = @params[:mlt][:min_word_length] + hash["mlt.maxwl"] = @params[:mlt][:max_word_length] + hash["mlt.maxqt"] = @params[:mlt][:max_query_terms] + hash["mlt.maxntp"] = @params[:mlt][:max_tokens_parsed] + hash["mlt.boost"] = @params[:mlt][:boost] + end + + hash.merge(super.to_hash) + end + +end diff --git a/solr/client/ruby/solr-ruby/lib/solr/request/update.rb b/solr/client/ruby/solr-ruby/lib/solr/request/update.rb new file mode 100644 index 00000000000..8bd84488645 --- /dev/null +++ b/solr/client/ruby/solr-ruby/lib/solr/request/update.rb @@ -0,0 +1,23 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# a parent class for all requests that go through the solr update handler +# TODO: Use new xml update handler for better error responses +class Solr::Request::Update < Solr::Request::Base + def response_format + :xml + end + + def handler + 'update' + end +end diff --git a/solr/client/ruby/solr-ruby/lib/solr/response.rb b/solr/client/ruby/solr-ruby/lib/solr/response.rb new file mode 100755 index 00000000000..72c55fe664c --- /dev/null +++ b/solr/client/ruby/solr-ruby/lib/solr/response.rb @@ -0,0 +1,27 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +module Solr; module Response; end; end +require 'solr/response/base' +require 'solr/response/xml' +require 'solr/response/ruby' +require 'solr/response/ping' +require 'solr/response/add_document' +require 'solr/response/modify_document' +require 'solr/response/standard' +require 'solr/response/spellcheck' +require 'solr/response/dismax' +require 'solr/response/commit' +require 'solr/response/delete' +require 'solr/response/index_info' +require 'solr/response/optimize' +require 'solr/response/select' \ No newline at end of file diff --git a/solr/client/ruby/solr-ruby/lib/solr/response/add_document.rb b/solr/client/ruby/solr-ruby/lib/solr/response/add_document.rb new file mode 100644 index 00000000000..d1e192301db --- /dev/null +++ b/solr/client/ruby/solr-ruby/lib/solr/response/add_document.rb @@ -0,0 +1,17 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +class Solr::Response::AddDocument < Solr::Response::Xml + def initialize(xml) + super + end +end diff --git a/solr/client/ruby/solr-ruby/lib/solr/response/base.rb b/solr/client/ruby/solr-ruby/lib/solr/response/base.rb new file mode 100644 index 00000000000..a66d2a4d4f9 --- /dev/null +++ b/solr/client/ruby/solr-ruby/lib/solr/response/base.rb @@ -0,0 +1,42 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +class Solr::Response::Base + attr_reader :raw_response + + def initialize(raw_response) + @raw_response = raw_response + end + + # factory method for creating a Solr::Response::* from + # a request and the raw response content + def self.make_response(request, raw) + + # make sure response format seems sane + unless [:xml, :ruby].include?(request.response_format) + raise Solr::Exception.new("unknown response format: #{request.response_format}" ) + end + + # TODO: Factor out this case... perhaps the request object should provide the response class instead? Or dynamically align by class name? + # Maybe the request itself could have the response handling features that get mixed in with a single general purpose response object? + + begin + klass = eval(request.class.name.sub(/Request/,'Response')) + rescue NameError + raise Solr::Exception.new("unknown request type: #{request.class}") + else + klass.new(raw) + end + + end + +end diff --git a/solr/client/ruby/solr-ruby/lib/solr/response/commit.rb b/solr/client/ruby/solr-ruby/lib/solr/response/commit.rb new file mode 100644 index 00000000000..ff937a36ac3 --- /dev/null +++ b/solr/client/ruby/solr-ruby/lib/solr/response/commit.rb @@ -0,0 +1,17 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'rexml/xpath' + +class Solr::Response::Commit < Solr::Response::Xml +end + diff --git a/solr/client/ruby/solr-ruby/lib/solr/response/delete.rb b/solr/client/ruby/solr-ruby/lib/solr/response/delete.rb new file mode 100644 index 00000000000..0836128094c --- /dev/null +++ b/solr/client/ruby/solr-ruby/lib/solr/response/delete.rb @@ -0,0 +1,13 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +class Solr::Response::Delete < Solr::Response::Xml; end diff --git a/solr/client/ruby/solr-ruby/lib/solr/response/dismax.rb b/solr/client/ruby/solr-ruby/lib/solr/response/dismax.rb new file mode 100644 index 00000000000..e3ff8c08c64 --- /dev/null +++ b/solr/client/ruby/solr-ruby/lib/solr/response/dismax.rb @@ -0,0 +1,20 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +class Solr::Response::Dismax < Solr::Response::Standard + # no need for special processing + + # FIXME: 2007-02-07 -- The existence of this class indicates that + # the Request/Response pair architecture is a little hinky. Perhaps we could refactor + # out some of the most common functionality -- Common Query Parameters, Highlighting Parameters, + # Simple Facet Parameters, etc. -- into modules? +end \ No newline at end of file diff --git a/solr/client/ruby/solr-ruby/lib/solr/response/index_info.rb b/solr/client/ruby/solr-ruby/lib/solr/response/index_info.rb new file mode 100755 index 00000000000..b8e215ff268 --- /dev/null +++ b/solr/client/ruby/solr-ruby/lib/solr/response/index_info.rb @@ -0,0 +1,26 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +class Solr::Response::IndexInfo < Solr::Response::Ruby + def initialize(ruby_code) + super + end + + def num_docs + return @data['index']['numDocs'] + end + + def field_names + return @data['fields'].keys + end + +end diff --git a/solr/client/ruby/solr-ruby/lib/solr/response/modify_document.rb b/solr/client/ruby/solr-ruby/lib/solr/response/modify_document.rb new file mode 100755 index 00000000000..44c4f5b0cb6 --- /dev/null +++ b/solr/client/ruby/solr-ruby/lib/solr/response/modify_document.rb @@ -0,0 +1,17 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +class Solr::Response::ModifyDocument < Solr::Response::Xml + def initialize(xml) + super + end +end diff --git a/solr/client/ruby/solr-ruby/lib/solr/response/optimize.rb b/solr/client/ruby/solr-ruby/lib/solr/response/optimize.rb new file mode 100755 index 00000000000..4594d90d065 --- /dev/null +++ b/solr/client/ruby/solr-ruby/lib/solr/response/optimize.rb @@ -0,0 +1,14 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +class Solr::Response::Optimize < Solr::Response::Commit +end \ No newline at end of file diff --git a/solr/client/ruby/solr-ruby/lib/solr/response/ping.rb b/solr/client/ruby/solr-ruby/lib/solr/response/ping.rb new file mode 100644 index 00000000000..1c405885e26 --- /dev/null +++ b/solr/client/ruby/solr-ruby/lib/solr/response/ping.rb @@ -0,0 +1,28 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'rexml/xpath' + +class Solr::Response::Ping < Solr::Response::Xml + + def initialize(xml) + super + @ok = REXML::XPath.first(@doc, './solr/ping') ? true : false + end + + # returns true or false depending on whether the ping + # was successful or not + def ok? + @ok + end + +end diff --git a/solr/client/ruby/solr-ruby/lib/solr/response/ruby.rb b/solr/client/ruby/solr-ruby/lib/solr/response/ruby.rb new file mode 100644 index 00000000000..05424c13d83 --- /dev/null +++ b/solr/client/ruby/solr-ruby/lib/solr/response/ruby.rb @@ -0,0 +1,42 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +class Solr::Response::Ruby < Solr::Response::Base + attr_reader :data, :header + + def initialize(ruby_code) + super + begin + #TODO: what about pulling up data/header/response to ResponseBase, + # or maybe a new middle class like SelectResponseBase since + # all Select queries return this same sort of stuff?? + # XML (&wt=xml) and Ruby (&wt=ruby) responses contain exactly the same structure. + # a goal of solrb is to make it irrelevant which gets used under the hood, + # but favor Ruby responses. + @data = eval(ruby_code) + @header = @data['responseHeader'] + raise "response should be a hash" unless @data.kind_of? Hash + raise "response header missing" unless @header.kind_of? Hash + rescue SyntaxError => e + raise Solr::Exception.new("invalid ruby code: #{e}") + end + end + + def ok? + @header['status'] == 0 + end + + def query_time + @header['QTime'] + end + +end diff --git a/solr/client/ruby/solr-ruby/lib/solr/response/select.rb b/solr/client/ruby/solr-ruby/lib/solr/response/select.rb new file mode 100755 index 00000000000..8e2185d67de --- /dev/null +++ b/solr/client/ruby/solr-ruby/lib/solr/response/select.rb @@ -0,0 +1,17 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +class Solr::Response::Select < Solr::Response::Ruby + def initialize(ruby_code) + super + end +end diff --git a/solr/client/ruby/solr-ruby/lib/solr/response/spellcheck.rb b/solr/client/ruby/solr-ruby/lib/solr/response/spellcheck.rb new file mode 100644 index 00000000000..a4842c58424 --- /dev/null +++ b/solr/client/ruby/solr-ruby/lib/solr/response/spellcheck.rb @@ -0,0 +1,20 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +class Solr::Response::Spellcheck < Solr::Response::Ruby + attr_reader :suggestions + + def initialize(ruby_code) + super + @suggestions = @data['suggestions'] + end +end diff --git a/solr/client/ruby/solr-ruby/lib/solr/response/standard.rb b/solr/client/ruby/solr-ruby/lib/solr/response/standard.rb new file mode 100644 index 00000000000..2e59fe9b068 --- /dev/null +++ b/solr/client/ruby/solr-ruby/lib/solr/response/standard.rb @@ -0,0 +1,60 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +class Solr::Response::Standard < Solr::Response::Ruby + FacetValue = Struct.new(:name, :value) + include Enumerable + + def initialize(ruby_code) + super + @response = @data['response'] + raise "response section missing" unless @response.kind_of? Hash + end + + def total_hits + @response['numFound'] + end + + def start + @response['start'] + end + + def hits + @response['docs'] + end + + def max_score + @response['maxScore'] + end + + # TODO: consider the use of json.nl parameter + def field_facets(field) + facets = [] + values = @data['facet_counts']['facet_fields'][field] + Solr::Util.paired_array_each(values) do |key, value| + facets << FacetValue.new(key, value) + end + + facets + end + + def highlighted(id, field) + @data['highlighting'][id.to_s][field.to_s] rescue nil + end + + # supports enumeration of hits + # TODO revisit - should this iterate through *all* hits by re-requesting more? + def each + @response['docs'].each {|hit| yield hit} + end + +end diff --git a/solr/client/ruby/solr-ruby/lib/solr/response/xml.rb b/solr/client/ruby/solr-ruby/lib/solr/response/xml.rb new file mode 100644 index 00000000000..f48de5d047e --- /dev/null +++ b/solr/client/ruby/solr-ruby/lib/solr/response/xml.rb @@ -0,0 +1,42 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'rexml/document' +require 'solr/exception' + +class Solr::Response::Xml < Solr::Response::Base + attr_reader :doc, :status_code, :status_message + + def initialize(xml) + super + # parse the xml + @doc = REXML::Document.new(xml) + + # look for the result code and string + # + # + # 02 + # + result = REXML::XPath.first(@doc, './response/lst[@name="responseHeader"]/int[@name="status"]') + if result + @status_code = result.text + @status_message = result.text # TODO: any need for a message? + end + rescue REXML::ParseException => e + raise Solr::Exception.new("invalid response xml: #{e}") + end + + def ok? + return @status_code == '0' + end + +end diff --git a/solr/client/ruby/solr-ruby/lib/solr/solrtasks.rb b/solr/client/ruby/solr-ruby/lib/solr/solrtasks.rb new file mode 100755 index 00000000000..3a1f76a54d5 --- /dev/null +++ b/solr/client/ruby/solr-ruby/lib/solr/solrtasks.rb @@ -0,0 +1,27 @@ +#!/usr/bin/env ruby +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# TODO: fill out Solr tasks: start, stop, ping, optimize, etc. 
+ +require 'rake' +require 'rake/tasklib' + +module Solr + namespace :solr do + desc "Start Solr" + task :start do + # TODO: actually start it up! + puts "Starting..." + end + end +end diff --git a/solr/client/ruby/solr-ruby/lib/solr/util.rb b/solr/client/ruby/solr-ruby/lib/solr/util.rb new file mode 100755 index 00000000000..bb134eeeefa --- /dev/null +++ b/solr/client/ruby/solr-ruby/lib/solr/util.rb @@ -0,0 +1,32 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +class Solr::Util + # paired_array_each([key1,value1,key2,value2]) yields twice: + # |key1,value1| and |key2,value2| + def self.paired_array_each(a, &block) + 0.upto(a.size / 2 - 1) do |i| + n = i * 2 + yield(a[n], a[n+1]) + end + end + + # paired_array_to_hash([key1,value1,key2,value2]) => {key1 => value1, key2, value2} + def self.paired_array_to_hash(a) + Hash[*a] + end + + def self.query_parser_escape(string) + # backslash prefix everything that isn't a word character + string.gsub(/(\W)/,'\\\\\1') + end +end diff --git a/solr/client/ruby/solr-ruby/lib/solr/xml.rb b/solr/client/ruby/solr-ruby/lib/solr/xml.rb new file mode 100644 index 00000000000..626ecd295f3 --- /dev/null +++ b/solr/client/ruby/solr-ruby/lib/solr/xml.rb @@ -0,0 +1,47 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +module Solr::XML +end + +begin + + # If we can load rubygems and libxml-ruby... + require 'rubygems' + require 'xml/libxml' + + # then make a few modifications to XML::Node so it can stand in for REXML::Element + class XML::Node + # element.add_element(another_element) should work + alias_method :add_element, :<< + + # element.attributes['blah'] should work + def attributes + self + end + + # element.text = "blah" should work + def text=(x) + self << x.to_s + end + end + + # And use XML::Node for our XML generation + Solr::XML::Element = XML::Node + +rescue LoadError => e # If we can't load either rubygems or libxml-ruby + + # Just use REXML. + require 'rexml/document' + Solr::XML::Element = REXML::Element + +end \ No newline at end of file diff --git a/solr/client/ruby/solr-ruby/script/setup.rb b/solr/client/ruby/solr-ruby/script/setup.rb new file mode 100755 index 00000000000..b7256c92c1b --- /dev/null +++ b/solr/client/ruby/solr-ruby/script/setup.rb @@ -0,0 +1,14 @@ +#!/usr/bin/env ruby +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +include Solr diff --git a/solr/client/ruby/solr-ruby/script/solrshell b/solr/client/ruby/solr-ruby/script/solrshell new file mode 100755 index 00000000000..46938fa0924 --- /dev/null +++ b/solr/client/ruby/solr-ruby/script/solrshell @@ -0,0 +1,18 @@ +#!/usr/bin/env ruby +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +solr_lib = File.dirname(__FILE__) + '/../lib/solr' +setup = File.dirname(__FILE__) + '/setup' +irb_name = RUBY_PLATFORM =~ /mswin32/ ? 'irb.bat' : 'irb' + +exec "#{irb_name} -r #{solr_lib} -r #{setup} --simple-prompt" diff --git a/solr/client/ruby/solr-ruby/solr/conf/admin-extra.html b/solr/client/ruby/solr-ruby/solr/conf/admin-extra.html new file mode 100644 index 00000000000..aa739da862c --- /dev/null +++ b/solr/client/ruby/solr-ruby/solr/conf/admin-extra.html @@ -0,0 +1,31 @@ + + + diff --git a/solr/client/ruby/solr-ruby/solr/conf/protwords.txt b/solr/client/ruby/solr-ruby/solr/conf/protwords.txt new file mode 100644 index 00000000000..1dfc0abecbf --- /dev/null +++ b/solr/client/ruby/solr-ruby/solr/conf/protwords.txt @@ -0,0 +1,21 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +#----------------------------------------------------------------------- +# Use a protected word file to protect against the stemmer reducing two +# unrelated words to the same base word. + +# Some non-words that normally won't be encountered, +# just to test that they won't be stemmed. +dontstems +zwhacky + diff --git a/solr/client/ruby/solr-ruby/solr/conf/schema.xml b/solr/client/ruby/solr-ruby/solr/conf/schema.xml new file mode 100755 index 00000000000..386fd584223 --- /dev/null +++ b/solr/client/ruby/solr-ruby/solr/conf/schema.xml @@ -0,0 +1,221 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id + + + text + + + + + + + + + + + + diff --git a/solr/client/ruby/solr-ruby/solr/conf/scripts.conf b/solr/client/ruby/solr-ruby/solr/conf/scripts.conf new file mode 100644 index 00000000000..e993bbfbafd --- /dev/null +++ b/solr/client/ruby/solr-ruby/solr/conf/scripts.conf @@ -0,0 +1,24 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +user= +solr_hostname=localhost +solr_port=8983 +rsyncd_port=18983 +data_dir= +webapp_name=solr +master_host= +master_data_dir= +master_status_dir= diff --git a/solr/client/ruby/solr-ruby/solr/conf/solrconfig.xml b/solr/client/ruby/solr-ruby/solr/conf/solrconfig.xml new file mode 100755 index 00000000000..dca160ba7c5 --- /dev/null +++ b/solr/client/ruby/solr-ruby/solr/conf/solrconfig.xml @@ -0,0 +1,394 @@ + + + + + + ${solr.abortOnConfigurationError:true} + + + ${solr.data.dir:./solr/data} + + + + false + 32 + 10 + 2147483647 + 10000 + 1000 + 10000 + + + + + false + 32 + 10 + 2147483647 + 10000 + + + false + + + + + + + + + + + + + + + + + + + 1024 + + + + + + + + + + + + + false + + + + + + + + 10 + + + + + + + + + + + + + + + + + + false + + + 4 + + + + + + + + + + + + + + + explicit + + + + + + + + + + explicit + text^1.0 title_text^2.0 description_text^1.5 id^10.0 + 2<-1 5<-2 6<90% + *:* + + + + + + + on + 1 + genre_facet + medium_facet + rating_facet + publisher_facet + + + + + + + + + 1 + 0.5 + + + + + + + + spell + + + + + word + + + + + + + + + + + + + + + + explicit + true + + + + + + + + 5 + + + + + solr + + + + diff --git a/solr/client/ruby/solr-ruby/solr/conf/stopwords.txt b/solr/client/ruby/solr-ruby/solr/conf/stopwords.txt new file mode 100644 index 00000000000..b5824da3263 --- /dev/null +++ b/solr/client/ruby/solr-ruby/solr/conf/stopwords.txt @@ -0,0 +1,58 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. 
See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +#----------------------------------------------------------------------- +# a couple of test stopwords to test that the words are really being +# configured from this file: +stopworda +stopwordb + +#Standard english stop words taken from Lucene's StopAnalyzer +a +an +and +are +as +at +be +but +by +for +if +in +into +is +it +no +not +of +on +or +s +such +t +that +the +their +then +there +these +they +this +to +was +will +with + diff --git a/solr/client/ruby/solr-ruby/solr/conf/synonyms.txt b/solr/client/ruby/solr-ruby/solr/conf/synonyms.txt new file mode 100644 index 00000000000..b0e31cb7ec8 --- /dev/null +++ b/solr/client/ruby/solr-ruby/solr/conf/synonyms.txt @@ -0,0 +1,31 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +#----------------------------------------------------------------------- +#some test synonym mappings unlikely to appear in real input text +aaa => aaaa +bbb => bbbb1 bbbb2 +ccc => cccc1,cccc2 +a\=>a => b\=>b +a\,a => b\,b +fooaaa,baraaa,bazaaa + +# Some synonym groups specific to this example +GB,gib,gigabyte,gigabytes +MB,mib,megabyte,megabytes +Television, Televisions, TV, TVs +#notice we use "gib" instead of "GiB" so any WordDelimiterFilter coming +#after us won't split it into two words. + +# Synonym mappings can be used for spelling correction too +pixima => pixma + diff --git a/solr/client/ruby/solr-ruby/solr/conf/xslt/example.xsl b/solr/client/ruby/solr-ruby/solr/conf/xslt/example.xsl new file mode 100644 index 00000000000..75178f29d27 --- /dev/null +++ b/solr/client/ruby/solr-ruby/solr/conf/xslt/example.xsl @@ -0,0 +1,132 @@ + + + + + + + + + + + + + + + <xsl:value-of select="$title"/> + + + +

    +
    + This has been formatted by the sample "example.xsl" transform - + use your own XSLT to get a nicer page +
    + + + +
    + + + +
    + + + + +
    +
    +
    + + + + + + + + + + + + + + javascript:toggle("");? +
    + + exp + + + + + +
    + + +
    + + + + + + + +
      + +
    • +
      +
    + + +
    + + + + + + + + + + + + + + + + + + + + +
    diff --git a/solr/client/ruby/solr-ruby/test/conf/admin-extra.html b/solr/client/ruby/solr-ruby/test/conf/admin-extra.html new file mode 100644 index 00000000000..aa739da862c --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/conf/admin-extra.html @@ -0,0 +1,31 @@ + + + diff --git a/solr/client/ruby/solr-ruby/test/conf/protwords.txt b/solr/client/ruby/solr-ruby/test/conf/protwords.txt new file mode 100644 index 00000000000..1dfc0abecbf --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/conf/protwords.txt @@ -0,0 +1,21 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +#----------------------------------------------------------------------- +# Use a protected word file to protect against the stemmer reducing two +# unrelated words to the same base word. + +# Some non-words that normally won't be encountered, +# just to test that they won't be stemmed. 
+dontstems +zwhacky + diff --git a/solr/client/ruby/solr-ruby/test/conf/schema.xml b/solr/client/ruby/solr-ruby/test/conf/schema.xml new file mode 100755 index 00000000000..67c921513e5 --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/conf/schema.xml @@ -0,0 +1,237 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id + + + text + + + + + + + + + + + diff --git a/solr/client/ruby/solr-ruby/test/conf/scripts.conf b/solr/client/ruby/solr-ruby/test/conf/scripts.conf new file mode 100644 index 00000000000..e993bbfbafd --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/conf/scripts.conf @@ -0,0 +1,24 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +user= +solr_hostname=localhost +solr_port=8983 +rsyncd_port=18983 +data_dir= +webapp_name=solr +master_host= +master_data_dir= +master_status_dir= diff --git a/solr/client/ruby/solr-ruby/test/conf/solrconfig.xml b/solr/client/ruby/solr-ruby/test/conf/solrconfig.xml new file mode 100755 index 00000000000..a74f5f15d1a --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/conf/solrconfig.xml @@ -0,0 +1,376 @@ + + + + + + ${solr.abortOnConfigurationError:true} + + + + + + + false + 32 + 10 + 2147483647 + 10000 + 1000 + 10000 + + + + + false + 32 + 10 + 2147483647 + 10000 + + + false + + + + + + + + + + + + + + + + + + + 1024 + + + + + + + + + + + + + false + + + + + + + + 10 + + + + + + + + + + + + + + + + + + false + + + 4 + + + + + + + + + + + + + + + explicit + + + + + + + + explicit + 0.01 + + text^0.5 + + + + + + + + + id,test + + + 2<-1 5<-2 6<90% + + 100 + *:* + + + + + + + + + + 1 + 0.5 + + + + + + + + spell + + + + + word + + + + + + + + + + + + + + + + explicit + true + + + + + + + + 5 + + + + + solr + + + + diff --git a/solr/client/ruby/solr-ruby/test/conf/stopwords.txt b/solr/client/ruby/solr-ruby/test/conf/stopwords.txt new file mode 100644 index 00000000000..b5824da3263 --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/conf/stopwords.txt @@ -0,0 +1,58 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +#----------------------------------------------------------------------- +# a couple of test stopwords to test that the words are really being +# configured from this file: +stopworda +stopwordb + +#Standard english stop words taken from Lucene's StopAnalyzer +a +an +and +are +as +at +be +but +by +for +if +in +into +is +it +no +not +of +on +or +s +such +t +that +the +their +then +there +these +they +this +to +was +will +with + diff --git a/solr/client/ruby/solr-ruby/test/conf/synonyms.txt b/solr/client/ruby/solr-ruby/test/conf/synonyms.txt new file mode 100644 index 00000000000..b0e31cb7ec8 --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/conf/synonyms.txt @@ -0,0 +1,31 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +#----------------------------------------------------------------------- +#some test synonym mappings unlikely to appear in real input text +aaa => aaaa +bbb => bbbb1 bbbb2 +ccc => cccc1,cccc2 +a\=>a => b\=>b +a\,a => b\,b +fooaaa,baraaa,bazaaa + +# Some synonym groups specific to this example +GB,gib,gigabyte,gigabytes +MB,mib,megabyte,megabytes +Television, Televisions, TV, TVs +#notice we use "gib" instead of "GiB" so any WordDelimiterFilter coming +#after us won't split it into two words. 
+ +# Synonym mappings can be used for spelling correction too +pixima => pixma + diff --git a/solr/client/ruby/solr-ruby/test/functional/server_test.rb b/solr/client/ruby/solr-ruby/test/functional/server_test.rb new file mode 100644 index 00000000000..c94a7dd40f7 --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/functional/server_test.rb @@ -0,0 +1,218 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'test/unit' +require 'solr' + +class BadRequest < Solr::Request::Standard + def response_format + :invalid + end +end + +class ServerTest < Test::Unit::TestCase + include Solr + + def setup + @connection = Connection.new("http://localhost:8888/solr", :autocommit => :on) + clean + end + + def test_full_lifecycle + # make sure autocommit is on + assert @connection.autocommit + + # make sure this doc isn't there to begin with + @connection.delete(123456) + + # add it + @connection.add(:id => 123456, :text => 'Borges') # add :some_date => 'NOW/HOUR' to test richer data type handling + # now = DateTime.now + + # look for it + response = @connection.query('Borges') + assert_equal 1, response.total_hits + hit = response.hits[0] + assert_equal '123456', hit['id'] + # assert_equal now.year, hit['whatever_date'].year + + # look for it via dismax + response = @connection.search('Borges') + assert_equal 1, response.total_hits + assert_equal '123456', response.hits[0]['id'] + + # delete it + @connection.delete(123456) + + # make sure it's gone + 
response = @connection.query('Borges') + assert_equal 0, response.total_hits + end + + def test_i18n_full_lifecycle + # make sure autocommit is on + assert @connection.autocommit + + # make sure this doc isn't there to begin with + @connection.delete(123456) + + # add it + @connection.add(:id => 123456, :text => 'Åäöêâîôû') + + # look for it + response = @connection.query('Åäöêâîôû') + assert_equal 1, response.total_hits + assert_equal '123456', response.hits[0]['id'] + + # delete it + @connection.delete(123456) + + # make sure it's gone + response = @connection.query('Åäöêâîôû Öëäïöü') + assert_equal 0, response.total_hits + end + + def test_sorting + @connection.add(:id => 1, :text => 'aaa woot') + @connection.add(:id => 2, :text => 'bbb woot') + @connection.add(:id => 3, :text => 'ccc woot') + @connection.commit + + results = @connection.query('woot', :sort => [:id => :descending], :rows => 2) + assert_equal([3, 2], results.hits.map { |h| h['id'].to_i }) + + results = @connection.search('woot', :sort => [:id => :descending], :rows => 2) + assert_equal([3, 2], results.hits.map { |h| h['id'].to_i }) + + @connection.delete_by_query("id:1 OR id:2 OR id:3") + end + + def test_bad_connection + conn = Solr::Connection.new 'http://127.0.0.1:9999/invalid' + begin + conn.send(Solr::Request::Ping.new) + flunk "Expected exception not raised" + rescue ::Exception + # expected + assert true + end + end + + def test_bad_url + conn = Solr::Connection.new 'http://localhost:8888/invalid' + assert_raise(Net::HTTPServerException) do + conn.send(Solr::Request::Ping.new) + end + end + + def test_commit + response = @connection.send(Solr::Request::Commit.new) + assert response.ok? + end + + def test_optimize + response = @connection.send(Solr::Request::Optimize.new) + assert response.ok? + end + +# TODO: add test_ping back... something seems to have changed with the response, so adjustments are needed. +# non-critical - if Solr is broken we'll know from other tests! 
+# def test_ping +# assert_equal true, @connection.ping +# end + + def test_delete_with_query + assert_equal true, @connection.delete_by_query('[* TO *]') + end + + def test_ping_with_bad_server + conn = Solr::Connection.new 'http://localhost:8888/invalid' + assert_equal false, conn.ping + end + + def test_invalid_response_format + request = BadRequest.new(:query => "solr") + assert_raise(Solr::Exception) do + @connection.send(request) + end + end + + def test_escaping + doc = Solr::Document.new :id => 47, :ruby_text => 'puts "ouch!"' + @connection.add(doc) + @connection.commit + + request = Solr::Request::Standard.new :query => 'ouch' + result = @connection.send(request) + + assert_match /puts/, result.raw_response + end + + def test_add_document + doc = {:id => 999, :text => 'hi there!'} + request = Solr::Request::AddDocument.new(doc) + response = @connection.send(request) + assert response.status_code == '0' + end + + def test_update + @connection.update(:id => 999, :text => 'update test') + end + + def test_no_such_field + doc = {:id => 999, :bogus => 'foo'} + request = Solr::Request::AddDocument.new(doc) + assert_raise(Net::HTTPServerException) do + response = @connection.send(request) + end + # assert_equal false, response.ok? 
+ # assert_match "ERROR:unknown field 'bogus'", response.status_message + end + + def test_index_info + doc = {:id => 999, :test_index_facet => 'value'} + @connection.add(doc) + ii = Solr::Request::IndexInfo.new + info = @connection.send(Solr::Request::IndexInfo.new) + assert info.field_names.include?("id") && info.field_names.include?("test_index_facet") + assert_equal 1, info.num_docs + end + + def test_highlighting + @connection.add(:id => 1, :title_text => "Apache Solr") + + request = Solr::Request::Standard.new(:query => 'solr', + :highlighting => { + :field_list => ['title_text'], + :max_snippets => 3, + :prefix => ">>", + :suffix => "<<" + } + ) + + response = @connection.send(request) + assert_equal ["Apache >>Solr<<"], response.highlighted(1, :title_text) + end + + def test_entities + @connection.add(:id => 1, :title_text => " ") + response = @connection.query('nbsp') + assert_equal 1, response.total_hits + assert_equal '1', response.hits[0]['id'] + end + + # wipe the index clean + def clean + @connection.delete_by_query('*:*') + end + +end diff --git a/solr/client/ruby/solr-ruby/test/functional/test_solr_server.rb b/solr/client/ruby/solr-ruby/test/functional/test_solr_server.rb new file mode 100644 index 00000000000..fa01072e66c --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/functional/test_solr_server.rb @@ -0,0 +1,104 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# A singleton class for starting/stopping a Solr server for testing purposes +# The behavior of TestSolrServer can be modified prior to start() by changing +# port, solr_home, and quiet properties. + +class TestSolrServer + require 'singleton' + include Singleton + attr_accessor :port, :jetty_home, :solr_home, :quiet + + # configure the singleton with some defaults + def initialize + @pid = nil + end + + def self.wrap(params = {}) + error = false + solr_server = self.instance + solr_server.quiet = params[:quiet] || true + solr_server.jetty_home = params[:jetty_home] + solr_server.solr_home = params[:solr_home] + solr_server.port = params[:jetty_port] || 8888 + begin + puts "starting solr server on #{RUBY_PLATFORM}" + solr_server.start + sleep params[:startup_wait] || 5 + yield + rescue + error = true + ensure + puts "stopping solr server" + solr_server.stop + end + + return error + end + + def jetty_command + "java -Djetty.port=#{@port} -Dsolr.solr.home=#{@solr_home} -jar start.jar" + end + + def start + puts "jetty_home: #{@jetty_home}" + puts "solr_home: #{@solr_home}" + puts "jetty_command: #{jetty_command}" + platform_specific_start + end + + def stop + platform_specific_stop + end + + if RUBY_PLATFORM =~ /mswin32/ + require 'win32/process' + + # start the solr server + def platform_specific_start + Dir.chdir(@jetty_home) do + @pid = Process.create( + :app_name => jetty_command, + :creation_flags => Process::DETACHED_PROCESS, + :process_inherit => false, + :thread_inherit => true, + :cwd => "#{@jetty_home}" + ).process_id + end + end + + # stop a running solr server + def platform_specific_stop + Process.kill(1, @pid) + Process.wait + end + else # Not Windows + # start the solr server + def platform_specific_start + puts self.inspect + Dir.chdir(@jetty_home) do + @pid = fork do + STDERR.close if @quiet + exec jetty_command + end + end + end + + # stop a running solr server + def platform_specific_stop + Process.kill('TERM', @pid) + Process.wait + end + end + 
+end diff --git a/solr/client/ruby/solr-ruby/test/unit/add_document_test.rb b/solr/client/ruby/solr-ruby/test/unit/add_document_test.rb new file mode 100644 index 00000000000..414cbfc47cf --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/add_document_test.rb @@ -0,0 +1,40 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'solr_mock_base' + +class AddDocumentTest < SolrMockBaseTestCase + + def test_add_document_response + conn = Solr::Connection.new('http://localhost:9999/solr') + set_post_return('02') + doc = {:id => '123', :text => 'Tlon, Uqbar, Orbis Tertius'} + response = conn.send(Solr::Request::AddDocument.new(doc)) + assert_equal true, response.ok? + end + + def test_bad_add_document_response + conn = Solr::Connection.new('http://localhost:9999/solr') + set_post_return('12') + doc = {:id => '123', :text => 'Tlon, Uqbar, Orbis Tertius'} + response = conn.send(Solr::Request::AddDocument.new(doc)) + assert_equal false, response.ok? 
+ end + + def test_shorthand + conn = Solr::Connection.new('http://localhost:9999/solr') + set_post_return('02') + doc = {:id => '123', :text => 'Tlon, Uqbar, Orbis Tertius'} + assert_equal true, conn.add(:id => '123', :text => 'Tlon, Uqbar, Orbis Tetius') + end + +end diff --git a/solr/client/ruby/solr-ruby/test/unit/array_mapper_test.rb b/solr/client/ruby/solr-ruby/test/unit/array_mapper_test.rb new file mode 100755 index 00000000000..55ba3e61ef9 --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/array_mapper_test.rb @@ -0,0 +1,37 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +require 'solr' +require 'test/unit' + +include Solr::Importer + +class ArrayMapperTest < Test::Unit::TestCase + def test_simple + mapping1 = {:one => "uno"} + mapping2 = {:two => "dos"} + + mapper = Solr::Importer::ArrayMapper.new([Mapper.new(mapping1),Mapper.new(mapping2)]) + mapped_data = mapper.map([{},{}]) + assert_equal "uno", mapped_data[:one] + assert_equal "dos", mapped_data[:two] + end + + def test_field_conflict_goes_to_last + mapping1 = {:same => "uno"} + mapping2 = {:same => "dos"} + + mapper = Solr::Importer::ArrayMapper.new([Mapper.new(mapping1),Mapper.new(mapping2)]) + mapped_data = mapper.map([{},{}]) + assert_equal "dos", mapped_data[:same] + end +end \ No newline at end of file diff --git a/solr/client/ruby/solr-ruby/test/unit/changes_yaml_test.rb b/solr/client/ruby/solr-ruby/test/unit/changes_yaml_test.rb new file mode 100755 index 00000000000..69f8c14d2c3 --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/changes_yaml_test.rb @@ -0,0 +1,21 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +require 'test/unit' + +class ChangesYamlTest < Test::Unit::TestCase + def test_parse + change_log = YAML.load_file(File.expand_path(File.dirname(__FILE__)) + "/../../CHANGES.yml") + assert_equal Date.parse("2007-02-15"), change_log["v0.0.1"]["release_date"] + assert_equal ["initial release"], change_log["v0.0.1"]["changes"] + end +end diff --git a/solr/client/ruby/solr-ruby/test/unit/commit_test.rb b/solr/client/ruby/solr-ruby/test/unit/commit_test.rb new file mode 100644 index 00000000000..908ae5e576c --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/commit_test.rb @@ -0,0 +1,41 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'solr_mock_base' + +class CommitTest < SolrMockBaseTestCase + + def test_commit + xml = '02' + conn = Solr::Connection.new('http://localhost:9999/solr') + set_post_return(xml) + response = conn.send(Solr::Request::Commit.new) + assert_kind_of Solr::Response::Commit, response + assert_equal true, response.ok? + + # test shorthand + assert_equal true, conn.commit + end + + # def test_invalid_commit + # xml = '12' + # conn = Solr::Connection.new('http://localhost:9999/solr') + # set_post_return(xml) + # response = conn.send(Solr::Request::Commit.new) + # assert_kind_of Solr::Response::Commit, response + # assert_equal false, response.ok? 
+ # + # # test shorthand + # assert_equal false, conn.commit + # end + +end diff --git a/solr/client/ruby/solr-ruby/test/unit/connection_test.rb b/solr/client/ruby/solr-ruby/test/unit/connection_test.rb new file mode 100755 index 00000000000..cba4f169b17 --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/connection_test.rb @@ -0,0 +1,55 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'test/unit' +require 'solr' +require 'solr_mock_base' + +class ConnectionTest < SolrMockBaseTestCase + def test_mock + connection = Connection.new("http://localhost:9999") + set_post_return("foo") + assert_equal "foo", connection.post(Solr::Request::AddDocument.new) + end + + def test_bad_url + assert_raise(RuntimeError) do + Connection.new("ftp://localhost:9999") + end + end + + def test_connection_initialize + connection = Solr::Connection.new("http://localhost:8983/solr") + assert_equal 'localhost', connection.url.host + assert_equal 8983, connection.url.port + assert_equal '/solr', connection.url.path + end + + def test_non_standard_context + connection = Solr::Connection.new("http://localhost:8983/index") + assert_equal '/index', connection.url.path + end + + def test_xml_response + connection = Connection.new("http://localhost:9999") + set_post_return "" + response = connection.send(Solr::Request::Ping.new) + assert_equal "", response.raw_response + end + + def test_ruby_response + connection = Connection.new("http://localhost:9999") + 
set_post_return "{'responseHeader' => {}, 'response' => {}}" + response = connection.send(Solr::Request::Standard.new(:query => 'foo')) + assert_equal({'responseHeader' => {}, 'response' => {}}, response.data) + end +end diff --git a/solr/client/ruby/solr-ruby/test/unit/data_mapper_test.rb b/solr/client/ruby/solr-ruby/test/unit/data_mapper_test.rb new file mode 100755 index 00000000000..ca9ab78cb6c --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/data_mapper_test.rb @@ -0,0 +1,75 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +require 'solr' +require 'test/unit' + +class DataMapperTest < Test::Unit::TestCase + + def test_static_mapping + mapping = {:static => "value", + :static_array => ["value1", "value2"]} + + mapper = Solr::Importer::Mapper.new(mapping) + mapped_data = mapper.map({}) + + assert_equal "value", mapped_data[:static] + assert_equal ["value1", "value2"], mapped_data[:static_array] + end + + def test_simple_mapping + orig_data = {:orig_field => "value", + :multi1 => "val1", :multi2 => "val2"} + mapping = {:solr_field => :orig_field, + :mapped_array => [:multi1, :multi2], } + + mapper = Solr::Importer::Mapper.new(mapping) + mapped_data = mapper.map(orig_data) + + assert_equal "value", mapped_data[:solr_field] + assert_equal ["val1", "val2"], mapped_data[:mapped_array] + end + + def test_proc + orig_data = {:orig_field => "value"} + mapping = {:solr_field => Proc.new {|record| ">#{record[:orig_field]}<"}} + + mapper = Solr::Importer::Mapper.new(mapping) + mapped_data = mapper.map(orig_data) + + assert_equal ">value<", mapped_data[:solr_field] + end + + def test_overridden_field + mapping = {:solr_field => [:orig_field1, :orig_field2]} + orig_data = {:orig_field1 => "value1", :orig_field2 => "value2", } + + mapper = Solr::Importer::Mapper.new(mapping) + def mapper.field_data(orig_data, field_name) + ["~#{super(orig_data, field_name)}~"] # array tests that the value is flattened + end + mapped_data = mapper.map(orig_data) + + assert_equal ["~value1~", "~value2~"], mapped_data[:solr_field] + end + + def test_unknown_mapping + mapping = {:solr_field => /foo/} # regexp currently not a valid mapping type + + mapper = Solr::Importer::Mapper.new(mapping) + + assert_raise(RuntimeError) do + mapped_data = mapper.map({}) + end + end + +end diff --git a/solr/client/ruby/solr-ruby/test/unit/delete_test.rb b/solr/client/ruby/solr-ruby/test/unit/delete_test.rb new file mode 100644 index 00000000000..bd00c7c4611 --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/delete_test.rb @@ 
-0,0 +1,56 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'solr_mock_base' + +class DeleteTest < SolrMockBaseTestCase + + def test_delete_request + request = Solr::Request::Delete.new(:id => '123') + assert_match(/[\s]*123<\/id>[\s]*<\/delete>/m, request.to_s) + end + + def test_delete_by_query_request + request = Solr::Request::Delete.new(:query => 'name:summers') + assert_match(/[\s]*name:summers<\/query>[\s]*<\/delete>/m, request.to_s) + end + + def test_delete_response + conn = Solr::Connection.new 'http://localhost:9999/solr' + set_post_return('02') + response = conn.send(Solr::Request::Delete.new(:id => 123)) + assert_equal true, response.ok? + end + + def test_bad_delete_request + assert_raise(Solr::Exception) do + Solr::Request::Delete.new(:bogus => :param) + end + + assert_raise(Solr::Exception) do + Solr::Request::Delete.new(:id => 529, :query => "id:529") + end + end + + def test_bad_delete_response + conn = Solr::Connection.new 'http://localhost:9999/solr' + set_post_return('uhoh') + response = conn.send(Solr::Request::Delete.new(:id => 123)) + assert_equal false, response.ok? 
+ end + + def test_delete_by_i18n_query_request + request = Solr::Request::Delete.new(:query => 'ëäïöü') + assert_match(/[\s]*ëäïöü<\/query>[\s]*<\/delete>/m, request.to_s) + end + +end diff --git a/solr/client/ruby/solr-ruby/test/unit/delimited_file_source_test.rb b/solr/client/ruby/solr-ruby/test/unit/delimited_file_source_test.rb new file mode 100755 index 00000000000..bb52fcc11ab --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/delimited_file_source_test.rb @@ -0,0 +1,29 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'solr' +require 'test/unit' + +class DelimitedFileSourceTest < Test::Unit::TestCase + + def test_load + filename = File.expand_path(File.dirname(__FILE__)) + "/tab_delimited.txt" + + source = Solr::Importer::DelimitedFileSource.new(filename,/\t/) + assert_equal source.to_a.size, 1 + + source.each do |data| + assert_equal data[:asin], '0865681740' + end + end + +end diff --git a/solr/client/ruby/solr-ruby/test/unit/dismax_request_test.rb b/solr/client/ruby/solr-ruby/test/unit/dismax_request_test.rb new file mode 100644 index 00000000000..7141334720d --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/dismax_request_test.rb @@ -0,0 +1,26 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'test/unit' +require 'solr' + +class DismaxRequestTest < Test::Unit::TestCase + + def test_basic_query + request = Solr::Request::Dismax.new(:query => 'query', :phrase_slop => '1000', :sort => [{:deedle => :descending}]) + assert_match(/q=query/, request.to_s) + assert_match(/qt=dismax/, request.to_s) + assert_match(/ps=1000/, request.to_s) + assert_match(/sort=deedle%20desc/, request.to_s) + end + +end \ No newline at end of file diff --git a/solr/client/ruby/solr-ruby/test/unit/document_test.rb b/solr/client/ruby/solr-ruby/test/unit/document_test.rb new file mode 100644 index 00000000000..6bb6e14a026 --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/document_test.rb @@ -0,0 +1,69 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +require 'test/unit' +require 'solr' + +class DocumentTest < Test::Unit::TestCase + + def test_xml + doc = Solr::Document.new + doc << Solr::Field.new(:creator => 'Erik Hatcher') + assert_kind_of Solr::XML::Element, doc.to_xml + assert_match(/[\s]*Erik Hatcher<\/field>[\s]*<\/doc>/m, doc.to_xml.to_s) + end + + def test_repeatable + doc = Solr::Document.new + doc << Solr::Field.new(:creator => 'Erik Hatcher') + doc << Solr::Field.new(:creator => 'Otis Gospodnetic') + assert_kind_of Solr::XML::Element, doc.to_xml + assert_match(/[\s]*Erik Hatcher<\/field>[\s]*Otis Gospodnetic<\/field>[\s]*<\/doc>/m, doc.to_xml.to_s) + end + + def test_repeatable_in_hash + doc = Solr::Document.new({:creator => ['Erik Hatcher', 'Otis Gospodnetic']}) + assert_match(/[\s]*Erik Hatcher<\/field>[\s]*Otis Gospodnetic<\/field>[\s]*<\/doc>/m, doc.to_xml.to_s) + end + + def test_bad_doc + doc = Solr::Document.new + assert_raise(RuntimeError) do + doc << "invalid" + end + end + + def test_hash_shorthand + doc = Solr::Document.new :creator => 'Erik Hatcher', :title => 'Lucene in Action' + assert_equal 'Erik Hatcher', doc[:creator] + assert_equal 'Lucene in Action', doc[:title] + assert_equal nil, doc[:foo] + + doc = Solr::Document.new + doc << {:creator => 'Erik Hatcher', :title => 'Lucene in Action'} + doc[:subject] = 'Search' + assert_equal 'Erik Hatcher', doc[:creator] + assert_equal 'Lucene in Action', doc[:title] + assert_equal 'Search', doc[:subject] + end + + def test_boost + doc = Solr::Document.new :name => "McGrump" + doc.boost = 300.28 + assert_match(/[\s]*McGrump<\/field>[\s]*<\/doc>/, doc.to_xml.to_s) + end + + def test_string_values + doc = Solr::Document.new :name => "multi\nline" + assert_match(/[\s]*multi\nline<\/field>[\s]*<\/doc>/, doc.to_xml.to_s) + end +end diff --git a/solr/client/ruby/solr-ruby/test/unit/field_test.rb b/solr/client/ruby/solr-ruby/test/unit/field_test.rb new file mode 100644 index 00000000000..44e6d5c4c27 --- /dev/null +++ 
b/solr/client/ruby/solr-ruby/test/unit/field_test.rb @@ -0,0 +1,48 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'test/unit' +require 'solr' + +class FieldTest < Test::Unit::TestCase + + def test_xml + field = Solr::Field.new :creator => 'Erik Hatcher' + assert_kind_of Solr::XML::Element, field.to_xml + assert_match(/Erik Hatcher<\/field>/, field.to_xml.to_s) + end + + def test_escaped_xml + field = Solr::Field.new :creator => 'Erik Hatcher & His Amazing Leaping Ability' + assert_kind_of Solr::XML::Element, field.to_xml + assert_match(/Erik Hatcher & His Amazing Leaping Ability<\/field>/, field.to_xml.to_s) + end + + def test_xml_date + field = Solr::Field.new :time => Time.now + assert_kind_of Solr::XML::Element, field.to_xml + assert_match(/[\d]{4}-[\d]{2}-[\d]{2}T[\d]{2}:[\d]{2}:[\d]{2}Z<\/field>/, field.to_xml.to_s) + end + + def test_i18n_xml + field = Solr::Field.new :i18nstring => 'Äêâîôû Öëäïöü' + assert_kind_of Solr::XML::Element, field.to_xml + assert_match(/Äêâîôû Öëäïöü<\/field>/m, field.to_xml.to_s) + end + + def test_boost_values + field = Solr::Field.new(:blah => "squee", :boost => 3.0) + assert_kind_of Solr::XML::Element, field.to_xml + assert_match(/squee<\/field>/, field.to_xml.to_s) + end + +end diff --git a/solr/client/ruby/solr-ruby/test/unit/hpricot_mapper_test.rb b/solr/client/ruby/solr-ruby/test/unit/hpricot_mapper_test.rb new file mode 100644 index 00000000000..075064e4581 --- /dev/null +++ 
b/solr/client/ruby/solr-ruby/test/unit/hpricot_mapper_test.rb @@ -0,0 +1,44 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +begin + require 'solr' + require 'test/unit' + require 'hpricot' + + class HpricotMapperTest < Test::Unit::TestCase + + def setup + @doc = open(File.expand_path(File.dirname(__FILE__)) + "/hpricot_test_file.xml"){|f| Hpricot.XML(f)} + end + + def test_simple_hpricot_path + mapping = {:field1 => :'child[@attribute="attribute1"]', + :field2 => :'child[@attribute="attribute2"]', + :field3 => :'child[@attribute="attribute3"]', + :field4 => :'child[@attribute="attribute3"] grandchild', + :field5 => :'child'} + + mapper = Solr::Importer::HpricotMapper.new(mapping) + mapped_data = mapper.map(@doc) + + assert_equal ['text1'], mapped_data[:field1] + assert_equal ['text2'], mapped_data[:field2] + assert_equal ['text3grandchild 3 text'], mapped_data[:field3] + assert_equal ['grandchild 3 text'], mapped_data[:field4] + assert_equal ['text1', 'text2', 'text3grandchild 3 text'], mapped_data[:field5] + end + + end +rescue LoadError => e + puts "HpricotMapperTest not run because #{e}" +end \ No newline at end of file diff --git a/solr/client/ruby/solr-ruby/test/unit/hpricot_test_file.xml b/solr/client/ruby/solr-ruby/test/unit/hpricot_test_file.xml new file mode 100644 index 00000000000..3f3c7214484 --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/hpricot_test_file.xml @@ -0,0 +1,26 @@ + + + + text1 + text2 + text3grandchild 3 
text + + \ No newline at end of file diff --git a/solr/client/ruby/solr-ruby/test/unit/indexer_test.rb b/solr/client/ruby/solr-ruby/test/unit/indexer_test.rb new file mode 100755 index 00000000000..58d1a8defd8 --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/indexer_test.rb @@ -0,0 +1,57 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'test/unit' +require 'solr' + +class Solr::Indexer + attr_reader :added + def add_docs(doc) + @added ||= [] + @added << doc + end +end + +class IndexerTest < Test::Unit::TestCase + def test_mapping_or_mapping + mapping = {:field => "foo"} + indexer = Solr::Indexer.new([1,2,3], mapping, :debug => true) + indexer.index + assert_equal 3, indexer.added.size + + indexer = Solr::Indexer.new([1,2,3,4], Solr::Importer::Mapper.new(mapping), :debug => true) + indexer.index + assert_equal 4, indexer.added.size + end + + def test_batch + mapping = {:field => "foo"} + indexer = Solr::Indexer.new([1,2,3], mapping, :debug => true, :buffer_docs => 2) + indexer.index + assert_equal 2, indexer.added.size + end + +end + + +# source = DataSource.new +# +# mapping = { +# :id => :isbn, +# :name => :author, +# :source => "BOOKS", +# :year => Proc.new {|record| record.date[0,4] }, +# } +# +# Solr::Indexer.index(source, mapper) do |orig_data, solr_document| +# solr_document[:timestamp] = Time.now +# end \ No newline at end of file diff --git a/solr/client/ruby/solr-ruby/test/unit/modify_document_test.rb 
b/solr/client/ruby/solr-ruby/test/unit/modify_document_test.rb new file mode 100755 index 00000000000..9291dd16623 --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/modify_document_test.rb @@ -0,0 +1,24 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'test/unit' +require 'solr' + +class ModifyDocumentTest < Test::Unit::TestCase + + def test_update_formatting + request = Solr::Request::ModifyDocument.new(:id => 10, :overwrite => {:name => ['val1', 'val2'], :copyfield => nil}) + assert_equal :xml, request.response_format + assert_match /copyfield\:OVERWRITE/, request.handler + assert_match /name\:OVERWRITE/, request.handler + end +end diff --git a/solr/client/ruby/solr-ruby/test/unit/ping_test.rb b/solr/client/ruby/solr-ruby/test/unit/ping_test.rb new file mode 100644 index 00000000000..7d4dd3001d7 --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/ping_test.rb @@ -0,0 +1,51 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +require 'solr_mock_base' + +class PingTest < SolrMockBaseTestCase + + def test_ping_response + xml = +< + + + + + + +PING_RESPONSE + conn = Solr::Connection.new('http://localhost:9999') + set_post_return(xml) + response = conn.send(Solr::Request::Ping.new) + assert_kind_of Solr::Response::Ping, response + assert_equal true, response.ok? + + # test shorthand + assert true, conn.ping + end + + def test_bad_ping_response + xml = "bar" + conn = Solr::Connection.new('http://localhost:9999') + set_post_return(xml) + response = conn.send(Solr::Request::Ping.new) + assert_kind_of Solr::Response::Ping, response + assert_equal false, response.ok? + + # test shorthand + assert_equal false, conn.ping + end + +end diff --git a/solr/client/ruby/solr-ruby/test/unit/request_test.rb b/solr/client/ruby/solr-ruby/test/unit/request_test.rb new file mode 100755 index 00000000000..ca2e5897a16 --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/request_test.rb @@ -0,0 +1,61 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +require 'test/unit' +require 'solr' + +class BadRequest < Solr::Request::Base +end + +class RequestTest < Test::Unit::TestCase + + def test_commit_request + request = Solr::Request::Commit.new + assert_equal :xml, request.response_format + assert_equal 'update', request.handler + assert_match(//, request.to_s) + end + + def test_add_doc_request + request = Solr::Request::AddDocument.new(:title => "title") + assert_match(/[\s]*[\s]*title<\/field>[\s]*<\/doc>[\s]*<\/add>/m, request.to_s) + assert_equal :xml, request.response_format + assert_equal 'update', request.handler + + assert_raise(RuntimeError) do + Solr::Request::AddDocument.new("invalid") + end + end + + def test_add_multidoc_request + request = Solr::Request::AddDocument.new([{:title => "title1"}, {:title => "title2"}]) + assert_match(/[\s]*[\s]*title1<\/field>[\s]*<\/doc>[\s]*[\s]*title2<\/field>[\s]*<\/doc>[\s]*<\/add>/m, request.to_s) + assert_equal :xml, request.response_format + assert_equal 'update', request.handler + end + + def test_ping_request + request = Solr::Request::Ping.new + assert_equal :xml, request.response_format + end + + def test_bad_request_class + assert_raise(RuntimeError) do + BadRequest.new.response_format + end + + assert_raise(RuntimeError) do + BadRequest.new.handler + end + end + +end diff --git a/solr/client/ruby/solr-ruby/test/unit/response_test.rb b/solr/client/ruby/solr-ruby/test/unit/response_test.rb new file mode 100644 index 00000000000..1496262beed --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/response_test.rb @@ -0,0 +1,43 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'test/unit' +require 'solr' +require 'solr_mock_base' + + +class ResponseTest < SolrMockBaseTestCase + + def test_response_xml_error + begin + Solr::Response::Xml.new("invalid xml&") + flunk("failed to get Solr::Exception as expected") + rescue Exception => exception + assert_kind_of Solr::Exception, exception + assert_match 'invalid response xml', exception.to_s + end + end + + def test_invalid_ruby + assert_raise(Solr::Exception) do + Solr::Response::Ruby.new(' {...') + end + end + + # This is now an acceptable use of Select, for the default request handler with no parameters (other than &wt=ruby) + # def test_bogus_request_handling + # assert_raise(Solr::Exception) do + # Solr::Response::Base.make_response(Solr::Request::Select.new, "response data") + # end + # end + +end diff --git a/solr/client/ruby/solr-ruby/test/unit/select_test.rb b/solr/client/ruby/solr-ruby/test/unit/select_test.rb new file mode 100755 index 00000000000..ae1a40bb719 --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/select_test.rb @@ -0,0 +1,25 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +require 'test/unit' +require 'solr' + +class SelectTest < Test::Unit::TestCase + + def test_basic_query + request = Solr::Request::Select.new('custom', :q => 'query') + assert_equal :ruby, request.response_format + assert_equal 'select', request.handler + assert_equal 'query', request.to_hash[:q] + end + +end diff --git a/solr/client/ruby/solr-ruby/test/unit/solr_mock_base.rb b/solr/client/ruby/solr-ruby/test/unit/solr_mock_base.rb new file mode 100755 index 00000000000..86e676a0e05 --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/solr_mock_base.rb @@ -0,0 +1,40 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +require 'test/unit' +require 'solr' + +# TODO: Maybe replace this with flexmock +class SolrMockBaseTestCase < Test::Unit::TestCase + include Solr + + def setup + Connection.send(:alias_method, :orig_post, :post) + end + + def teardown + Connection.send(:alias_method, :post, :orig_post) + end + + def set_post_return(value) + Connection.class_eval %{ + def post(request) + %q{#{value}} + end + } + end + + def test_dummy + # So Test::Unit is happy running this class + end + +end diff --git a/solr/client/ruby/solr-ruby/test/unit/spellcheck_response_test.rb b/solr/client/ruby/solr-ruby/test/unit/spellcheck_response_test.rb new file mode 100644 index 00000000000..0d9217425f0 --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/spellcheck_response_test.rb @@ -0,0 +1,26 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'solr_mock_base' + +class SpellcheckResponseTest < SolrMockBaseTestCase + def test_basic + ruby_code = "{'responseHeader'=>{'status'=>0,'QTime'=>5},'suggestions'=>['whately','whatcha','whatever']}" + conn = Solr::Connection.new 'http://localhost:9999' + set_post_return(ruby_code) + response = conn.send(Solr::Request::Spellcheck.new(:query => 'whateva')) + assert_equal true, response.ok? 
+ assert_equal 3, response.suggestions.size + assert_equal ['whately','whatcha','whatever'], response.suggestions + end +end + diff --git a/solr/client/ruby/solr-ruby/test/unit/spellchecker_request_test.rb b/solr/client/ruby/solr-ruby/test/unit/spellchecker_request_test.rb new file mode 100644 index 00000000000..9a603443e29 --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/spellchecker_request_test.rb @@ -0,0 +1,27 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'test/unit' +require 'solr' + +class SpellcheckRequestTest < Test::Unit::TestCase + def test_spellcheck_request + request = Solr::Request::Spellcheck.new(:query => 'whateva', :suggestion_count => 5, :accuracy => 0.7, :only_more_popular => true) + assert_equal :ruby, request.response_format + assert_equal 'select', request.handler + hash = request.to_hash + assert_equal 'whateva', hash[:q] + assert_equal 5, hash[:suggestionCount] + assert_equal 0.7, hash[:accuracy] + assert_equal true, hash[:onlyMorePopular] + end +end diff --git a/solr/client/ruby/solr-ruby/test/unit/standard_request_test.rb b/solr/client/ruby/solr-ruby/test/unit/standard_request_test.rb new file mode 100755 index 00000000000..5cacfc1d435 --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/standard_request_test.rb @@ -0,0 +1,324 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'test/unit' +require 'solr' + +class StandardRequestTest < Test::Unit::TestCase + + def test_basic_query + request = Solr::Request::Standard.new(:query => 'query') + assert_equal :ruby, request.response_format + assert_equal 'select', request.handler + assert_equal 'query', request.to_hash[:q] + assert_match /q=query/, request.to_s + end + + def test_bad_params + assert_raise(RuntimeError) do + Solr::Request::Standard.new(:foo => "invalid") + end + + assert_raise(RuntimeError) do + Solr::Request::Standard.new(:query => "valid", :foo => "invalid") + end + + assert_raise(RuntimeError) do + Solr::Request::Standard.new(:query => "valid", :operator => :bogus) + end + end + + def test_common_params + request = Solr::Request::Standard.new(:query => 'query', :start => 10, :rows => 50, + :filter_queries => ['fq1', 'fq2'], :field_list => ['id','title','score'], :operator => :and) + assert_equal 10, request.to_hash[:start] + assert_equal 50, request.to_hash[:rows] + assert_equal ['fq1','fq2'], request.to_hash[:fq] + assert_equal "id,title,score", request.to_hash[:fl] + assert_equal "AND", request.to_hash["q.op"] + end + + def test_missing_params + request = Solr::Request::Standard.new(:query => 'query', :debug_query => false, :facets => {:fields =>[:category_facet]}) + assert_nil request.to_hash[:rows] + assert_no_match /rows/, request.to_s + assert_no_match /facet\.sort/, request.to_s + assert_match /debugQuery/, request.to_s + end + + def test_only_facet_query + request = Solr::Request::Standard.new(:query => 'query', + :facets => { + :queries => ["q1", 
"q2"], + } + ) + + hash = request.to_hash + assert_equal ["q1", "q2"], hash["facet.query"] + end + + def test_facet_params_all + request = Solr::Request::Standard.new(:query => 'query', + :facets => { + :fields => [:genre, + # field that overrides the global facet parameters + {:year => {:limit => 50, :mincount => 0, :missing => false, :sort => :term, :prefix=>"199", :offset => 7}}], + :queries => ["q1", "q2"], + :prefix => "cat", + :offset => 3, :limit => 5, :zeros => true, :mincount => 20, :sort => :count # global facet parameters + } + ) + + hash = request.to_hash + assert_equal true, hash[:facet] + assert_equal [:genre, :year], hash["facet.field"] + assert_equal ["q1", "q2"], hash["facet.query"] + assert_equal 5, hash["facet.limit"] + assert_equal 20, hash["facet.mincount"] + assert_equal true, hash["facet.sort"] + assert_equal "cat", hash["facet.prefix"] + assert_equal 50, hash["f.year.facet.limit"] + assert_equal 0, hash["f.year.facet.mincount"] + assert_equal false, hash["f.year.facet.sort"] + assert_equal "199", hash["f.year.facet.prefix"] + assert_equal 3, hash["facet.offset"] + assert_equal 7, hash["f.year.facet.offset"] + end + + def test_basic_sort + request = Solr::Request::Standard.new(:query => 'query', :sort => [{:title => :descending}, {:date => :ascending}]) + assert_equal 'query', request.to_hash[:q] + assert_equal 'title desc,date asc', request.to_hash[:sort] + end + + def test_highlighting + request = Solr::Request::Standard.new(:query => 'query', + :highlighting => { + :field_list => ['title', 'author'], + :merge_contiguous => true, + :increment => 100, + :max_snippets => 3, + :require_field_match => true, + :prefix => "", + :suffix => "", + :fragment_size => 300, + :max_analyzed_chars => 102400, + :formatter => 'myFormatter', + :fragmenter => 'myFragmenter', + :use_phrase_highlighter => true + } + ) + + hash = request.to_hash + assert_equal true, hash[:hl] + assert_equal "title,author", hash["hl.fl"] + assert_equal true, 
hash["hl.mergeContiguous"] + assert_equal 100, hash["hl.increment"] + assert_equal 3, hash["hl.snippets"] + assert_equal true, hash["hl.requireFieldMatch"] + assert_equal "", hash["hl.simple.pre"] + assert_equal "", hash["hl.simple.post"] + assert_equal 300, hash["hl.fragsize"] + assert_equal 102400, hash["hl.maxAnalyzedChars"] + assert_equal "myFormatter", hash["hl.formatter"] + assert_equal "myFragmenter", hash["hl.fragmenter"] + assert_equal true, hash["hl.usePhraseHighlighter"] + end + + def test_highlighting2 + request = Solr::Request::Standard.new(:query => 'query', + :highlighting => { + :field_list => ['title', 'author'], + :merge_contiguous => { + :default=>false, :fields=>{'author'=>true} + }, + :increment => { + :default=>100, :fields=>{'author'=>200} + }, + :max_snippets => { + :default=>2,:fields=>{'author'=>3} + }, + :prefix => { + :default=>"", :fields=>{'author'=>""}, + }, + :suffix => { + :default=>"", :fields=>{'author'=>""}, + }, + :fragment_size => { + :default=>300,:fields=>{'author'=>200} + }, + :max_analyzed_chars => { + :default=>102400,:fields=>{'author'=>51200} + }, + :require_field_match => { + :default=>false, :fields=>{'author'=>true} + }, + :formatter => { + :default=>'defaultFormatter', :fields=>{'title'=>'titleFormatter'} + }, + :fragmenter => { + :default=>'defaultFragmenter',:fields=>{'title'=>'titleFragmenter'} + }, + } + ) + + hash = request.to_hash + assert_equal true, hash[:hl] + assert_equal "title,author", hash["hl.fl"] + assert_equal false, hash["hl.mergeContiguous"] + assert_equal true, hash["f.author.hl.mergeContiguous"] + assert_equal 100, hash["hl.increment"] + assert_equal 200, hash["f.author.hl.increment"] + assert_equal 2, hash["hl.snippets"] + assert_equal 3, hash["f.author.hl.snippets"] + assert_equal "", hash["hl.simple.pre"] + assert_equal "", hash["f.author.hl.simple.pre"] + assert_equal "", hash["hl.simple.post"] + assert_equal "", hash["f.author.hl.simple.post"] + assert_equal 300, hash["hl.fragsize"] + 
assert_equal 200, hash["f.author.hl.fragsize"] + assert_equal 102400, hash["hl.maxAnalyzedChars"] + assert_equal 51200, hash["f.author.hl.maxAnalyzedChars"] + assert_equal false, hash["hl.requireFieldMatch"] + assert_equal true, hash["f.author.hl.requireFieldMatch"] + assert_equal 'defaultFormatter', hash["hl.formatter"] + assert_equal 'titleFormatter', hash["f.title.hl.formatter"] + assert_equal 'defaultFragmenter', hash["hl.fragmenter"] + assert_equal 'titleFragmenter', hash["f.title.hl.fragmenter"] + end + + def test_highlighting_regex + request = Solr::Request::Standard.new(:query => 'query', + :highlighting => { + :field_list => ['title', 'author'], + :regex => { + :slop => 0.8, + :pattern => '\w', + :max_analyzed_chars => 10000 + } + } + ) + + hash = request.to_hash + assert_equal true, hash[:hl] + assert_equal "title,author", hash["hl.fl"] + assert_equal 0.8, hash["hl.regex.slop"] + assert_equal '\w', hash["hl.regex.pattern"] + assert_equal 10000, hash["hl.regex.maxAnalyzedChars"] + end + + def test_highlighting_regex2 + request = Solr::Request::Standard.new(:query => 'query', + :highlighting => { + :field_list => ['title', 'author'], + :regex => { + :slop => { :default=>0.5, :fields=>{'author'=>0.8} }, + :pattern => { :default=>'\w', :fields=>{'author'=>'\n'} }, + :max_analyzed_chars => { :default=>10000, :fields=>{'author'=>20000} } + } + } + ) + + hash = request.to_hash + assert_equal true, hash[:hl] + assert_equal "title,author", hash["hl.fl"] + assert_equal 0.5, hash["hl.regex.slop"] + assert_equal 0.8, hash["f.author.hl.regex.slop"] + assert_equal '\w', hash["hl.regex.pattern"] + assert_equal '\n', hash["f.author.hl.regex.pattern"] + assert_equal 10000, hash["hl.regex.maxAnalyzedChars"] + assert_equal 20000, hash["f.author.hl.regex.maxAnalyzedChars"] + end + + def test_highlighting_alternate_field + request = Solr::Request::Standard.new(:query => 'query', + :highlighting => { + :field_list => ['title', 'author'], + :alternate_field => 'title', + 
:max_alternate_field_length => 30 + } + ) + + hash = request.to_hash + assert_equal true, hash[:hl] + assert_equal "title,author", hash["hl.fl"] + assert_equal "title", hash["hl.alternateField"] + assert_equal 30, hash["hl.maxAlternateFieldLength"] + end + + def test_highlighting_alternate_field2 + request = Solr::Request::Standard.new(:query => 'query', + :highlighting => { + :field_list => ['title', 'author'], + :alternate_field => { + :default=>'default', :fields=>{'title'=>'title', 'author'=>'author'} + }, + :max_alternate_field_length => { + :default=>10, :fields=>{'title'=>30, 'author'=>20} + } + } + ) + + hash = request.to_hash + assert_equal true, hash[:hl] + assert_equal "title,author", hash["hl.fl"] + assert_equal "default", hash["hl.alternateField"] + assert_equal "title", hash["f.title.hl.alternateField"] + assert_equal "author", hash["f.author.hl.alternateField"] + assert_equal 10, hash["hl.maxAlternateFieldLength"] + assert_equal 30, hash["f.title.hl.maxAlternateFieldLength"] + assert_equal 20, hash["f.author.hl.maxAlternateFieldLength"] + end + + def test_highlighting_alternate_field_old_style + request = Solr::Request::Standard.new(:query => 'query', + :highlighting => { + :field_list => ['title', 'author'], + :alternate_fields => {'title'=>'title', 'author'=>'author'}, + :max_alternate_field_length => {'title'=>30, 'author'=>20} + } + ) + + hash = request.to_hash + assert_equal true, hash[:hl] + assert_equal "title,author", hash["hl.fl"] + assert_equal "title", hash["f.title.hl.alternateField"] + assert_equal "author", hash["f.author.hl.alternateField"] + assert_equal 30, hash["f.title.hl.maxAlternateFieldLength"] + assert_equal 20, hash["f.author.hl.maxAlternateFieldLength"] + end + + def test_mlt + request = Solr::Request::Standard.new(:query => 'query', + :mlt => { + :count => 5, :field_list => ['field1', 'field2'], + :min_term_freq => 3, :min_doc_freq => 10, + :min_word_length => 4, :max_word_length => 17, + :max_query_terms => 20, 
:max_tokens_parsed => 100, + :boost => true + } + ) + + hash = request.to_hash + assert_equal true, hash[:mlt] + assert_equal 5, hash["mlt.count"] + assert_equal 'field1,field2', hash["mlt.fl"] + assert_equal 3, hash["mlt.mintf"] + assert_equal 10, hash["mlt.mindf"] + assert_equal 4, hash["mlt.minwl"] + assert_equal 17, hash["mlt.maxwl"] + assert_equal 20, hash["mlt.maxqt"] + assert_equal 100, hash["mlt.maxntp"] + assert_equal true, hash["mlt.boost"] + end + +end diff --git a/solr/client/ruby/solr-ruby/test/unit/standard_response_test.rb b/solr/client/ruby/solr-ruby/test/unit/standard_response_test.rb new file mode 100644 index 00000000000..81175d9fdf8 --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/standard_response_test.rb @@ -0,0 +1,174 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +require 'solr_mock_base' + +class StandardResponseTest < SolrMockBaseTestCase + + def test_basic + ruby_code = +<{ + 'status'=>0, + 'QTime'=>1, + 'params'=>{ + 'wt'=>'ruby', + 'rows'=>'10', + 'explainOther'=>'', + 'start'=>'0', + 'hl.fl'=>'', + 'indent'=>'on', + 'q'=>'guido', + 'fl'=>'*,score', + 'qt'=>'standard', + 'version'=>'2.2'}}, + 'response'=>{'numFound'=>1,'start'=>0,'maxScore'=>0.67833745,'docs'=>[ + { + 'name'=>'guido von rossum', + 'id'=>'123', + 'timestamp'=>'2007-01-16T09:55:30.589Z', + 'score'=>0.67833745}] + }} +RUBY_CODE + conn = Solr::Connection.new 'http://localhost:9999' + set_post_return(ruby_code) + response = conn.send(Solr::Request::Standard.new(:query => 'foo')) + assert_equal true, response.ok? + assert response.query_time + assert_equal 1, response.total_hits + assert_equal 0, response.start + assert_equal 0.67833745, response.max_score + assert_equal 1, response.hits.length + end + + def test_iteration + ruby_code = +<{ + 'status'=>0, + 'QTime'=>0, + 'params'=>{ + 'wt'=>'ruby', + 'rows'=>'10', + 'explainOther'=>'', + 'start'=>'0', + 'hl.fl'=>'', + 'indent'=>'on', + 'q'=>'guido', + 'fl'=>'*,score', + 'qt'=>'standard', + 'version'=>'2.2'}}, + 'response'=>{'numFound'=>22,'start'=>0,'maxScore'=>0.53799295,'docs'=>[ + { + 'name'=>'guido von rossum the 0', + 'id'=>'0', + 'score'=>0.53799295}, + { + 'name'=>'guido von rossum the 1', + 'id'=>'1', + 'score'=>0.53799295}, + { + 'name'=>'guido von rossum the 2', + 'id'=>'2', + 'score'=>0.53799295}, + { + 'name'=>'guido von rossum the 3', + 'id'=>'3', + 'score'=>0.53799295}, + { + 'name'=>'guido von rossum the 4', + 'id'=>'4', + 'score'=>0.53799295}, + { + 'name'=>'guido von rossum the 5', + 'id'=>'5', + 'score'=>0.53799295}, + { + 'name'=>'guido von rossum the 6', + 'id'=>'6', + 'score'=>0.53799295}, + { + 'name'=>'guido von rossum the 7', + 'id'=>'7', + 'score'=>0.53799295}, + { + 'name'=>'guido von rossum the 8', + 'id'=>'8', + 'score'=>0.53799295}, + { + 'name'=>'guido von rossum the 9', + 
'id'=>'9', + 'score'=>0.53799295}] + }} +RUBY_CODE + conn = Solr::Connection.new 'http://localhost:9999' + set_post_return(ruby_code) + + count = 0 + conn.query('foo') do |hit| + assert_equal "guido von rossum the #{count}", hit['name'] + count += 1 + end + + assert_equal 10, count + end + + def test_facets + ruby_code = + <{ + 'status'=>0, + 'QTime'=>1897, + 'params'=>{ + 'facet.limit'=>'20', + 'wt'=>'ruby', + 'rows'=>'0', + 'facet'=>'true', + 'facet.mincount'=>'1', + 'facet.field'=>[ + 'subject_genre_facet', + 'subject_geographic_facet', + 'subject_format_facet', + 'subject_era_facet', + 'subject_topic_facet'], + 'indent'=>'true', + 'fl'=>'*,score', + 'q'=>'[* TO *]', + 'qt'=>'standard', + 'facet.sort'=>'true'}}, + 'response'=>{'numFound'=>49999,'start'=>0,'maxScore'=>1.0,'docs'=>[] + }, + 'facet_counts'=>{ + 'facet_queries'=>{}, + 'facet_fields'=>{ + 'subject_genre_facet'=>[ + 'Biography.',2605, + 'Congresses.',1837, + 'Bibliography.',672, + 'Exhibitions.',642, + 'Periodicals.',615, + 'Sources.',485]}} + } +RUBY_CODE + set_post_return(ruby_code) + conn = Solr::Connection.new "http://localhost:9999" + response = conn.query('foo') + facets = response.field_facets('subject_genre_facet') + assert_equal 2605, facets[0].value + assert_equal 485, facets[5].value + end + +end + diff --git a/solr/client/ruby/solr-ruby/test/unit/suite.rb b/solr/client/ruby/solr-ruby/test/unit/suite.rb new file mode 100755 index 00000000000..d33e4afb74a --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/suite.rb @@ -0,0 +1,16 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# dynamically require all tests files +Dir.glob("*_test.rb").each do | file | + require file +end diff --git a/solr/client/ruby/solr-ruby/test/unit/tab_delimited.txt b/solr/client/ruby/solr-ruby/test/unit/tab_delimited.txt new file mode 100755 index 00000000000..386b5700ea9 --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/tab_delimited.txt @@ -0,0 +1,2 @@ +medium associatedURL boxHeightInInches boxLengthInInches boxWeightInPounds boxWidthInInches scannednumber upc asin country title fullTitle series numberInSeries edition aspect mediacount genre price currentValue language netrating description owner publisher published rare purchaseDate rating used signed hasExperienced notes location paid condition notowned author illustrator pages +book 9780865681743 0865681740 us Xing Yi Nei Gong: Xing Yi Health Maintenance and Internal Strength Development Xing Yi Nei Gong: Xing Yi Health Maintenance and Internal Strength Development Paperback $21.95 $14.05 4.5 This is the most complete book on the art of xing yi (hsing Yi) available. It includes the complete xing yi history and lineage going back eight generations; manuscripts handed down from famous practitioners Dai Long Bang and Li Neng Ran; 16 health maintenance and power development exercises; qigong (chi kung) exerices; xing yi long spear power training exercises; and more. 
Unique Publications 1998-02-10 12:00:00 +0000 2007-02-03 02:22:25 -0500 Dan Miller/ Tim Cartmell 200 diff --git a/solr/client/ruby/solr-ruby/test/unit/util_test.rb b/solr/client/ruby/solr-ruby/test/unit/util_test.rb new file mode 100755 index 00000000000..b45462a590f --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/util_test.rb @@ -0,0 +1,24 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'solr' +require 'test/unit' + +class UtilTest < Test::Unit::TestCase + def test_paired_array_to_hash + assert_equal({:key1 => :value1, :key2 => :value2}, Solr::Util.paired_array_to_hash([:key1, :value1, :key2, :value2])) + end + + def test_query_parser_escape + assert_equal %q(http\:\/\/lucene\.apache\.org\/solr), Solr::Util.query_parser_escape("http://lucene.apache.org/solr") + end +end diff --git a/solr/client/ruby/solr-ruby/test/unit/xpath_mapper_test.rb b/solr/client/ruby/solr-ruby/test/unit/xpath_mapper_test.rb new file mode 100755 index 00000000000..6d364c9e92e --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/xpath_mapper_test.rb @@ -0,0 +1,38 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +begin + require 'solr' + require 'test/unit' + require 'xml/libxml' + + class XPathMapperTest < Test::Unit::TestCase + + def setup + @doc = XML::Document.file(File.expand_path(File.dirname(__FILE__)) + "/xpath_test_file.xml") + end + + def test_simple_xpath + mapping = {:solr_field1 => :'/root/parent/child', + :solr_field2 => :'/root/parent/child/@attribute'} + + mapper = Solr::Importer::XPathMapper.new(mapping) + mapped_data = mapper.map(@doc) + + assert_equal ['text1', 'text2'], mapped_data[:solr_field1] + assert_equal ['attribute1', 'attribute2'], mapped_data[:solr_field2] + end + + end +rescue LoadError => e + puts "XPathMapperTest not run because #{e}" +end \ No newline at end of file diff --git a/solr/client/ruby/solr-ruby/test/unit/xpath_test_file.xml b/solr/client/ruby/solr-ruby/test/unit/xpath_test_file.xml new file mode 100644 index 00000000000..545734f81c2 --- /dev/null +++ b/solr/client/ruby/solr-ruby/test/unit/xpath_test_file.xml @@ -0,0 +1,25 @@ + + + + text1 + text2 + + \ No newline at end of file diff --git a/solr/common-build.xml b/solr/common-build.xml new file mode 100644 index 00000000000..6e8eea2a4d0 --- /dev/null +++ b/solr/common-build.xml @@ -0,0 +1,431 @@ + + + + + This file is designed for importing into a main build file, and not intended + for standalone use. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    ${@{file}.sum}
    + + +
    ${@{file}.base} +
    +
    +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Signing @{input.file} Sig File: @{output.file} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ################################################################## + Maven ant tasks not found. + Please make sure the maven-ant-tasks jar is in ANT_HOME/lib, or made + available to Ant using other mechanisms like -lib or CLASSPATH. + ################################################################## + + + +
    diff --git a/solr/contrib/clustering/CHANGES.txt b/solr/contrib/clustering/CHANGES.txt new file mode 100644 index 00000000000..93ba1cc86a0 --- /dev/null +++ b/solr/contrib/clustering/CHANGES.txt @@ -0,0 +1,20 @@ +Apache Solr Clustering Implementation + +Intro: + +See http://wiki.apache.org/solr/ClusteringComponent + +CHANGES + +$Id:$ + +================== Release 1.5-dev ================== + +* SOLR-1684: Switch to use the SolrIndexSearcher.doc(int, Set) method b/c it can use the document cache (gsingers) + +* SOLR-1692: Fix bug relating to carrot.produceSummary option (gsingers) + +================== Release 1.4.0 ================== + +Solr Clustering will be released for the first time in Solr 1.4. See http://wiki.apache.org/solr/ClusteringComponent + for details on using. \ No newline at end of file diff --git a/solr/contrib/clustering/README.txt b/solr/contrib/clustering/README.txt new file mode 100644 index 00000000000..41958c4514e --- /dev/null +++ b/solr/contrib/clustering/README.txt @@ -0,0 +1,8 @@ +The Clustering contrib plugin for Solr provides a generic mechanism for plugging in third party clustering implementations. +It currently provides clustering support for search results using the Carrot2 project. + +See http://wiki.apache.org/solr/ClusteringComponent for how to get started. + +Also, note, some of the Carrot2 libraries cannot be distributed in binary form because they are LGPL. Thus, you will have +to download those components. See the build.xml file located in this directory for the location of the libraries. +The libraries you will need are: nni.jar, Colt, PNJ and simple-xml. 
diff --git a/solr/contrib/clustering/build.xml b/solr/contrib/clustering/build.xml new file mode 100644 index 00000000000..b9a18f07024 --- /dev/null +++ b/solr/contrib/clustering/build.xml @@ -0,0 +1,182 @@ + + + + + + + + + + + + Clustering Integraton + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Tests failed! + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/solr/contrib/clustering/lib/carrot2-mini-3.1.0.jar b/solr/contrib/clustering/lib/carrot2-mini-3.1.0.jar new file mode 100644 index 00000000000..34cc9bc1e09 --- /dev/null +++ b/solr/contrib/clustering/lib/carrot2-mini-3.1.0.jar @@ -0,0 +1,2 @@ +AnyObjectId[5ca86c5e72b2953feb0b58fbd87f76d0301cbbf6] was removed in git history. +Apache SVN contains full history. \ No newline at end of file diff --git a/solr/contrib/clustering/lib/commons-lang-2.4.jar b/solr/contrib/clustering/lib/commons-lang-2.4.jar new file mode 100644 index 00000000000..2ef0c625eb9 --- /dev/null +++ b/solr/contrib/clustering/lib/commons-lang-2.4.jar @@ -0,0 +1,2 @@ +AnyObjectId[532939ecab6b77ccb77af3635c55ff9752b70ab7] was removed in git history. +Apache SVN contains full history. \ No newline at end of file diff --git a/solr/contrib/clustering/lib/ehcache-1.6.2.jar b/solr/contrib/clustering/lib/ehcache-1.6.2.jar new file mode 100644 index 00000000000..37d60601738 --- /dev/null +++ b/solr/contrib/clustering/lib/ehcache-1.6.2.jar @@ -0,0 +1,2 @@ +AnyObjectId[85a0ab428be7c8913c120aa932a3d78f705fa73a] was removed in git history. +Apache SVN contains full history. 
\ No newline at end of file diff --git a/solr/contrib/clustering/lib/jackson-core-asl-0.9.9-6.jar b/solr/contrib/clustering/lib/jackson-core-asl-0.9.9-6.jar new file mode 100644 index 00000000000..13b2de58b96 --- /dev/null +++ b/solr/contrib/clustering/lib/jackson-core-asl-0.9.9-6.jar @@ -0,0 +1,2 @@ +AnyObjectId[f6f425d4a0c127d5249d939b7a93b1250d454cdd] was removed in git history. +Apache SVN contains full history. \ No newline at end of file diff --git a/solr/contrib/clustering/lib/jackson-mapper-asl-0.9.9-6.jar b/solr/contrib/clustering/lib/jackson-mapper-asl-0.9.9-6.jar new file mode 100644 index 00000000000..955b2f64f29 --- /dev/null +++ b/solr/contrib/clustering/lib/jackson-mapper-asl-0.9.9-6.jar @@ -0,0 +1,2 @@ +AnyObjectId[c1652907ebda1e69895d85730f4fc83e1160306e] was removed in git history. +Apache SVN contains full history. \ No newline at end of file diff --git a/solr/contrib/clustering/lib/log4j-1.2.14.jar b/solr/contrib/clustering/lib/log4j-1.2.14.jar new file mode 100644 index 00000000000..2812b3b95fb --- /dev/null +++ b/solr/contrib/clustering/lib/log4j-1.2.14.jar @@ -0,0 +1,2 @@ +AnyObjectId[625130719013f195869881a36dcb8d2b14d64d1e] was removed in git history. +Apache SVN contains full history. 
\ No newline at end of file diff --git a/solr/contrib/clustering/solr-clustering-pom.xml.template b/solr/contrib/clustering/solr-clustering-pom.xml.template new file mode 100644 index 00000000000..568150b6864 --- /dev/null +++ b/solr/contrib/clustering/solr-clustering-pom.xml.template @@ -0,0 +1,63 @@ + + + + + 4.0.0 + + + org.apache.solr + solr-parent + @maven_version@ + + + org.apache.solr + solr-clustering + Apache Solr Clustering + @maven_version@ + Apache Solr Clustering + jar + + + org.apache.solr + solr-solrj + @maven_version@ + + + org.apache.solr + solr-core + @maven_version@ + + + + org.carrot2 + carrot2-mini + 3.1.0 + + + + + carrot2.org + Carrot2 Maven2 repository + http://download.carrot2.org/maven2/ + + + diff --git a/solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/ClusteringComponent.java b/solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/ClusteringComponent.java new file mode 100644 index 00000000000..75255fac1ca --- /dev/null +++ b/solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/ClusteringComponent.java @@ -0,0 +1,190 @@ +package org.apache.solr.handler.clustering; +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import org.apache.solr.common.params.SolrParams; +import org.apache.solr.common.util.NamedList; +import org.apache.solr.core.SolrCore; +import org.apache.solr.core.SolrResourceLoader; +import org.apache.solr.handler.clustering.carrot2.CarrotClusteringEngine; +import org.apache.solr.handler.component.ResponseBuilder; +import org.apache.solr.handler.component.SearchComponent; +import org.apache.solr.search.DocListAndSet; +import org.apache.solr.util.plugin.SolrCoreAware; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + + +/** + * Provide a plugin for clustering results. Can either be for search results (i.e. via Carrot2) or for + * clustering documents (i.e. via Mahout) + *

    + * This engine is experimental. Output from this engine is subject to change in future releases. + * + */ +public class ClusteringComponent extends SearchComponent implements SolrCoreAware { + private transient static Logger log = LoggerFactory.getLogger(ClusteringComponent.class); + + private Map searchClusteringEngines = new HashMap(); + private Map documentClusteringEngines = new HashMap(); + /** + * Base name for all spell checker query parameters. This name is also used to + * register this component with SearchHandler. + */ + public static final String COMPONENT_NAME = "clustering"; + private NamedList initParams; + + + public void prepare(ResponseBuilder rb) throws IOException { + SolrParams params = rb.req.getParams(); + if (!params.getBool(COMPONENT_NAME, false)) { + return; + } + } + + public void process(ResponseBuilder rb) throws IOException { + SolrParams params = rb.req.getParams(); + if (!params.getBool(COMPONENT_NAME, false)) { + return; + } + String name = params.get(ClusteringParams.ENGINE_NAME, ClusteringEngine.DEFAULT_ENGINE_NAME); + boolean useResults = params.getBool(ClusteringParams.USE_SEARCH_RESULTS, false); + if (useResults == true) { + SearchClusteringEngine engine = searchClusteringEngines.get(name); + if (engine != null) { + DocListAndSet results = rb.getResults(); + Object clusters = engine.cluster(rb.getQuery(), results.docList, rb.req); + rb.rsp.add("clusters", clusters); + } else { + log.warn("No engine for: " + name); + } + } + boolean useCollection = params.getBool(ClusteringParams.USE_COLLECTION, false); + if (useCollection == true) { + DocumentClusteringEngine engine = documentClusteringEngines.get(name); + if (engine != null) { + boolean useDocSet = params.getBool(ClusteringParams.USE_DOC_SET, false); + NamedList nl = null; + + //TODO: This likely needs to be made into a background task that runs in an executor + if (useDocSet == true) { + nl = engine.cluster(rb.getResults().docSet, params); + } else { + nl = 
engine.cluster(params); + } + rb.rsp.add("clusters", nl); + } else { + log.warn("No engine for " + name); + } + } + } + + @Override + @SuppressWarnings("unchecked") + public void init(NamedList args) { + super.init(args); + this.initParams = args; + } + + public void inform(SolrCore core) { + if (initParams != null) { + log.info("Initializing Clustering Engines"); + boolean searchHasDefault = false; + boolean documentHasDefault = false; + for (int i = 0; i < initParams.size(); i++) { + if (initParams.getName(i).equals("engine")) { + NamedList engineNL = (NamedList) initParams.getVal(i); + String className = (String) engineNL.get("classname"); + if (className == null) { + className = CarrotClusteringEngine.class.getName(); + } + SolrResourceLoader loader = core.getResourceLoader(); + ClusteringEngine clusterer = (ClusteringEngine) loader.newInstance(className); + if (clusterer != null) { + String name = clusterer.init(engineNL, core); + if (name != null) { + boolean isDefault = name.equals(ClusteringEngine.DEFAULT_ENGINE_NAME); + if (clusterer instanceof SearchClusteringEngine) { + if (isDefault == true && searchHasDefault == false) { + searchHasDefault = true; + } else if (isDefault == true && searchHasDefault == true) { + throw new RuntimeException("More than one engine is missing name: " + engineNL); + } + searchClusteringEngines.put(name, (SearchClusteringEngine) clusterer); + } else if (clusterer instanceof DocumentClusteringEngine) { + if (isDefault == true && documentHasDefault == false) { + searchHasDefault = true; + } else if (isDefault == true && documentHasDefault == true) { + throw new RuntimeException("More than one engine is missing name: " + engineNL); + } + documentClusteringEngines.put(name, (DocumentClusteringEngine) clusterer); + } + } else { + if (clusterer instanceof SearchClusteringEngine && searchHasDefault == false) { + searchClusteringEngines.put(ClusteringEngine.DEFAULT_ENGINE_NAME, (SearchClusteringEngine) clusterer); + searchHasDefault = 
true; + } else if (clusterer instanceof DocumentClusteringEngine && documentHasDefault == false) { + documentClusteringEngines.put(ClusteringEngine.DEFAULT_ENGINE_NAME, (DocumentClusteringEngine) clusterer); + documentHasDefault = true; + } else { + throw new RuntimeException("More than one engine is missing name: " + engineNL); + } + } + } + } + } + log.info("Finished Initializing Clustering Engines"); + } + } + + /* + * @return Unmodifiable Map of the engines, key is the name from the config, value is the engine + * */ + public Map getSearchClusteringEngines() { + return Collections.unmodifiableMap(searchClusteringEngines); + } + + // /////////////////////////////////////////// + // / SolrInfoMBean + // ////////////////////////////////////////// + + @Override + public String getDescription() { + return "A Clustering component"; + } + + @Override + public String getVersion() { + return "$Revision:$"; + } + + @Override + public String getSourceId() { + return "$Id:$"; + } + + @Override + public String getSource() { + return "$URL:$"; + } + +} diff --git a/solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/ClusteringEngine.java b/solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/ClusteringEngine.java new file mode 100644 index 00000000000..85bfffa9a2e --- /dev/null +++ b/solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/ClusteringEngine.java @@ -0,0 +1,40 @@ +package org.apache.solr.handler.clustering; +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import org.apache.solr.common.util.NamedList; +import org.apache.solr.core.SolrCore; + + +/** + * + * + **/ +public class ClusteringEngine { + private String name; + public static final String ENGINE_NAME = "name"; + public static final String DEFAULT_ENGINE_NAME = "default"; + + public String init(NamedList config, SolrCore core) { + name = (String) config.get(ENGINE_NAME); + + return name; + } + + public String getName() { + return name; + } +} diff --git a/solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/ClusteringParams.java b/solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/ClusteringParams.java new file mode 100644 index 00000000000..d5ed5b678d7 --- /dev/null +++ b/solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/ClusteringParams.java @@ -0,0 +1,37 @@ +package org.apache.solr.handler.clustering; +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +/** + * + * + **/ +public interface ClusteringParams { + + public static final String CLUSTERING_PREFIX = "clustering."; + + public static final String ENGINE_NAME = CLUSTERING_PREFIX + "engine"; + + public static final String USE_SEARCH_RESULTS = CLUSTERING_PREFIX + "results"; + + public static final String USE_COLLECTION = CLUSTERING_PREFIX + "collection"; + /** + * When document clustering, cluster on the Doc Set + */ + public static final String USE_DOC_SET = CLUSTERING_PREFIX + "docs.useDocSet"; +} diff --git a/solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/DocumentClusteringEngine.java b/solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/DocumentClusteringEngine.java new file mode 100644 index 00000000000..2b1ef9b5f0b --- /dev/null +++ b/solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/DocumentClusteringEngine.java @@ -0,0 +1,54 @@ +package org.apache.solr.handler.clustering; +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.solr.common.util.NamedList; +import org.apache.solr.common.params.SolrParams; +import org.apache.solr.core.SolrCore; +import org.apache.solr.search.DocList; +import org.apache.solr.search.DocSet; +import org.apache.lucene.search.Query; + + +/** + * Experimental. Subject to change before the next release. + * + **/ +public abstract class DocumentClusteringEngine extends ClusteringEngine { + + /** + * Experimental. Subject to change before the next release + * + * Cluster all the documents in the index. Clustering is often an expensive task that can take a long time. + * @param solrParams The params controlling clustering + * @return The clustering results + */ + public abstract NamedList cluster(SolrParams solrParams); + + /** + * Experimental. Subject to change before the next release + * + * + * Cluster the set of docs. Clustering of documents is often an expensive task that can take a long time. + * @param docs The docs to cluster. If null, cluster all docs as in {@link #cluster(org.apache.solr.common.params.SolrParams)} + * @param solrParams The params controlling the clustering + * @return The results. 
+ */ + public abstract NamedList cluster(DocSet docs, SolrParams solrParams); + + +} diff --git a/solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/SearchClusteringEngine.java b/solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/SearchClusteringEngine.java new file mode 100644 index 00000000000..1d63877f140 --- /dev/null +++ b/solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/SearchClusteringEngine.java @@ -0,0 +1,37 @@ +package org.apache.solr.handler.clustering; +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import org.apache.solr.common.util.NamedList; +import org.apache.solr.common.params.SolrParams; +import org.apache.solr.core.SolrCore; +import org.apache.solr.search.DocList; +import org.apache.solr.request.SolrQueryRequest; +import org.apache.lucene.search.Query; + + +/** + * + * + **/ +public abstract class SearchClusteringEngine extends ClusteringEngine { + + + public abstract Object cluster(Query query, DocList docList, SolrQueryRequest sreq); + + +} diff --git a/solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngine.java b/solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngine.java new file mode 100644 index 00000000000..55e23f94cb4 --- /dev/null +++ b/solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngine.java @@ -0,0 +1,259 @@ +package org.apache.solr.handler.clustering.carrot2; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import java.io.IOException; +import java.util.*; + +import org.apache.commons.lang.StringUtils; +import org.apache.lucene.document.FieldSelector; +import org.apache.lucene.document.SetBasedFieldSelector; +import org.apache.lucene.search.Query; +import org.apache.solr.common.params.HighlightParams; +import org.apache.solr.common.params.SolrParams; +import org.apache.solr.common.util.NamedList; +import org.apache.solr.common.util.SimpleOrderedMap; +import org.apache.solr.common.SolrException; +import org.apache.solr.core.SolrCore; +import org.apache.solr.handler.clustering.SearchClusteringEngine; +import org.apache.solr.highlight.SolrHighlighter; +import org.apache.solr.request.LocalSolrQueryRequest; +import org.apache.solr.request.SolrQueryRequest; +import org.apache.solr.search.*; +import org.apache.solr.util.RefCounted; +import org.carrot2.core.*; +import org.carrot2.core.attribute.AttributeNames; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.Sets; + +/** + * Search results clustering engine based on Carrot2 clustering algorithms. + *

    + * Output from this class is subject to change. + * + * @link http://project.carrot2.org + */ +@SuppressWarnings("unchecked") +public class CarrotClusteringEngine extends SearchClusteringEngine { + private transient static Logger log = LoggerFactory + .getLogger(CarrotClusteringEngine.class); + + /** + * Carrot2 controller that manages instances of clustering algorithms + */ + private CachingController controller = new CachingController(); + private Class clusteringAlgorithmClass; + + private String idFieldName; + + public Object cluster(Query query, DocList docList, SolrQueryRequest sreq) { + try { + // Prepare attributes for Carrot2 clustering call + Map attributes = new HashMap(); + List documents = getDocuments(docList, query, sreq); + attributes.put(AttributeNames.DOCUMENTS, documents); + attributes.put(AttributeNames.QUERY, query.toString()); + + // Pass extra overriding attributes from the request, if any + extractCarrotAttributes(sreq.getParams(), attributes); + + // Perform clustering and convert to named list + return clustersToNamedList(controller.process(attributes, + clusteringAlgorithmClass).getClusters(), sreq.getParams()); + } catch (Exception e) { + log.error("Carrot2 clustering failed", e); + throw new RuntimeException(e); + } + } + + @Override + public String init(NamedList config, final SolrCore core) { + String result = super.init(config, core); + SolrParams initParams = SolrParams.toSolrParams(config); + + // Initialize Carrot2 controller. Pass initialization attributes, if any. 
+ HashMap initAttributes = new HashMap(); + extractCarrotAttributes(initParams, initAttributes); + this.controller.init(initAttributes); + + this.idFieldName = core.getSchema().getUniqueKeyField().getName(); + + // Make sure the requested Carrot2 clustering algorithm class is available + String carrotAlgorithmClassName = initParams.get(CarrotParams.ALGORITHM); + Class algorithmClass = core.getResourceLoader().findClass(carrotAlgorithmClassName); + if (!IClusteringAlgorithm.class.isAssignableFrom(algorithmClass)) { + throw new IllegalArgumentException("Class provided as " + + CarrotParams.ALGORITHM + " must implement " + + IClusteringAlgorithm.class.getName()); + } + this.clusteringAlgorithmClass = (Class) algorithmClass; + + return result; + } + + /** + * Prepares Carrot2 documents for clustering. + */ + private List getDocuments(DocList docList, + Query query, final SolrQueryRequest sreq) throws IOException { + SolrHighlighter highlighter = null; + SolrParams solrParams = sreq.getParams(); + SolrCore core = sreq.getCore(); + + // Names of fields to deliver content for clustering + String urlField = solrParams.get(CarrotParams.URL_FIELD_NAME, "url"); + String titleField = solrParams.get(CarrotParams.TITLE_FIELD_NAME, "title"); + String snippetField = solrParams.get(CarrotParams.SNIPPET_FIELD_NAME, + titleField); + if (StringUtils.isBlank(snippetField)) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, CarrotParams.SNIPPET_FIELD_NAME + + " must not be blank."); + } + Set fieldsToLoad = Sets.newHashSet(urlField, titleField, + snippetField, idFieldName); + + // Get the documents + DocIterator docsIter = docList.iterator(); + boolean produceSummary = solrParams.getBool(CarrotParams.PRODUCE_SUMMARY, + false); + + SolrQueryRequest req = null; + String[] snippetFieldAry = null; + if (produceSummary == true) { + highlighter = core.getHighlighter(); + if (highlighter != null){ + Map args = new HashMap(); + snippetFieldAry = new String[]{snippetField}; + 
args.put(HighlightParams.FIELDS, snippetFieldAry); + args.put(HighlightParams.HIGHLIGHT, "true"); + args.put(HighlightParams.SIMPLE_PRE, ""); //we don't care about actually highlighting the area + args.put(HighlightParams.SIMPLE_POST, ""); + args.put(HighlightParams.FRAGSIZE, solrParams.getInt(CarrotParams.SUMMARY_FRAGSIZE, solrParams.getInt(HighlightParams.FRAGSIZE, 100))); + req = new LocalSolrQueryRequest(core, query.toString(), "", 0, 1, args) { + @Override + public SolrIndexSearcher getSearcher() { + return sreq.getSearcher(); + } + }; + } else { + log.warn("No highlighter configured, cannot produce summary"); + produceSummary = false; + } + } + + SolrIndexSearcher searcher = sreq.getSearcher(); + List result = new ArrayList(docList.size()); + + float[] scores = {1.0f}; + int[] docsHolder = new int[1]; + Query theQuery = query; + + while (docsIter.hasNext()) { + Integer id = docsIter.next(); + org.apache.lucene.document.Document doc = searcher.doc(id, + fieldsToLoad); + String snippet = getValue(doc, snippetField); + if (produceSummary == true) { + docsHolder[0] = id.intValue(); + DocList docAsList = new DocSlice(0, 1, docsHolder, scores, 1, 1.0f); + NamedList highlights = highlighter.doHighlighting(docAsList, theQuery, req, snippetFieldAry); + if (highlights != null && highlights.size() == 1) {//should only be one value given our setup + //should only be one document with one field + NamedList tmp = (NamedList) highlights.getVal(0); + String [] highlt = (String[]) tmp.get(snippetField); + if (highlt != null && highlt.length == 1) { + snippet = highlt[0]; + } + } + } + Document carrotDocument = new Document(getValue(doc, titleField), + snippet, doc.get(urlField)); + carrotDocument.setField("solrId", doc.get(idFieldName)); + result.add(carrotDocument); + } + + return result; + } + + protected String getValue(org.apache.lucene.document.Document doc, + String field) { + StringBuilder result = new StringBuilder(); + String[] vals = doc.getValues(field); + for (int 
i = 0; i < vals.length; i++) { + // Join multiple values with a period so that Carrot2 does not pick up + // phrases that cross field value boundaries (in most cases it would + // create useless phrases). + result.append(vals[i]).append(" . "); + } + return result.toString().trim(); + } + + private List clustersToNamedList(List carrotClusters, + SolrParams solrParams) { + List result = new ArrayList(); + clustersToNamedList(carrotClusters, result, solrParams.getBool( + CarrotParams.OUTPUT_SUB_CLUSTERS, true), solrParams.getInt( + CarrotParams.NUM_DESCRIPTIONS, Integer.MAX_VALUE)); + return result; + } + + private void clustersToNamedList(List outputClusters, + List parent, boolean outputSubClusters, int maxLabels) { + for (Cluster outCluster : outputClusters) { + NamedList cluster = new SimpleOrderedMap(); + parent.add(cluster); + + List labels = outCluster.getPhrases(); + if (labels.size() > maxLabels) + labels = labels.subList(0, maxLabels); + cluster.add("labels", labels); + + List docs = outputSubClusters ? outCluster.getDocuments() : outCluster.getAllDocuments(); + List docList = new ArrayList(); + cluster.add("docs", docList); + for (Document doc : docs) { + docList.add(doc.getField("solrId")); + } + + if (outputSubClusters) { + List subclusters = new ArrayList(); + cluster.add("clusters", subclusters); + clustersToNamedList(outCluster.getSubclusters(), subclusters, + outputSubClusters, maxLabels); + } + } + } + + /** + * Extracts parameters that can possibly match some attributes of Carrot2 algorithms. + */ + private void extractCarrotAttributes(SolrParams solrParams, + Map attributes) { + // Extract all non-predefined parameters. This way, we'll be able to set all + // parameters of Carrot2 algorithms without defining their names as constants. 
+ for (Iterator paramNames = solrParams.getParameterNamesIterator(); paramNames + .hasNext();) { + String paramName = paramNames.next(); + if (!CarrotParams.CARROT_PARAM_NAMES.contains(paramName)) { + attributes.put(paramName, solrParams.get(paramName)); + } + } + } +} diff --git a/solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/carrot2/CarrotParams.java b/solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/carrot2/CarrotParams.java new file mode 100644 index 00000000000..6809566e384 --- /dev/null +++ b/solr/contrib/clustering/src/main/java/org/apache/solr/handler/clustering/carrot2/CarrotParams.java @@ -0,0 +1,42 @@ +package org.apache.solr.handler.clustering.carrot2; + +import java.util.Set; + +import com.google.common.collect.ImmutableSet; +import org.apache.solr.common.params.HighlightParams; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + +public interface CarrotParams { + + String CARROT_PREFIX = "carrot."; + + String ALGORITHM = CARROT_PREFIX + "algorithm"; + String TITLE_FIELD_NAME = CARROT_PREFIX + "title"; + String URL_FIELD_NAME = CARROT_PREFIX + "url"; + String SNIPPET_FIELD_NAME = CARROT_PREFIX + "snippet"; + String PRODUCE_SUMMARY = CARROT_PREFIX + "produceSummary"; + String NUM_DESCRIPTIONS = CARROT_PREFIX + "numDescriptions"; + String OUTPUT_SUB_CLUSTERS = CARROT_PREFIX + "outputSubClusters"; + String SUMMARY_FRAGSIZE = CARROT_PREFIX + "fragzise"; + + public static final Set CARROT_PARAM_NAMES = ImmutableSet.of( + ALGORITHM, TITLE_FIELD_NAME, URL_FIELD_NAME, SNIPPET_FIELD_NAME, + PRODUCE_SUMMARY, NUM_DESCRIPTIONS, OUTPUT_SUB_CLUSTERS, SUMMARY_FRAGSIZE); +} diff --git a/solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/AbstractClusteringTest.java b/solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/AbstractClusteringTest.java new file mode 100644 index 00000000000..422d2a44031 --- /dev/null +++ b/solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/AbstractClusteringTest.java @@ -0,0 +1,198 @@ +package org.apache.solr.handler.clustering; +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.solr.util.AbstractSolrTestCase; + + +/** + * + */ +public class AbstractClusteringTest extends AbstractSolrTestCase { + protected int numberOfDocs = 0; + + @Override + public void setUp() throws Exception { + super.setUp(); + + numberOfDocs = 0; + for (String[] doc : DOCUMENTS) { + assertU("add failed", adoc("id", Integer.toString(numberOfDocs), "url", doc[0], "title", doc[1], "snippet", doc[2])); + numberOfDocs++; + } + assertU("commit", commit()); + } + + public String getSchemaFile() { + return "schema.xml"; + } + + public String getSolrConfigFile() { + return "solrconfig.xml"; + } + + final String[][] DOCUMENTS = new String[][]{ + {"http://en.wikipedia.org/wiki/Data_mining", + "Data Mining - Wikipedia", + "Article about knowledge-discovery in databases (KDD), the practice of automatically searching large stores of data for patterns."}, + + + {"http://en.wikipedia.org/wiki/Datamining", + "Data mining - Wikipedia, the free encyclopedia", + "Data mining is the entire process of applying computer-based methodology, ... Moreover, some data-mining systems such as neural networks are inherently geared ..."}, + + + {"http://www.statsoft.com/textbook/stdatmin.html", + "Electronic Statistics Textbook: Data Mining Techniques", + "Outlines the crucial concepts in data mining, defines the data warehousing process, and offers examples of computational and graphical exploratory data analysis techniques."}, + + + {"http://www.thearling.com/text/dmwhite/dmwhite.htm", + "An Introduction to Data Mining", + "Data mining, the extraction of hidden predictive information from large ... 
Data mining tools predict future trends and behaviors, allowing businesses to ..."}, + + + {"http://www.anderson.ucla.edu/faculty/jason.frand/teacher/technologies/palace/datamining.htm", + "Data Mining: What is Data Mining?", + "Outlines what knowledge discovery, the process of analyzing data from different perspectives and summarizing it into useful information, can do and how it works."}, + + + {"http://www.spss.com/datamine", + "Data Mining Software, Data Mining Applications and Data Mining Solutions", + "The patterns uncovered using data mining help organizations make better and ... data mining customer ... Data mining applications, on the other hand, embed ..."}, + + + {"http://www.kdnuggets.com/", + "KD Nuggets", + "Newsletter on the data mining and knowledge industries, offering information on data mining, knowledge discovery, text mining, and web mining software, courses, jobs, publications, and meetings."}, + + + {"http://www.answers.com/topic/data-mining", + "data mining: Definition from Answers.com", + "data mining n. The automatic extraction of useful, often previously unknown information from large databases or data ... Data Mining For Investing ..."}, + + + {"http://www.statsoft.com/products/dataminer.htm", + "STATISTICA Data Mining and Predictive Modeling Solutions", + "GRC site-wide menuing system research and development. ... Contact a Data Mining Solutions Consultant. News and Success Stories. Events ..."}, + + + {"http://datamining.typepad.com/", + "Data Mining: Text Mining, Visualization and Social Media", + "Commentary on text mining, data mining, social media and data visualization. ... 
While mining Twitter data for business and marketing intelligence (trend/buzz ..."}, + + + {"http://www.twocrows.com/", + "Two Crows Corporation", + "Dedicated to the development, marketing, sales and support of tools for knowledge discovery to make data mining accessible and easy to use."}, + + + {"http://www.thearling.com/", + "Thearling.com", + "Kurt Thearling's site dedicated to sharing information about data mining, the automated extraction of hidden predictive information from databases, and other analytic technologies."}, + + + {"http://www.ccsu.edu/datamining/", + "CCSU - Data Mining", + "Offers degrees and certificates in data mining. Allows students to explore cutting-edge data mining techniques and applications: market basket analysis, decision trees, neural networks, machine learning, web mining, and data modeling."}, + + + {"http://www.oracle.com/technology/products/bi/odm", + "Oracle Data Mining", + "Oracle Data Mining Product Center ... New Oracle Data Mining Powers New Social CRM Application (more information ... Mining High-Dimensional Data for ..."}, + + + {"http://databases.about.com/od/datamining/a/datamining.htm", + "Data Mining: An Introduction", + "About.com article on how businesses are discovering new trends and patterns of behavior that previously went unnoticed through data mining, automated statistical analysis techniques."}, + + + {"http://www.dmoz.org/Computers/Software/Databases/Data_Mining/", + "Open Directory - Computers: Software: Databases: Data Mining", + "Data Mining and Knowledge Discovery - A peer-reviewed journal publishing ... Data mining creates information assets that an organization can leverage to ..."}, + + + {"http://www.cs.wisc.edu/dmi/", + "DMI:Data Mining Institute", + "Data Mining Institute at UW-Madison ... The Data Mining Institute (DMI) was started on June 1, 1999 at the Computer ... 
of the Data Mining Group of Microsoft ..."}, + + + {"http://www.the-data-mine.com/", + "The Data Mine", + "Provides information about data mining also known as knowledge discovery in databases (KDD) or simply knowledge discovery. List software, events, organizations, and people working in data mining."}, + + + {"http://www.statserv.com/datamining.html", + "St@tServ - About Data Mining", + "St@tServ Data Mining page ... Data mining in molecular biology, by Alvis Brazma. Graham Williams page. Knowledge Discovery and Data Mining Resources, ..."}, + + + {"http://ocw.mit.edu/OcwWeb/Sloan-School-of-Management/15-062Data-MiningSpring2003/CourseHome/index.htm", + "MIT OpenCourseWare | Sloan School of Management | 15.062 Data Mining ...", + "Introduces students to a class of methods known as data mining that assists managers in recognizing patterns and making intelligent use of massive amounts of ..."}, + + + {"http://www.pentaho.com/products/data_mining/", + "Pentaho Commercial Open Source Business Intelligence: Data Mining", + "For example, data mining can warn you there's a high probability a specific ... Pentaho Data Mining is differentiated by its open, standards-compliant nature, ..."}, + + + {"http://www.investorhome.com/mining.htm", + "Investor Home - Data Mining", + "Data Mining or Data Snooping is the practice of searching for relationships and ... Data mining involves searching through databases for correlations and patterns ..."}, + + + {"http://www.datamining.com/", + "Predictive Modeling and Predictive Analytics Solutions | Enterprise ...", + "Insightful Enterprise Miner - Enterprise data mining for predictive modeling and predictive analytics."}, + + + {"http://www.sourcewatch.org/index.php?title=Data_mining", + "Data mining - SourceWatch", + "These agencies reported 199 data mining projects, of which 68 ... Office, \"DATA MINING. ... 
powerful technology known as data mining -- and how, in the ..."}, + + + {"http://www.autonlab.org/tutorials/", + "Statistical Data Mining Tutorials", + "Includes a set of tutorials on many aspects of statistical data mining, including the foundations of probability, the foundations of statistical data analysis, and most of the classic machine learning and data mining algorithms."}, + + + {"http://www.microstrategy.com/data-mining/index.asp", + "Data Mining", + "With MicroStrategy, data mining scoring is fully integrated into mainstream ... The integration of data mining models from other applications is accomplished by ..."}, + + + {"http://www.datamininglab.com/", + "Elder Research", + "Provides consulting and short courses in data mining and pattern discovery patterns in data."}, + + + {"http://www.sqlserverdatamining.com/", + "SQL Server Data Mining > Home", + "SQL Server Data Mining Portal ... Data Mining as an Application Platform (Whitepaper) Creating a Web Cross-sell Application with SQL Server 2005 Data Mining (Article) ..."}, + + + {"http://databases.about.com/cs/datamining/g/dmining.htm", + "Data Mining", + "What is data mining? Find out here! ... Book Review: Data Mining and Statistical Analysis Using SQL. What is Data Mining, and What Does it Have to Do with ..."}, + + + {"http://www.sas.com/technologies/analytics/datamining/index.html", + "Data Mining Software and Text Mining | SAS", + "... raw data to smarter ... Data Mining is an iterative process of creating ... 
The knowledge gleaned from data and text mining can be used to fuel ..."} + }; +} diff --git a/solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/ClusteringComponentTest.java b/solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/ClusteringComponentTest.java new file mode 100644 index 00000000000..096710c3b9d --- /dev/null +++ b/solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/ClusteringComponentTest.java @@ -0,0 +1,80 @@ +package org.apache.solr.handler.clustering; +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import org.apache.solr.common.params.CommonParams; +import org.apache.solr.common.params.ModifiableSolrParams; +import org.apache.solr.common.util.NamedList; +import org.apache.solr.common.util.SimpleOrderedMap; +import org.apache.solr.core.SolrCore; +import org.apache.solr.handler.component.QueryComponent; +import org.apache.solr.handler.component.SearchComponent; +import org.apache.solr.request.LocalSolrQueryRequest; +import org.apache.solr.response.SolrQueryResponse; +import org.apache.solr.request.SolrRequestHandler; + + +/** + * + * + **/ +public class ClusteringComponentTest extends AbstractClusteringTest { + + public void testComponent() throws Exception { + SolrCore core = h.getCore(); + + SearchComponent sc = core.getSearchComponent("clustering"); + assertTrue("sc is null and it shouldn't be", sc != null); + ModifiableSolrParams params = new ModifiableSolrParams(); + + params.add(ClusteringComponent.COMPONENT_NAME, "true"); + params.add(CommonParams.Q, "*:*"); + + params.add(ClusteringParams.USE_SEARCH_RESULTS, "true"); + + + SolrRequestHandler handler = core.getRequestHandler("standard"); + SolrQueryResponse rsp; + rsp = new SolrQueryResponse(); + rsp.add("responseHeader", new SimpleOrderedMap()); + handler.handleRequest(new LocalSolrQueryRequest(core, params), rsp); + NamedList values = rsp.getValues(); + Object clusters = values.get("clusters"); + //System.out.println("Clusters: " + clusters); + assertTrue("clusters is null and it shouldn't be", clusters != null); + + + params = new ModifiableSolrParams(); + params.add(ClusteringComponent.COMPONENT_NAME, "true"); + params.add(ClusteringParams.ENGINE_NAME, "mock"); + params.add(ClusteringParams.USE_COLLECTION, "true"); + params.add(QueryComponent.COMPONENT_NAME, "false"); + + handler = core.getRequestHandler("docClustering"); + + rsp = new SolrQueryResponse(); + rsp.add("responseHeader", new SimpleOrderedMap()); + handler.handleRequest(new LocalSolrQueryRequest(core, params), rsp); + values = 
rsp.getValues(); + clusters = values.get("clusters"); + //System.out.println("Clusters: " + clusters); + assertTrue("clusters is null and it shouldn't be", clusters != null); + + + } + +} diff --git a/solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/MockDocumentClusteringEngine.java b/solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/MockDocumentClusteringEngine.java new file mode 100644 index 00000000000..90f0ab73e5a --- /dev/null +++ b/solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/MockDocumentClusteringEngine.java @@ -0,0 +1,37 @@ +package org.apache.solr.handler.clustering; +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import org.apache.solr.common.params.SolrParams; +import org.apache.solr.common.util.NamedList; +import org.apache.solr.search.DocSet; + + +/** + * + * + **/ +public class MockDocumentClusteringEngine extends DocumentClusteringEngine { + public NamedList cluster(DocSet docs, SolrParams solrParams) { + NamedList result = new NamedList(); + return result; + } + + public NamedList cluster(SolrParams solrParams) { + NamedList result = new NamedList(); + return result; + } +} diff --git a/solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngineTest.java b/solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngineTest.java new file mode 100644 index 00000000000..b9c69a1c6a9 --- /dev/null +++ b/solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngineTest.java @@ -0,0 +1,183 @@ +package org.apache.solr.handler.clustering.carrot2; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import org.apache.lucene.index.Term; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.TermQuery; +import org.apache.solr.common.params.ModifiableSolrParams; +import org.apache.solr.common.params.SolrParams; +import org.apache.solr.common.util.NamedList; +import org.apache.solr.handler.clustering.AbstractClusteringTest; +import org.apache.solr.handler.clustering.ClusteringComponent; +import org.apache.solr.request.LocalSolrQueryRequest; +import org.apache.solr.search.DocList; +import org.apache.solr.search.SolrIndexSearcher; +import org.apache.solr.util.RefCounted; +import org.carrot2.util.attribute.AttributeUtils; + +import java.io.IOException; +import java.util.List; + +/** + * + */ +@SuppressWarnings("unchecked") +public class CarrotClusteringEngineTest extends AbstractClusteringTest { + public void testCarrotLingo() throws Exception { + checkEngine(getClusteringEngine("default"), 10); + } + + public void testProduceSummary() throws Exception { + ModifiableSolrParams solrParams = new ModifiableSolrParams(); + solrParams.add(CarrotParams.SNIPPET_FIELD_NAME, "snippet"); + solrParams.add(CarrotParams.SUMMARY_FRAGSIZE, "200");//how do we validate this? 
+ checkEngine(getClusteringEngine("default"), numberOfDocs -2 /*two don't have mining in the snippet*/, 16, new TermQuery(new Term("snippet", "mine")), solrParams); + } + + public void testCarrotStc() throws Exception { + checkEngine(getClusteringEngine("stc"), 1); + } + + public void testWithoutSubclusters() throws Exception { + checkClusters(checkEngine(getClusteringEngine("mock"), this.numberOfDocs), + 1, 1, 0); + } + + public void testWithSubclusters() throws Exception { + ModifiableSolrParams params = new ModifiableSolrParams(); + params.set(CarrotParams.OUTPUT_SUB_CLUSTERS, true); + checkClusters(checkEngine(getClusteringEngine("mock"), this.numberOfDocs), 1, 1, 2); + } + + public void testNumDescriptions() throws Exception { + ModifiableSolrParams params = new ModifiableSolrParams(); + params.set(AttributeUtils.getKey(MockClusteringAlgorithm.class, "labels"), 5); + params.set(CarrotParams.NUM_DESCRIPTIONS, 3); + checkClusters(checkEngine(getClusteringEngine("mock"), this.numberOfDocs, + params), 1, 3, 0); + } + + public void testCarrotAttributePassing() throws Exception { + ModifiableSolrParams params = new ModifiableSolrParams(); + params.set(AttributeUtils.getKey(MockClusteringAlgorithm.class, "depth"), 1); + params.set(AttributeUtils.getKey(MockClusteringAlgorithm.class, "labels"), 3); + checkClusters(checkEngine(getClusteringEngine("mock"), this.numberOfDocs, + params), 1, 3, 0); + } + + private CarrotClusteringEngine getClusteringEngine(String engineName) { + ClusteringComponent comp = (ClusteringComponent) h.getCore() + .getSearchComponent("clustering"); + assertNotNull("clustering component should not be null", comp); + CarrotClusteringEngine engine = (CarrotClusteringEngine) comp + .getSearchClusteringEngines().get(engineName); + assertNotNull("clustering engine for name: " + engineName + + " should not be null", engine); + return engine; + } + + private List checkEngine(CarrotClusteringEngine engine, + int expectedNumClusters) throws IOException { + 
return checkEngine(engine, numberOfDocs, expectedNumClusters, new MatchAllDocsQuery(), new ModifiableSolrParams()); + } + + private List checkEngine(CarrotClusteringEngine engine, + int expectedNumClusters, SolrParams clusteringParams) throws IOException { + return checkEngine(engine, numberOfDocs, expectedNumClusters, new MatchAllDocsQuery(), clusteringParams); + } + + + private List checkEngine(CarrotClusteringEngine engine, int expectedNumDocs, + int expectedNumClusters, Query query, SolrParams clusteringParams) throws IOException { + // Get all documents to cluster + RefCounted ref = h.getCore().getSearcher(); + + DocList docList; + try { + SolrIndexSearcher searcher = ref.get(); + docList = searcher.getDocList(query, (Query) null, new Sort(), 0, + numberOfDocs); + assertEquals("docList size", expectedNumDocs, docList.matches()); + } finally { + ref.decref(); + } + + ModifiableSolrParams solrParams = new ModifiableSolrParams(); + solrParams.add(CarrotParams.PRODUCE_SUMMARY, "true"); + solrParams.add(clusteringParams); + + // Perform clustering + LocalSolrQueryRequest req = new LocalSolrQueryRequest(h.getCore(), solrParams); + List results = (List) engine.cluster(query, docList, req); + req.close(); + assertEquals("number of clusters: " + results, expectedNumClusters, results.size()); + checkClusters(results, false); + return results; + } + + private void checkClusters(List results, int expectedDocCount, + int expectedLabelCount, int expectedSubclusterCount) { + for (int i = 0; i < results.size(); i++) { + NamedList cluster = (NamedList) results.get(i); + checkCluster(cluster, expectedDocCount, expectedLabelCount, + expectedSubclusterCount); + } + } + + private void checkClusters(List results, boolean hasSubclusters) { + for (int i = 0; i < results.size(); i++) { + checkCluster((NamedList) results.get(i), hasSubclusters); + } + } + + private void checkCluster(NamedList cluster, boolean hasSubclusters) { + List docs = (List) cluster.get("docs"); + 
assertNotNull("docs is null and it shouldn't be", docs); + for (int j = 0; j < docs.size(); j++) { + String id = (String) docs.get(j); + assertNotNull("id is null and it shouldn't be", id); + } + + List labels = (List) cluster.get("labels"); + assertNotNull("labels is null but it shouldn't be", labels); + + if (hasSubclusters) { + List subclusters = (List) cluster.get("clusters"); + assertNotNull("subclusters is null but it shouldn't be", subclusters); + } + } + + private void checkCluster(NamedList cluster, int expectedDocCount, + int expectedLabelCount, int expectedSubclusterCount) { + checkCluster(cluster, expectedSubclusterCount > 0); + assertEquals("number of docs in cluster", expectedDocCount, + ((List) cluster.get("docs")).size()); + assertEquals("number of labels in cluster", expectedLabelCount, + ((List) cluster.get("labels")).size()); + + if (expectedSubclusterCount > 0) { + List subclusters = (List) cluster.get("clusters"); + assertEquals("numClusters", expectedSubclusterCount, subclusters.size()); + assertEquals("number of subclusters in cluster", + expectedSubclusterCount, subclusters.size()); + } + } +} diff --git a/solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/carrot2/MockClusteringAlgorithm.java b/solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/carrot2/MockClusteringAlgorithm.java new file mode 100644 index 00000000000..ffd70cf1d25 --- /dev/null +++ b/solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/carrot2/MockClusteringAlgorithm.java @@ -0,0 +1,83 @@ +package org.apache.solr.handler.clustering.carrot2; +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import com.google.common.collect.Lists; +import org.carrot2.core.*; +import org.carrot2.core.attribute.AttributeNames; +import org.carrot2.core.attribute.Processing; +import org.carrot2.util.attribute.*; +import org.carrot2.util.attribute.constraint.IntRange; + +import java.util.List; + +@Bindable(prefix = "MockClusteringAlgorithm") +public class MockClusteringAlgorithm extends ProcessingComponentBase implements + IClusteringAlgorithm { + @Input + @Processing + @Attribute(key = AttributeNames.DOCUMENTS) + private List documents; + + @Output + @Processing + @Attribute(key = AttributeNames.CLUSTERS) + private List clusters; + + @Input + @Processing + @Attribute + @IntRange(min = 1, max = 5) + private int depth = 2; + + @Input + @Processing + @Attribute + @IntRange(min = 1, max = 5) + private int labels = 1; + + @Override + public void process() throws ProcessingException { + clusters = Lists.newArrayList(); + if (documents == null) { + return; + } + + int documentIndex = 1; + for (Document document : documents) { + StringBuilder label = new StringBuilder("Cluster " + documentIndex); + Cluster cluster = createCluster(label.toString(), document); + clusters.add(cluster); + for (int i = 1; i <= depth; i++) { + label.append("."); + label.append(i); + Cluster newCluster = createCluster(label.toString(), document); + cluster.addSubclusters(createCluster(label.toString(), document), 
newCluster); + cluster = newCluster; + } + documentIndex++; + } + } + + private Cluster createCluster(String labelBase, Document... documents) { + Cluster cluster = new Cluster(); + for (int i = 0; i < labels; i++) { + cluster.addPhrases(labelBase + "#" + (i + 1)); + } + cluster.addDocuments(documents); + return cluster; + } +} diff --git a/solr/contrib/clustering/src/test/resources/solr/conf/mapping-ISOLatin1Accent.txt b/solr/contrib/clustering/src/test/resources/solr/conf/mapping-ISOLatin1Accent.txt new file mode 100644 index 00000000000..ede7742581b --- /dev/null +++ b/solr/contrib/clustering/src/test/resources/solr/conf/mapping-ISOLatin1Accent.txt @@ -0,0 +1,246 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Syntax: +# "source" => "target" +# "source".length() > 0 (source cannot be empty.) +# "target".length() >= 0 (target can be empty.) 
+ +# example: +# "À" => "A" +# "\u00C0" => "A" +# "\u00C0" => "\u0041" +# "ß" => "ss" +# "\t" => " " +# "\n" => "" + +# À => A +"\u00C0" => "A" + +# à => A +"\u00C1" => "A" + +#  => A +"\u00C2" => "A" + +# à => A +"\u00C3" => "A" + +# Ä => A +"\u00C4" => "A" + +# Ã… => A +"\u00C5" => "A" + +# Æ => AE +"\u00C6" => "AE" + +# Ç => C +"\u00C7" => "C" + +# È => E +"\u00C8" => "E" + +# É => E +"\u00C9" => "E" + +# Ê => E +"\u00CA" => "E" + +# Ë => E +"\u00CB" => "E" + +# ÃŒ => I +"\u00CC" => "I" + +# à => I +"\u00CD" => "I" + +# ÃŽ => I +"\u00CE" => "I" + +# à => I +"\u00CF" => "I" + +# IJ => IJ +"\u0132" => "IJ" + +# à => D +"\u00D0" => "D" + +# Ñ => N +"\u00D1" => "N" + +# Ã’ => O +"\u00D2" => "O" + +# Ó => O +"\u00D3" => "O" + +# Ô => O +"\u00D4" => "O" + +# Õ => O +"\u00D5" => "O" + +# Ö => O +"\u00D6" => "O" + +# Ø => O +"\u00D8" => "O" + +# Å’ => OE +"\u0152" => "OE" + +# Þ +"\u00DE" => "TH" + +# Ù => U +"\u00D9" => "U" + +# Ú => U +"\u00DA" => "U" + +# Û => U +"\u00DB" => "U" + +# Ãœ => U +"\u00DC" => "U" + +# à => Y +"\u00DD" => "Y" + +# Ÿ => Y +"\u0178" => "Y" + +# à => a +"\u00E0" => "a" + +# á => a +"\u00E1" => "a" + +# â => a +"\u00E2" => "a" + +# ã => a +"\u00E3" => "a" + +# ä => a +"\u00E4" => "a" + +# Ã¥ => a +"\u00E5" => "a" + +# æ => ae +"\u00E6" => "ae" + +# ç => c +"\u00E7" => "c" + +# è => e +"\u00E8" => "e" + +# é => e +"\u00E9" => "e" + +# ê => e +"\u00EA" => "e" + +# ë => e +"\u00EB" => "e" + +# ì => i +"\u00EC" => "i" + +# í => i +"\u00ED" => "i" + +# î => i +"\u00EE" => "i" + +# ï => i +"\u00EF" => "i" + +# ij => ij +"\u0133" => "ij" + +# ð => d +"\u00F0" => "d" + +# ñ => n +"\u00F1" => "n" + +# ò => o +"\u00F2" => "o" + +# ó => o +"\u00F3" => "o" + +# ô => o +"\u00F4" => "o" + +# õ => o +"\u00F5" => "o" + +# ö => o +"\u00F6" => "o" + +# ø => o +"\u00F8" => "o" + +# Å“ => oe +"\u0153" => "oe" + +# ß => ss +"\u00DF" => "ss" + +# þ => th +"\u00FE" => "th" + +# ù => u +"\u00F9" => "u" + +# ú => u +"\u00FA" => "u" + +# û => u +"\u00FB" => "u" + +# ü 
=> u +"\u00FC" => "u" + +# ý => y +"\u00FD" => "y" + +# ÿ => y +"\u00FF" => "y" + +# ff => ff +"\uFB00" => "ff" + +# ï¬ => fi +"\uFB01" => "fi" + +# fl => fl +"\uFB02" => "fl" + +# ffi => ffi +"\uFB03" => "ffi" + +# ffl => ffl +"\uFB04" => "ffl" + +# ſt => ft +"\uFB05" => "ft" + +# st => st +"\uFB06" => "st" diff --git a/solr/contrib/clustering/src/test/resources/solr/conf/protwords.txt b/solr/contrib/clustering/src/test/resources/solr/conf/protwords.txt new file mode 100644 index 00000000000..1dfc0abecbf --- /dev/null +++ b/solr/contrib/clustering/src/test/resources/solr/conf/protwords.txt @@ -0,0 +1,21 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +#----------------------------------------------------------------------- +# Use a protected word file to protect against the stemmer reducing two +# unrelated words to the same base word. + +# Some non-words that normally won't be encountered, +# just to test that they won't be stemmed. 
+dontstems +zwhacky + diff --git a/solr/contrib/clustering/src/test/resources/solr/conf/schema.xml b/solr/contrib/clustering/src/test/resources/solr/conf/schema.xml new file mode 100644 index 00000000000..9a4a42df525 --- /dev/null +++ b/solr/contrib/clustering/src/test/resources/solr/conf/schema.xml @@ -0,0 +1,347 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id + + + text + + + + + + + + + + + + + + + + + diff --git a/solr/contrib/clustering/src/test/resources/solr/conf/solrconfig.xml b/solr/contrib/clustering/src/test/resources/solr/conf/solrconfig.xml new file mode 100644 index 00000000000..6db595045b5 --- /dev/null +++ b/solr/contrib/clustering/src/test/resources/solr/conf/solrconfig.xml @@ -0,0 +1,561 @@ + + + + + + ${solr.abortOnConfigurationError:true} + + + ${solr.data.dir:./solr/data} + + + + + false + + 10 + + + + 32 + 2147483647 + 10000 + 1000 + 10000 + + + + + + + + + + + single + + + + + false + 32 + 10 + + + 2147483647 + 10000 + + + false + + + + + + + + + + + + + + + + + + + + + + 1024 + + + + + + + + + + + + + true + + + + + + + + 50 + + + 200 + + + + + + + + + solr 0 10 + rocks 0 10 + static newSearcher warming query from solrconfig.xml + + + + + + + fast_warm 0 10 + static firstSearcher warming query from solrconfig.xml + + + + + false + + + 2 + + + + + + + + + + + + + + + + + + + + + + + explicit + + + + clustering + + + + + + + + explicit + + + + doc-clustering + + + + + + + + + + + default + org.carrot2.clustering.lingo.LingoClusteringAlgorithm + + + stc + org.carrot2.clustering.stc.STCClusteringAlgorithm + + + mock + org.apache.solr.handler.clustering.carrot2.MockClusteringAlgorithm + + + + + + + + mock + org.apache.solr.handler.clustering.MockDocumentClusteringEngine + + + + + + + + + + + + + + + 
+ + + + + + + + + standard + solrpingquery + all + + + + + + + explicit + true + + + + + + + + + 100 + + + + + + + + 70 + + 0.5 + + [-\w ,/\n\"']{20,200} + + + + + + + ]]> + ]]> + + + + + + + + + + 5 + + + + + + + + + + solr + + + + + diff --git a/solr/contrib/clustering/src/test/resources/solr/conf/spellings.txt b/solr/contrib/clustering/src/test/resources/solr/conf/spellings.txt new file mode 100644 index 00000000000..d7ede6f5611 --- /dev/null +++ b/solr/contrib/clustering/src/test/resources/solr/conf/spellings.txt @@ -0,0 +1,2 @@ +pizza +history \ No newline at end of file diff --git a/solr/contrib/clustering/src/test/resources/solr/conf/stopwords.txt b/solr/contrib/clustering/src/test/resources/solr/conf/stopwords.txt new file mode 100644 index 00000000000..b5824da3263 --- /dev/null +++ b/solr/contrib/clustering/src/test/resources/solr/conf/stopwords.txt @@ -0,0 +1,58 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +#----------------------------------------------------------------------- +# a couple of test stopwords to test that the words are really being +# configured from this file: +stopworda +stopwordb + +#Standard english stop words taken from Lucene's StopAnalyzer +a +an +and +are +as +at +be +but +by +for +if +in +into +is +it +no +not +of +on +or +s +such +t +that +the +their +then +there +these +they +this +to +was +will +with + diff --git a/solr/contrib/clustering/src/test/resources/solr/conf/synonyms.txt b/solr/contrib/clustering/src/test/resources/solr/conf/synonyms.txt new file mode 100644 index 00000000000..b0e31cb7ec8 --- /dev/null +++ b/solr/contrib/clustering/src/test/resources/solr/conf/synonyms.txt @@ -0,0 +1,31 @@ +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +#----------------------------------------------------------------------- +#some test synonym mappings unlikely to appear in real input text +aaa => aaaa +bbb => bbbb1 bbbb2 +ccc => cccc1,cccc2 +a\=>a => b\=>b +a\,a => b\,b +fooaaa,baraaa,bazaaa + +# Some synonym groups specific to this example +GB,gib,gigabyte,gigabytes +MB,mib,megabyte,megabytes +Television, Televisions, TV, TVs +#notice we use "gib" instead of "GiB" so any WordDelimiterFilter coming +#after us won't split it into two words. 
+ +# Synonym mappings can be used for spelling correction too +pixima => pixma + diff --git a/solr/contrib/dataimporthandler/CHANGES.txt b/solr/contrib/dataimporthandler/CHANGES.txt new file mode 100644 index 00000000000..4ab33653da1 --- /dev/null +++ b/solr/contrib/dataimporthandler/CHANGES.txt @@ -0,0 +1,426 @@ + Apache Solr - DataImportHandler + Release Notes + +Introduction +------------ +DataImportHandler is a data import tool for Solr which makes importing data from Databases, XML files and +HTTP data sources quick and easy. + + +$Id$ +================== 1.5.0-dev ================== +Upgrading from Solr 1.4 +---------------------- + +Versions of Major Components +--------------------- + +Detailed Change List +---------------------- + +New Features +---------------------- + +* SOLR-1525 : allow DIH to refer to core properties (noble) + +* SOLR-1547 : TemplateTransformer copy objects more intelligently when there when the template is a single variable (noble) + +* SOLR-1627 : VariableResolver should be fetched just in time (noble) + +* SOLR-1583 : Create DataSources that return InputStream (noble) + +* SOLR-1358 : Integration of Tika and DataImportHandler ( Akshay Ukey, noble) + +* SOLR-1654 : TikaEntityProcessor example added DIHExample (Akshay Ukey via noble) + +* SOLR-1678 : Move onError handling to DIH framework (noble) + +* SOLR-1352 : Multi-threaded implementation of DIH (noble) + +* SOLR-1721 : Add explicit option to run DataImportHandler in synchronous mode (Alexey Serba via noble) + +* SOLR-1737 : Added FieldStreamDataSource (noble) + +Optimizations +---------------------- + +Bug Fixes +---------------------- +* SOLR-1638: Fixed NullPointerException during import if uniqueKey is not specified + in schema (Akshay Ukey via shalin) + +* SOLR-1639: Fixed misleading error message when dataimport.properties is not writable (shalin) + +* SOLR-1598: Reader used in PlainTextEntityProcessor is not explicitly closed (Sascha Szott via noble) + +* SOLR-1759: 
$skipDoc was not working correctly (Gian Marco Tagliani via noble) + +* SOLR-1762: DateFormatTransformer does not work correctly with non-default locale dates (tommy chheng via noble) + +* SOLR-1757: DIH multithreading sometimes throws NPE (noble) + +* SOLR-1766: DIH with threads enabled doesn't respond to the abort command (Michael Henson via noble) + +* SOLR-1767: dataimporter.functions.escapeSql() does not escape backslash character (Sean Timm via noble) + +Other Changes +---------------------- + + +Build +---------------------- + + +Documentation +---------------------- + +================== Release 1.4.0 ================== + +Upgrading from Solr 1.3 +----------------------- + +Evaluator API has been changed in a non back-compatible way. Users who have developed custom Evaluators will need +to change their code according to the new API for it to work. See SOLR-996 for details. + +The formatDate evaluator's syntax has been changed. The new syntax is formatDate(, ''). +For example, formatDate(x.date, 'yyyy-MM-dd'). In the old syntax, the date string was written without a single-quotes. +The old syntax has been deprecated and will be removed in 1.5, until then, using the old syntax will log a warning. + +The Context API has been changed in a non back-compatible way. In particular, the Context.currentProcess() method +now returns a String describing the type of the current import process instead of an int. Similarily, the public +constants in Context viz. FULL_DUMP, DELTA_DUMP and FIND_DELTA are changed to a String type. See SOLR-969 for details. + +The EntityProcessor API has been simplified by moving logic for applying transformers and handling multi-row outputs +from Transformers into an EntityProcessorWrapper class. The EntityProcessor#destroy is now called once per +parent-row at the end of row (end of data). A new method EntityProcessor#close is added which is called at the end +of import. 
+ +In Solr 1.3, if the last_index_time was not available (first import) and a delta-import was requested, a full-import +was run instead. This is no longer the case. In Solr 1.4 delta import is run with last_index_time as the epoch +date (January 1, 1970, 00:00:00 GMT) if last_index_time is not available. + +Detailed Change List +---------------------- + +New Features +---------------------- +1. SOLR-768: Set last_index_time variable in full-import command. + (Wojtek Piaseczny, Noble Paul via shalin) + +2. SOLR-811: Allow a "deltaImportQuery" attribute in SqlEntityProcessor which is used for delta imports + instead of DataImportHandler manipulating the SQL itself. + (Noble Paul via shalin) + +3. SOLR-842: Better error handling in DataImportHandler with options to abort, skip and continue imports. + (Noble Paul, shalin) + +4. SOLR-833: A DataSource to read data from a field as a reader. This can be used, for example, to read XMLs + residing as CLOBs or BLOBs in databases. + (Noble Paul via shalin) + +5. SOLR-887: A Transformer to strip HTML tags. + (Ahmed Hammad via shalin) + +6. SOLR-886: DataImportHandler should rollback when an import fails or it is aborted + (shalin) + +7. SOLR-891: A Transformer to read strings from Clob type. + (Noble Paul via shalin) + +8. SOLR-812: Configurable JDBC settings in JdbcDataSource including optimized defaults for read only mode. + (David Smiley, Glen Newton, shalin) + +9. SOLR-910: Add a few utility commands to the DIH admin page such as full import, delta import, status, reload config. + (Ahmed Hammad via shalin) + +10.SOLR-938: Add event listener API for import start and end. + (Kay Kay, Noble Paul via shalin) + +11.SOLR-801: Add support for configurable pre-import and post-import delete query per root-entity. + (Noble Paul via shalin) + +12.SOLR-988: Add a new scope for session data stored in Context to store objects across imports. 
+ (Noble Paul via shalin) + +13.SOLR-980: A PlainTextEntityProcessor which can read from any DataSource and output a String. + (Nathan Adams, Noble Paul via shalin) + +14.SOLR-1003: XPathEntityprocessor must allow slurping all text from a given xml node and its children. + (Noble Paul via shalin) + +15.SOLR-1001: Allow variables in various attributes of RegexTransformer, HTMLStripTransformer + and NumberFormatTransformer. + (Fergus McMenemie, Noble Paul, shalin) + +16.SOLR-989: Expose running statistics from the Context API. + (Noble Paul, shalin) + +17.SOLR-996: Expose Context to Evaluators. + (Noble Paul, shalin) + +18.SOLR-783: Enhance delta-imports by maintaining separate last_index_time for each entity. + (Jon Baer, Noble Paul via shalin) + +19.SOLR-1033: Current entity's namespace is made available to all Transformers. This allows one to use an output field + of TemplateTransformer in other transformers, among other things. + (Fergus McMenemie, Noble Paul via shalin) + +20.SOLR-1066: New methods in Context to expose Script details. ScriptTransformer changed to read scripts + through the new API methods. + (Noble Paul via shalin) + +21.SOLR-1062: A LogTransformer which can log data in a given template format. + (Jon Baer, Noble Paul via shalin) + +22.SOLR-1065: A ContentStreamDataSource which can accept HTTP POST data in a content stream. This can be used to + push data to Solr instead of just pulling it from DB/Files/URLs. + (Noble Paul via shalin) + +23.SOLR-1061: Improve RegexTransformer to create multiple columns from regex groups. + (Noble Paul via shalin) + +24.SOLR-1059: Special flags introduced for deleting documents by query or id, skipping rows and stopping further + transforms. Use $deleteDocById, $deleteDocByQuery for deleting by id and query respectively. + Use $skipRow to skip the current row but continue with the document. Use $stopTransform to stop + further transformers. New methods are introduced in Context for deleting by id and query. 
+ (Noble Paul, Fergus McMenemie, shalin) + +25.SOLR-1076: JdbcDataSource should resolve variables in all its configuration parameters. + (shalin) + +26.SOLR-1055: Make DIH JdbcDataSource easily extensible by making the createConnectionFactory method protected and + return a Callable object. + (Noble Paul, shalin) + +27.SOLR-1058: JdbcDataSource can lookup javax.sql.DataSource using JNDI. Use a jndiName attribute to specify the + location of the data source. + (Jason Shepherd, Noble Paul via shalin) + +28.SOLR-1083: An Evaluator for escaping query characters. + (Noble Paul, shalin) + +29.SOLR-934: A MailEntityProcessor to enable indexing mails from POP/IMAP sources into a solr index. + (Preetam Rao, shalin) + +30.SOLR-1060: A LineEntityProcessor which can stream lines of text from a given file to be indexed directly or + for processing with transformers and child entities. + (Fergus McMenemie, Noble Paul, shalin) + +31.SOLR-1127: Add support for field name to be templatized. + (Noble Paul, shalin) + +32.SOLR-1092: Added a new command named 'import' which does not automatically clean the index. This is useful and + more appropriate when one needs to import only some of the entities. + (Noble Paul via shalin) + +33.SOLR-1153: 'deltaImportQuery' is honored on child entities as well (noble) + +34.SOLR-1230: Enhanced dataimport.jsp to work with all DataImportHandler request handler configurations, + rather than just a hardcoded /dataimport handler. (ehatcher) + +35.SOLR-1235: disallow period (.) in entity names (noble) + +36.SOLR-1234: Multiple DIH does not work because all of them write to dataimport.properties. 
+ Use the handler name as the properties file name (noble) + +37.SOLR-1348: Support binary field type in convertType logic in JdbcDataSource (shalin) + +38.SOLR-1406: Make FileDataSource and FileListEntityProcessor to be more extensible (Luke Forehand, shalin) + +39.SOLR-1437 : XPathEntityProcessor can deal with xpath syntaxes such as //tagname , /root//tagname (Fergus McMenemie via noble) + +Optimizations +---------------------- +1. SOLR-846: Reduce memory consumption during delta import by removing keys when used + (Ricky Leung, Noble Paul via shalin) + +2. SOLR-974: DataImportHandler skips commit if no data has been updated. + (Wojtek Piaseczny, shalin) + +3. SOLR-1004: Check for abort more frequently during delta-imports. + (Marc Sturlese, shalin) + +4. SOLR-1098: DateFormatTransformer can cache the format objects. + (Noble Paul via shalin) + +5. SOLR-1465: Replaced string concatenations with StringBuilder append calls in XPathRecordReader. + (Mark Miller, shalin) + + +Bug Fixes +---------------------- +1. SOLR-800: Deep copy collections to avoid ConcurrentModificationException in XPathEntityprocessor while streaming + (Kyle Morrison, Noble Paul via shalin) + +2. SOLR-823: Request parameter variables ${dataimporter.request.xxx} are not resolved + (Mck SembWever, Noble Paul, shalin) + +3. SOLR-728: Add synchronization to avoid race condition of multiple imports working concurrently + (Walter Ferrara, shalin) + +4. SOLR-742: Add ability to create dynamic fields with custom DataImportHandler transformers + (Wojtek Piaseczny, Noble Paul, shalin) + +5. SOLR-832: Rows parameter is not honored in non-debug mode and can abort a running import in debug mode. + (Akshay Ukey, shalin) + +6. SOLR-838: The VariableResolver obtained from a DataSource's context does not have current data. + (Noble Paul via shalin) + +7. SOLR-864: DataImportHandler does not catch and log Errors (shalin) + +8. SOLR-873: Fix case-sensitive field names and columns (Jon Baer, shalin) + +9. 
SOLR-893: Unable to delete documents via SQL and deletedPkQuery with deltaimport + (Dan Rosher via shalin) + +10. SOLR-888: DateFormatTransformer cannot convert non-string type + (Amit Nithian via shalin) + +11. SOLR-841: DataImportHandler should throw exception if a field does not have column attribute + (Michael Henson, shalin) + +12. SOLR-884: CachedSqlEntityProcessor should check if the cache key is present in the query results + (Noble Paul via shalin) + +13. SOLR-985: Fix thread-safety issue with TemplateString for concurrent imports with multiple cores. + (Ryuuichi Kumai via shalin) + +14. SOLR-999: XPathRecordReader fails on XMLs with nodes mixed with CDATA content. + (Fergus McMenemie, Noble Paul via shalin) + +15.SOLR-1000: FileListEntityProcessor should not apply fileName filter to directory names. + (Fergus McMenemie via shalin) + +16.SOLR-1009: Repeated column names result in duplicate values. + (Fergus McMenemie, Noble Paul via shalin) + +17.SOLR-1017: Fix thread-safety issue with last_index_time for concurrent imports in multiple cores due to unsafe usage + of SimpleDateFormat by multiple threads. + (Ryuuichi Kumai via shalin) + +18.SOLR-1024: Calling abort on DataImportHandler import commits data instead of calling rollback. + (shalin) + +19.SOLR-1037: DIH should not add null values in a row returned by EntityProcessor to documents. + (shalin) + +20.SOLR-1040: XPathEntityProcessor fails with an xpath like /feed/entry/link[@type='text/html']/@href + (Noble Paul via shalin) + +21.SOLR-1042: Fix memory leak in DIH by making TemplateString non-static member in VariableResolverImpl + (Ryuuichi Kumai via shalin) + +22.SOLR-1053: IndexOutOfBoundsException in SolrWriter.getResourceAsString when size of data-config.xml is a + multiple of 1024 bytes. + (Herb Jiang via shalin) + +23.SOLR-1077: IndexOutOfBoundsException with useSolrAddSchema in XPathEntityProcessor. 
+ (Sam Keen, Noble Paul via shalin) + +24.SOLR-1080: RegexTransformer should not replace if regex is not matched. + (Noble Paul, Fergus McMenemie via shalin) + +25.SOLR-1090: DataImportHandler should load the data-config.xml using UTF-8 encoding. + (Rui Pereira, shalin) + +26.SOLR-1146: ConcurrentModificationException in DataImporter.getStatusMessages + (Walter Ferrara, Noble Paul via shalin) + +27.SOLR-1229: Fixes for deletedPkQuery, particularly when using transformed Solr unique id's + (Lance Norskog, Noble Paul via ehatcher) + +28.SOLR-1286: Fix the commit parameter always defaulting to "true" even if "false" is explicitly passed in. + (Jay Hill, Noble Paul via ehatcher) + +29.SOLR-1323: Reset XPathEntityProcessor's $hasMore/$nextUrl when fetching next URL (noble, ehatcher) + +30.SOLR-1450: Jdbc connection properties such as batchSize are not applied if the driver jar is placed + in solr_home/lib. + (Steve Sun via shalin) + +31.SOLR-1474: Delta-import should run even if last_index_time is not set. + (shalin) + + +Documentation +---------------------- +1. SOLR-1369: Add HSQLDB Jar to example-DIH, unzip database and update instructions. + +Other +---------------------- +1. SOLR-782: Refactored SolrWriter to make it a concrete class and removed wrappers over SolrInputDocument. + Refactored to load Evaluators lazily. Removed multiple document nodes in the configuration xml. + Removed support for 'default' variables, they are automatically available as request parameters. + (Noble Paul via shalin) + +2. SOLR-964: XPathEntityProcessor now ignores DTD validations + (Fergus McMenemie, Noble Paul via shalin) + +3. SOLR-1029: Standardize Evaluator parameter parsing and added helper functions for parsing all evaluator + parameters in a standard way. + (Noble Paul, shalin) + +4. SOLR-1081: Change EventListener to be an interface so that components such as an EntityProcessor or a Transformer + can act as an event listener. + (Noble Paul, shalin) + +5. 
SOLR-1027: Alias the 'dataimporter' namespace to a shorter name 'dih'. + (Noble Paul via shalin) + +6. SOLR-1084: Better error reporting when entity name is a reserved word and data-config.xml root node + is not . + (Noble Paul via shalin) + +7. SOLR-1087: Deprecate 'where' attribute in CachedSqlEntityProcessor in favor of cacheKey and cacheLookup. + (Noble Paul via shalin) + +8. SOLR-969: Change the FULL_DUMP, DELTA_DUMP, FIND_DELTA constants in Context to String. + Change Context.currentProcess() to return a string instead of an integer. + (Kay Kay, Noble Paul, shalin) + +9. SOLR-1120: Simplified EntityProcessor API by moving logic for applying transformers and handling multi-row outputs + from Transformers into an EntityProcessorWrapper class. The behavior of the method + EntityProcessor#destroy has been modified to be called once per parent-row at the end of row. A new + method EntityProcessor#close is added which is called at the end of import. A new method + Context#getResolvedEntityAttribute is added which returns the resolved value of an entity's attribute. + Introduced a DocWrapper which takes care of maintaining document level session variables. + (Noble Paul, shalin) + +10.SOLR-1265: Add variable resolving for URLDataSource properties like baseUrl. (Chris Eldredge via ehatcher) + +11.SOLR-1269: Better error messages from JdbcDataSource when JDBC Driver name or SQL is incorrect. + (ehatcher, shalin) + +================== Release 1.3.0 20080915 ================== + +Status +------ +This is the first release since DataImportHandler was added to the contrib solr distribution. +The following changes list changes since the code was introduced, not since +the first official release. + + +Detailed Change List +-------------------- + +New Features +1. SOLR-700: Allow configurable locales through a locale attribute in fields for NumberFormatTransformer. + (Stefan Oestreicher, shalin) + +Changes in runtime behavior + +Bug Fixes +1. 
SOLR-704: NumberFormatTransformer can silently ignore part of the string while parsing. Now it tries to + use the complete string for parsing. Failure to do so will result in an exception. + (Stefan Oestreicher via shalin) + +2. SOLR-729: Context.getDataSource(String) gives current entity's DataSource instance regardless of argument. + (Noble Paul, shalin) + +3. SOLR-726: Jdbc Drivers and DataSources fail to load if placed in multicore sharedLib or core's lib directory. + (Walter Ferrara, Noble Paul, shalin) + +Other Changes + + diff --git a/solr/contrib/dataimporthandler/build.xml b/solr/contrib/dataimporthandler/build.xml new file mode 100644 index 00000000000..b2832a79acb --- /dev/null +++ b/solr/contrib/dataimporthandler/build.xml @@ -0,0 +1,210 @@ + + + + + + + + + + + + + Data Import Handler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Tests failed! + + + + + + + + + + + + + + + + + + Tests failed! + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/solr/contrib/dataimporthandler/lib/activation-1.1.jar b/solr/contrib/dataimporthandler/lib/activation-1.1.jar new file mode 100644 index 00000000000..737214c22d4 --- /dev/null +++ b/solr/contrib/dataimporthandler/lib/activation-1.1.jar @@ -0,0 +1,2 @@ +AnyObjectId[53f82a1c4c492dc810c27317857bbb02afd6fa58] was removed in git history. +Apache SVN contains full history. \ No newline at end of file diff --git a/solr/contrib/dataimporthandler/lib/mail-1.4.1.jar b/solr/contrib/dataimporthandler/lib/mail-1.4.1.jar new file mode 100644 index 00000000000..c73aed1bc87 --- /dev/null +++ b/solr/contrib/dataimporthandler/lib/mail-1.4.1.jar @@ -0,0 +1,2 @@ +AnyObjectId[1d15e793ecd1c709de0739a7d3d818266c2e141b] was removed in git history. +Apache SVN contains full history. 
\ No newline at end of file diff --git a/solr/contrib/dataimporthandler/solr-dataimporthandler-extras-pom.xml.template b/solr/contrib/dataimporthandler/solr-dataimporthandler-extras-pom.xml.template new file mode 100644 index 00000000000..491dd8902e3 --- /dev/null +++ b/solr/contrib/dataimporthandler/solr-dataimporthandler-extras-pom.xml.template @@ -0,0 +1,52 @@ + + + + + 4.0.0 + + + org.apache.solr + solr-parent + @maven_version@ + + + org.apache.solr + solr-dataimporthandler-extras + Apache Solr DataImportHandler Extras + @maven_version@ + Apache Solr DataImportHandler Extras + jar + + + + javax.activation + activation + 1.1 + + + javax.mail + mail + 1.4.1 + + + + diff --git a/solr/contrib/dataimporthandler/solr-dataimporthandler-pom.xml.template b/solr/contrib/dataimporthandler/solr-dataimporthandler-pom.xml.template new file mode 100644 index 00000000000..a15a176a54a --- /dev/null +++ b/solr/contrib/dataimporthandler/solr-dataimporthandler-pom.xml.template @@ -0,0 +1,39 @@ + + + + + 4.0.0 + + + org.apache.solr + solr-parent + @maven_version@ + + + org.apache.solr + solr-dataimporthandler + Apache Solr DataImportHandler + @maven_version@ + Apache Solr DataImportHandler + jar + + diff --git a/solr/contrib/dataimporthandler/src/extras/main/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java b/solr/contrib/dataimporthandler/src/extras/main/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java new file mode 100644 index 00000000000..7e464bf3f20 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/extras/main/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java @@ -0,0 +1,599 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import com.sun.mail.imap.IMAPMessage; +import org.apache.tika.config.TikaConfig; +import org.apache.tika.utils.ParseUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.mail.*; +import javax.mail.internet.AddressException; +import javax.mail.internet.ContentType; +import javax.mail.internet.InternetAddress; +import javax.mail.internet.MimeMessage; +import javax.mail.search.AndTerm; +import javax.mail.search.ComparisonTerm; +import javax.mail.search.ReceivedDateTerm; +import javax.mail.search.SearchTerm; +import java.io.InputStream; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.*; + +/** + * An EntityProcessor instance which can index emails along with their attachments from POP3 or IMAP sources. Refer to + * http://wiki.apache.org/solr/DataImportHandler for more + * details. 
This API is experimental and subject to change + * + * @version $Id$ + * @since solr 1.4 + */ +public class MailEntityProcessor extends EntityProcessorBase { + + public static interface CustomFilter { + public SearchTerm getCustomSearch(Folder folder); + } + + public void init(Context context) { + super.init(context); + // set attributes using XXX getXXXFromContext(attribute, defualtValue); + // applies variable resolver and return default if value is not found or null + // REQUIRED : connection and folder info + user = getStringFromContext("user", null); + password = getStringFromContext("password", null); + host = getStringFromContext("host", null); + protocol = getStringFromContext("protocol", null); + folderNames = getStringFromContext("folders", null); + // validate + if (host == null || protocol == null || user == null || password == null + || folderNames == null) + throw new DataImportHandlerException(DataImportHandlerException.SEVERE, + "'user|password|protocol|host|folders' are required attributes"); + + //OPTIONAL : have defaults and are optional + recurse = getBoolFromContext("recurse", true); + String excludes = getStringFromContext("exclude", ""); + if (excludes != null && !excludes.trim().equals("")) { + exclude = Arrays.asList(excludes.split(",")); + } + String includes = getStringFromContext("include", ""); + if (includes != null && !includes.trim().equals("")) { + include = Arrays.asList(includes.split(",")); + } + batchSize = getIntFromContext("batchSize", 20); + customFilter = getStringFromContext("customFilter", ""); + String s = getStringFromContext("fetchMailsSince", ""); + if (s != null) + try { + fetchMailsSince = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse(s); + } catch (ParseException e) { + throw new DataImportHandlerException(DataImportHandlerException.SEVERE, "Invalid value for fetchMailSince: " + s, e); + } + + fetchSize = getIntFromContext("fetchSize", 32 * 1024); + cTimeout = getIntFromContext("connectTimeout", 30 * 1000); + 
rTimeout = getIntFromContext("readTimeout", 60 * 1000); + processAttachment = getBoolFromContext("processAttachement", true); + + logConfig(); + } + + public Map nextRow() { + Message mail; + Map row = null; + do { + // try till there is a valid document or folders get exhausted. + // when mail == NULL, it means end of processing + mail = getNextMail(); + if (mail != null) + row = getDocumentFromMail(mail); + } while (row == null && mail != null); + return row; + } + + private Message getNextMail() { + if (!connected) { + if (!connectToMailBox()) + return null; + connected = true; + } + if (folderIter == null) { + createFilters(); + folderIter = new FolderIterator(mailbox); + } + // get next message from the folder + // if folder is exhausted get next folder + // loop till a valid mail or all folders exhausted. + while (msgIter == null || !msgIter.hasNext()) { + Folder next = folderIter.hasNext() ? folderIter.next() : null; + if (next == null) { + return null; + } + msgIter = new MessageIterator(next, batchSize); + } + return msgIter.next(); + } + + private Map getDocumentFromMail(Message mail) { + Map row = new HashMap(); + try { + addPartToDocument(mail, row, true); + return row; + } catch (Exception e) { + return null; + } + } + + public void addPartToDocument(Part part, Map row, boolean outerMost) throws Exception { + if (part instanceof Message) { + addEnvelopToDocument(part, row); + } + + String ct = part.getContentType(); + ContentType ctype = new ContentType(ct); + if (part.isMimeType("multipart/*")) { + Multipart mp = (Multipart) part.getContent(); + int count = mp.getCount(); + if (part.isMimeType("multipart/alternative")) + count = 1; + for (int i = 0; i < count; i++) + addPartToDocument(mp.getBodyPart(i), row, false); + } else if (part.isMimeType("message/rfc822")) { + addPartToDocument((Part) part.getContent(), row, false); + } else { + String disp = part.getDisposition(); + if (!processAttachment || (disp != null && 
disp.equalsIgnoreCase(Part.ATTACHMENT))) return; + InputStream is = part.getInputStream(); + String fileName = part.getFileName(); + String content = ParseUtils.getStringContent(is, TikaConfig.getDefaultConfig(), ctype.getBaseType().toLowerCase()); + if (disp != null && disp.equalsIgnoreCase(Part.ATTACHMENT)) { + if (row.get(ATTACHMENT) == null) + row.put(ATTACHMENT, new ArrayList()); + List contents = (List) row.get(ATTACHMENT); + contents.add(content); + row.put(ATTACHMENT, contents); + if (row.get(ATTACHMENT_NAMES) == null) + row.put(ATTACHMENT_NAMES, new ArrayList()); + List names = (List) row.get(ATTACHMENT_NAMES); + names.add(fileName); + row.put(ATTACHMENT_NAMES, names); + } else { + if (row.get(CONTENT) == null) + row.put(CONTENT, new ArrayList()); + List contents = (List) row.get(CONTENT); + contents.add(content); + row.put(CONTENT, contents); + } + } + } + + private void addEnvelopToDocument(Part part, Map row) throws MessagingException { + MimeMessage mail = (MimeMessage) part; + Address[] adresses; + if ((adresses = mail.getFrom()) != null && adresses.length > 0) + row.put(FROM, adresses[0].toString()); + + List to = new ArrayList(); + if ((adresses = mail.getRecipients(Message.RecipientType.TO)) != null) + addAddressToList(adresses, to); + if ((adresses = mail.getRecipients(Message.RecipientType.CC)) != null) + addAddressToList(adresses, to); + if ((adresses = mail.getRecipients(Message.RecipientType.BCC)) != null) + addAddressToList(adresses, to); + if (to.size() > 0) + row.put(TO_CC_BCC, to); + + row.put(MESSAGE_ID, mail.getMessageID()); + row.put(SUBJECT, mail.getSubject()); + + Date d = mail.getSentDate(); + if (d != null) { + row.put(SENT_DATE, d); + } + + List flags = new ArrayList(); + for (Flags.Flag flag : mail.getFlags().getSystemFlags()) { + if (flag == Flags.Flag.ANSWERED) + flags.add(FLAG_ANSWERED); + else if (flag == Flags.Flag.DELETED) + flags.add(FLAG_DELETED); + else if (flag == Flags.Flag.DRAFT) + flags.add(FLAG_DRAFT); + else if 
(flag == Flags.Flag.FLAGGED) + flags.add(FLAG_FLAGGED); + else if (flag == Flags.Flag.RECENT) + flags.add(FLAG_RECENT); + else if (flag == Flags.Flag.SEEN) + flags.add(FLAG_SEEN); + } + flags.addAll(Arrays.asList(mail.getFlags().getUserFlags())); + row.put(FLAGS, flags); + + String[] hdrs = mail.getHeader("X-Mailer"); + if (hdrs != null) + row.put(XMAILER, hdrs[0]); + } + + + private void addAddressToList(Address[] adresses, List to) throws AddressException { + for (Address address : adresses) { + to.add(address.toString()); + InternetAddress ia = (InternetAddress) address; + if (ia.isGroup()) { + InternetAddress[] group = ia.getGroup(false); + for (InternetAddress member : group) + to.add(member.toString()); + } + } + } + + private boolean connectToMailBox() { + try { + Properties props = new Properties(); + props.setProperty("mail.store.protocol", protocol); + props.setProperty("mail.imap.fetchsize", "" + fetchSize); + props.setProperty("mail.imap.timeout", "" + rTimeout); + props.setProperty("mail.imap.connectiontimeout", "" + cTimeout); + Session session = Session.getDefaultInstance(props, null); + mailbox = session.getStore(protocol); + mailbox.connect(host, user, password); + LOG.info("Connected to mailbox"); + return true; + } catch (MessagingException e) { + throw new DataImportHandlerException(DataImportHandlerException.SEVERE, + "Connection failed", e); + } + } + + private void createFilters() { + if (fetchMailsSince != null) { + filters.add(new MailsSinceLastCheckFilter(fetchMailsSince)); + } + if (customFilter != null && !customFilter.equals("")) { + try { + Class cf = Class.forName(customFilter); + Object obj = cf.newInstance(); + if (obj instanceof CustomFilter) { + filters.add((CustomFilter) obj); + } + } catch (Exception e) { + throw new DataImportHandlerException(DataImportHandlerException.SEVERE, + "Custom filter could not be created", e); + } + } + } + + private void logConfig() { + if (!LOG.isInfoEnabled()) return; + StringBuffer config = new 
StringBuffer(); + config.append("user : ").append(user).append(System.getProperty("line.separator")); + config.append("pwd : ").append(password).append(System.getProperty("line.separator")); + config.append("protocol : ").append(protocol).append(System.getProperty("line.separator")); + config.append("host : ").append(host).append(System.getProperty("line.separator")); + config.append("folders : ").append(folderNames).append(System.getProperty("line.separator")); + config.append("recurse : ").append(recurse).append(System.getProperty("line.separator")); + config.append("exclude : ").append(exclude.toString()).append(System.getProperty("line.separator")); + config.append("include : ").append(include.toString()).append(System.getProperty("line.separator")); + config.append("batchSize : ").append(batchSize).append(System.getProperty("line.separator")); + config.append("fetchSize : ").append(fetchSize).append(System.getProperty("line.separator")); + config.append("read timeout : ").append(rTimeout).append(System.getProperty("line.separator")); + config.append("conection timeout : ").append(cTimeout).append(System.getProperty("line.separator")); + config.append("custom filter : ").append(customFilter).append(System.getProperty("line.separator")); + config.append("fetch mail since : ").append(fetchMailsSince).append(System.getProperty("line.separator")); + LOG.info(config.toString()); + } + + class FolderIterator implements Iterator { + private Store mailbox; + private List topLevelFolders; + private List folders = null; + private Folder lastFolder = null; + + public FolderIterator(Store mailBox) { + this.mailbox = mailBox; + folders = new ArrayList(); + getTopLevelFolders(mailBox); + } + + public boolean hasNext() { + return !folders.isEmpty(); + } + + public Folder next() { + try { + boolean hasMessages = false; + Folder next; + do { + if (lastFolder != null) { + lastFolder.close(false); + lastFolder = null; + } + if (folders.isEmpty()) { + mailbox.close(); + return 
null; + } + next = folders.remove(0); + if (next != null) { + String fullName = next.getFullName(); + if (!excludeFolder(fullName)) { + hasMessages = (next.getType() & Folder.HOLDS_MESSAGES) != 0; + next.open(Folder.READ_ONLY); + lastFolder = next; + LOG.info("Opened folder : " + fullName); + } + if (recurse && ((next.getType() & Folder.HOLDS_FOLDERS) != 0)) { + Folder[] children = next.list(); + LOG.info("Added its children to list : "); + for (int i = children.length - 1; i >= 0; i--) { + folders.add(0, children[i]); + LOG.info("child name : " + children[i].getFullName()); + } + if (children.length == 0) + LOG.info("NO children : "); + } + } + } + while (!hasMessages); + return next; + } catch (MessagingException e) { + //throw new DataImportHandlerException(DataImportHandlerException.SEVERE, + // "Folder open failed", e); + } + return null; + } + + public void remove() { + throw new UnsupportedOperationException("Its read only mode..."); + } + + private void getTopLevelFolders(Store mailBox) { + if (folderNames != null) + topLevelFolders = Arrays.asList(folderNames.split(",")); + for (int i = 0; topLevelFolders != null && i < topLevelFolders.size(); i++) { + try { + folders.add(mailbox.getFolder(topLevelFolders.get(i))); + } catch (MessagingException e) { + // skip bad ones unless its the last one and still no good folder + if (folders.size() == 0 && i == topLevelFolders.size() - 1) + throw new DataImportHandlerException(DataImportHandlerException.SEVERE, + "Folder retreival failed"); + } + } + if (topLevelFolders == null || topLevelFolders.size() == 0) { + try { + folders.add(mailBox.getDefaultFolder()); + } catch (MessagingException e) { + throw new DataImportHandlerException(DataImportHandlerException.SEVERE, + "Folder retreival failed"); + } + } + } + + private boolean excludeFolder(String name) { + for (String s : exclude) { + if (name.matches(s)) + return true; + } + for (String s : include) { + if (name.matches(s)) + return false; + } + return 
include.size() > 0; + } + } + + class MessageIterator implements Iterator { + private Folder folder; + private Message[] messagesInCurBatch; + private int current = 0; + private int currentBatch = 0; + private int batchSize = 0; + private int totalInFolder = 0; + private boolean doBatching = true; + + public MessageIterator(Folder folder, int batchSize) { + try { + this.folder = folder; + this.batchSize = batchSize; + SearchTerm st = getSearchTerm(); + if (st != null) { + doBatching = false; + messagesInCurBatch = folder.search(st); + totalInFolder = messagesInCurBatch.length; + folder.fetch(messagesInCurBatch, fp); + current = 0; + LOG.info("Total messages : " + totalInFolder); + LOG.info("Search criteria applied. Batching disabled"); + } else { + totalInFolder = folder.getMessageCount(); + LOG.info("Total messages : " + totalInFolder); + getNextBatch(batchSize, folder); + } + } catch (MessagingException e) { + throw new DataImportHandlerException(DataImportHandlerException.SEVERE, + "Message retreival failed", e); + } + } + + private void getNextBatch(int batchSize, Folder folder) throws MessagingException { + // after each batch invalidate cache + if (messagesInCurBatch != null) { + for (Message m : messagesInCurBatch) { + if (m instanceof IMAPMessage) + ((IMAPMessage) m).invalidateHeaders(); + } + } + int lastMsg = (currentBatch + 1) * batchSize; + lastMsg = lastMsg > totalInFolder ? 
totalInFolder : lastMsg; + messagesInCurBatch = folder.getMessages(currentBatch * batchSize + 1, lastMsg); + folder.fetch(messagesInCurBatch, fp); + current = 0; + currentBatch++; + LOG.info("Current Batch : " + currentBatch); + LOG.info("Messages in this batch : " + messagesInCurBatch.length); + } + + public boolean hasNext() { + boolean hasMore = current < messagesInCurBatch.length; + if (!hasMore && doBatching + && currentBatch * batchSize < totalInFolder) { + // try next batch + try { + getNextBatch(batchSize, folder); + hasMore = current < messagesInCurBatch.length; + } catch (MessagingException e) { + throw new DataImportHandlerException(DataImportHandlerException.SEVERE, + "Message retreival failed", e); + } + } + return hasMore; + } + + public Message next() { + return hasNext() ? messagesInCurBatch[current++] : null; + } + + public void remove() { + throw new UnsupportedOperationException("Its read only mode..."); + } + + private SearchTerm getSearchTerm() { + if (filters.size() == 0) + return null; + if (filters.size() == 1) + return filters.get(0).getCustomSearch(folder); + SearchTerm last = filters.get(0).getCustomSearch(folder); + for (int i = 1; i < filters.size(); i++) { + CustomFilter filter = filters.get(i); + SearchTerm st = filter.getCustomSearch(folder); + if (st != null) { + last = new AndTerm(last, st); + } + } + return last; + } + } + + class MailsSinceLastCheckFilter implements CustomFilter { + + private Date since; + + public MailsSinceLastCheckFilter(Date date) { + since = date; + } + + public SearchTerm getCustomSearch(Folder folder) { + return new ReceivedDateTerm(ComparisonTerm.GE, since); + } + } + + // user settings stored in member variables + private String user; + private String password; + private String host; + private String protocol; + + private String folderNames; + private List exclude = new ArrayList(); + private List include = new ArrayList(); + private boolean recurse; + + private int batchSize; + private int fetchSize; + 
private int cTimeout; + private int rTimeout; + + private Date fetchMailsSince; + private String customFilter; + + private boolean processAttachment = true; + + // holds the current state + private Store mailbox; + private boolean connected = false; + private FolderIterator folderIter; + private MessageIterator msgIter; + private List filters = new ArrayList(); + private static FetchProfile fp = new FetchProfile(); + private static final Logger LOG = LoggerFactory.getLogger(DataImporter.class); + + // diagnostics + private int rowCount = 0; + + static { + fp.add(FetchProfile.Item.ENVELOPE); + fp.add(FetchProfile.Item.FLAGS); + fp.add("X-Mailer"); + } + + // Fields To Index + // single valued + private static final String MESSAGE_ID = "messageId"; + private static final String SUBJECT = "subject"; + private static final String FROM = "from"; + private static final String SENT_DATE = "sentDate"; + private static final String XMAILER = "xMailer"; + // multi valued + private static final String TO_CC_BCC = "allTo"; + private static final String FLAGS = "flags"; + private static final String CONTENT = "content"; + private static final String ATTACHMENT = "attachment"; + private static final String ATTACHMENT_NAMES = "attachmentNames"; + // flag values + private static final String FLAG_ANSWERED = "answered"; + private static final String FLAG_DELETED = "deleted"; + private static final String FLAG_DRAFT = "draft"; + private static final String FLAG_FLAGGED = "flagged"; + private static final String FLAG_RECENT = "recent"; + private static final String FLAG_SEEN = "seen"; + + private int getIntFromContext(String prop, int ifNull) { + int v = ifNull; + try { + String val = context.getEntityAttribute(prop); + if (val != null) { + val = context.replaceTokens(val); + v = Integer.valueOf(val); + } + } catch (NumberFormatException e) { + //do nothing + } + return v; + } + + private boolean getBoolFromContext(String prop, boolean ifNull) { + boolean v = ifNull; + String val = 
context.getEntityAttribute(prop); + if (val != null) { + val = context.replaceTokens(val); + v = Boolean.valueOf(val); + } + return v; + } + + private String getStringFromContext(String prop, String ifNull) { + String v = ifNull; + String val = context.getEntityAttribute(prop); + if (val != null) { + val = context.replaceTokens(val); + v = val; + } + return v; + } +} diff --git a/solr/contrib/dataimporthandler/src/extras/main/java/org/apache/solr/handler/dataimport/TikaEntityProcessor.java b/solr/contrib/dataimporthandler/src/extras/main/java/org/apache/solr/handler/dataimport/TikaEntityProcessor.java new file mode 100644 index 00000000000..aeb64c39b17 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/extras/main/java/org/apache/solr/handler/dataimport/TikaEntityProcessor.java @@ -0,0 +1,193 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.handler.dataimport; + +import org.apache.commons.io.IOUtils; +import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE; +import static org.apache.solr.handler.dataimport.DataImportHandlerException.wrapAndThrow; +import static org.apache.solr.handler.dataimport.DataImporter.COLUMN; +import static org.apache.solr.handler.dataimport.XPathEntityProcessor.URL; +import org.apache.tika.config.TikaConfig; +import org.apache.tika.metadata.Metadata; +import org.apache.tika.parser.AutoDetectParser; +import org.apache.tika.parser.Parser; +import org.apache.tika.parser.ParseContext; +import org.apache.tika.sax.BodyContentHandler; +import org.apache.tika.sax.ContentHandlerDecorator; +import org.apache.tika.sax.XHTMLContentHandler; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.xml.sax.Attributes; +import org.xml.sax.ContentHandler; +import org.xml.sax.SAXException; +import org.xml.sax.helpers.DefaultHandler; + +import javax.xml.transform.OutputKeys; +import javax.xml.transform.TransformerConfigurationException; +import javax.xml.transform.sax.SAXTransformerFactory; +import javax.xml.transform.sax.TransformerHandler; +import javax.xml.transform.stream.StreamResult; +import java.io.File; +import java.io.InputStream; +import java.io.StringWriter; +import java.io.Writer; +import java.util.HashMap; +import java.util.Map; +/** + *

    An implementation of EntityProcessor which reads data from rich docs using Tika + * + * @version $Id$ + * @since solr 1.5 + */ +public class TikaEntityProcessor extends EntityProcessorBase { + private TikaConfig tikaConfig; + private static final Logger LOG = LoggerFactory.getLogger(TikaEntityProcessor.class); + private String format = "text"; + private boolean done = false; + private String parser; + static final String AUTO_PARSER = "org.apache.tika.parser.AutoDetectParser"; + + + @Override + protected void firstInit(Context context) { + String tikaConfigFile = context.getResolvedEntityAttribute("tikaConfig"); + if (tikaConfigFile == null) { + tikaConfig = TikaConfig.getDefaultConfig(); + } else { + File configFile = new File(tikaConfigFile); + if (!configFile.isAbsolute()) { + configFile = new File(context.getSolrCore().getResourceLoader().getConfigDir(), tikaConfigFile); + } + try { + tikaConfig = new TikaConfig(configFile); + } catch (Exception e) { + wrapAndThrow (SEVERE, e,"Unable to load Tika Config"); + } + } + + format = context.getResolvedEntityAttribute("format"); + if(format == null) + format = "text"; + if (!"html".equals(format) && !"xml".equals(format) && !"text".equals(format)&& !"none".equals(format) ) + throw new DataImportHandlerException(SEVERE, "'format' can be one of text|html|xml|none"); + parser = context.getResolvedEntityAttribute("parser"); + if(parser == null) { + parser = AUTO_PARSER; + } + done = false; + } + + public Map nextRow() { + if(done) return null; + Map row = new HashMap(); + DataSource dataSource = context.getDataSource(); + InputStream is = dataSource.getData(context.getResolvedEntityAttribute(URL)); + ContentHandler contentHandler = null; + Metadata metadata = new Metadata(); + StringWriter sw = new StringWriter(); + try { + if ("html".equals(format)) { + contentHandler = getHtmlHandler(sw); + } else if ("xml".equals(format)) { + contentHandler = getXmlContentHandler(sw); + } else if ("text".equals(format)) { + 
contentHandler = getTextContentHandler(sw); + } else if("none".equals(format)){ + contentHandler = new DefaultHandler(); + } + } catch (TransformerConfigurationException e) { + wrapAndThrow(SEVERE, e, "Unable to create content handler"); + } + Parser tikaParser = null; + if(parser.equals(AUTO_PARSER)){ + AutoDetectParser parser = new AutoDetectParser(); + parser.setConfig(tikaConfig); + tikaParser = parser; + } else { + tikaParser = (Parser) context.getSolrCore().getResourceLoader().newInstance(parser); + } + try { + tikaParser.parse(is, contentHandler, metadata , new ParseContext()); + } catch (Exception e) { + wrapAndThrow(SEVERE, e, "Unable to read content"); + } + IOUtils.closeQuietly(is); + for (Map field : context.getAllEntityFields()) { + if (!"true".equals(field.get("meta"))) continue; + String col = field.get(COLUMN); + String s = metadata.get(col); + if (s != null) row.put(col, s); + } + if(!"none".equals(format) ) row.put("text", sw.toString()); + done = true; + return row; + } + + private static ContentHandler getHtmlHandler(Writer writer) + throws TransformerConfigurationException { + SAXTransformerFactory factory = (SAXTransformerFactory) + SAXTransformerFactory.newInstance(); + TransformerHandler handler = factory.newTransformerHandler(); + handler.getTransformer().setOutputProperty(OutputKeys.METHOD, "html"); + handler.setResult(new StreamResult(writer)); + return new ContentHandlerDecorator(handler) { + @Override + public void startElement( + String uri, String localName, String name, Attributes atts) + throws SAXException { + if (XHTMLContentHandler.XHTML.equals(uri)) { + uri = null; + } + if (!"head".equals(localName)) { + super.startElement(uri, localName, name, atts); + } + } + + @Override + public void endElement(String uri, String localName, String name) + throws SAXException { + if (XHTMLContentHandler.XHTML.equals(uri)) { + uri = null; + } + if (!"head".equals(localName)) { + super.endElement(uri, localName, name); + } + } + + @Override + 
public void startPrefixMapping(String prefix, String uri) {/*no op*/ } + + @Override + public void endPrefixMapping(String prefix) {/*no op*/ } + }; + } + + private static ContentHandler getTextContentHandler(Writer writer) { + return new BodyContentHandler(writer); + } + + private static ContentHandler getXmlContentHandler(Writer writer) + throws TransformerConfigurationException { + SAXTransformerFactory factory = (SAXTransformerFactory) + SAXTransformerFactory.newInstance(); + TransformerHandler handler = factory.newTransformerHandler(); + handler.getTransformer().setOutputProperty(OutputKeys.METHOD, "xml"); + handler.setResult(new StreamResult(writer)); + return handler; + } + +} diff --git a/solr/contrib/dataimporthandler/src/extras/test/java/org/apache/solr/handler/dataimport/TestMailEntityProcessor.java b/solr/contrib/dataimporthandler/src/extras/test/java/org/apache/solr/handler/dataimport/TestMailEntityProcessor.java new file mode 100644 index 00000000000..5a5220e3761 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/extras/test/java/org/apache/solr/handler/dataimport/TestMailEntityProcessor.java @@ -0,0 +1,211 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.handler.dataimport; + +import junit.framework.Assert; +import org.apache.solr.common.SolrInputDocument; +import org.junit.Ignore; +import org.junit.Test; + +import java.text.ParseException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +// Test mailbox is like this: foldername(mailcount) +// top1(2) -> child11(6) +// -> child12(0) +// top2(2) -> child21(1) +// -> grandchild211(2) +// -> grandchild212(1) +// -> child22(2) + +/** + * Test for MailEntityProcessor. The tests are marked as ignored because we'd need a mail server (real or mocked) for + * these to work. + * + * TODO: Find a way to make the tests actually test code + * + * @version $Id$ + * @see org.apache.solr.handler.dataimport.MailEntityProcessor + * @since solr 1.4 + */ +public class TestMailEntityProcessor { + + // Credentials + private static final String user = "user"; + private static final String password = "password"; + private static final String host = "host"; + private static final String protocol = "imaps"; + + private static Map paramMap = new HashMap(); + + @Test + @Ignore + public void testConnection() { + // also tests recurse = false and default settings + paramMap.put("folders", "top2"); + paramMap.put("recurse", "false"); + paramMap.put("processAttachement", "false"); + DataImporter di = new DataImporter(); + di.loadAndInit(getConfigFromMap(paramMap)); + DataConfig.Entity ent = di.getConfig().document.entities.get(0); + ent.isDocRoot = true; + DataImporter.RequestParams rp = new DataImporter.RequestParams(); + rp.command = "full-import"; + SolrWriterImpl swi = new SolrWriterImpl(); + di.runCmd(rp, swi); + Assert.assertEquals("top1 did not return 2 messages", swi.docs.size(), 2); + } + + @Test + @Ignore + public void testRecursion() { + paramMap.put("folders", "top2"); + paramMap.put("recurse", "true"); + paramMap.put("processAttachement", "false"); + DataImporter di = new DataImporter(); + 
di.loadAndInit(getConfigFromMap(paramMap)); + DataConfig.Entity ent = di.getConfig().document.entities.get(0); + ent.isDocRoot = true; + DataImporter.RequestParams rp = new DataImporter.RequestParams(); + rp.command = "full-import"; + SolrWriterImpl swi = new SolrWriterImpl(); + di.runCmd(rp, swi); + Assert.assertEquals("top2 and its children did not return 8 messages", swi.docs.size(), 8); + } + + @Test + @Ignore + public void testExclude() { + paramMap.put("folders", "top2"); + paramMap.put("recurse", "true"); + paramMap.put("processAttachement", "false"); + paramMap.put("exclude", ".*grandchild.*"); + DataImporter di = new DataImporter(); + di.loadAndInit(getConfigFromMap(paramMap)); + DataConfig.Entity ent = di.getConfig().document.entities.get(0); + ent.isDocRoot = true; + DataImporter.RequestParams rp = new DataImporter.RequestParams(); + rp.command = "full-import"; + SolrWriterImpl swi = new SolrWriterImpl(); + di.runCmd(rp, swi); + Assert.assertEquals("top2 and its direct children did not return 5 messages", swi.docs.size(), 5); + } + + @Test + @Ignore + public void testInclude() { + paramMap.put("folders", "top2"); + paramMap.put("recurse", "true"); + paramMap.put("processAttachement", "false"); + paramMap.put("include", ".*grandchild.*"); + DataImporter di = new DataImporter(); + di.loadAndInit(getConfigFromMap(paramMap)); + DataConfig.Entity ent = di.getConfig().document.entities.get(0); + ent.isDocRoot = true; + DataImporter.RequestParams rp = new DataImporter.RequestParams(); + rp.command = "full-import"; + SolrWriterImpl swi = new SolrWriterImpl(); + di.runCmd(rp, swi); + Assert.assertEquals("top2 and its direct children did not return 3 messages", swi.docs.size(), 3); + } + + @Test + @Ignore + public void testIncludeAndExclude() { + paramMap.put("folders", "top1,top2"); + paramMap.put("recurse", "true"); + paramMap.put("processAttachement", "false"); + paramMap.put("exclude", ".*top1.*"); + paramMap.put("include", ".*grandchild.*"); + DataImporter di 
= new DataImporter(); + di.loadAndInit(getConfigFromMap(paramMap)); + DataConfig.Entity ent = di.getConfig().document.entities.get(0); + ent.isDocRoot = true; + DataImporter.RequestParams rp = new DataImporter.RequestParams(); + rp.command = "full-import"; + SolrWriterImpl swi = new SolrWriterImpl(); + di.runCmd(rp, swi); + Assert.assertEquals("top2 and its direct children did not return 3 messages", swi.docs.size(), 3); + } + + @Test + @Ignore + public void testFetchTimeSince() throws ParseException { + paramMap.put("folders", "top1/child11"); + paramMap.put("recurse", "true"); + paramMap.put("processAttachement", "false"); + paramMap.put("fetchMailsSince", "2008-12-26 00:00:00"); + DataImporter di = new DataImporter(); + di.loadAndInit(getConfigFromMap(paramMap)); + DataConfig.Entity ent = di.getConfig().document.entities.get(0); + ent.isDocRoot = true; + DataImporter.RequestParams rp = new DataImporter.RequestParams(); + rp.command = "full-import"; + SolrWriterImpl swi = new SolrWriterImpl(); + di.runCmd(rp, swi); + Assert.assertEquals("top2 and its direct children did not return 3 messages", swi.docs.size(), 3); + } + + private String getConfigFromMap(Map params) { + String conf = + "" + + "" + + "" + + "" + + ""; + params.put("user", user); + params.put("password", password); + params.put("host", host); + params.put("protocol", protocol); + StringBuilder attribs = new StringBuilder(""); + for (String key : params.keySet()) + attribs.append(" ").append(key).append("=" + "\"").append(params.get(key)).append("\""); + attribs.append(" "); + return conf.replace("someconfig", attribs.toString()); + } + + static class SolrWriterImpl extends SolrWriter { + List docs = new ArrayList(); + Boolean deleteAllCalled; + Boolean commitCalled; + + public SolrWriterImpl() { + super(null, "."); + } + + public boolean upload(SolrInputDocument doc) { + return docs.add(doc); + } + + public void log(int event, String name, Object row) { + // Do nothing + } + + public void 
doDeleteAll() { + deleteAllCalled = Boolean.TRUE; + } + + public void commit(boolean b) { + commitCalled = Boolean.TRUE; + } + } +} diff --git a/solr/contrib/dataimporthandler/src/extras/test/java/org/apache/solr/handler/dataimport/TestTikaEntityProcessor.java b/solr/contrib/dataimporthandler/src/extras/test/java/org/apache/solr/handler/dataimport/TestTikaEntityProcessor.java new file mode 100644 index 00000000000..35059e7bb8c --- /dev/null +++ b/solr/contrib/dataimporthandler/src/extras/test/java/org/apache/solr/handler/dataimport/TestTikaEntityProcessor.java @@ -0,0 +1,61 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.handler.dataimport; + +import org.junit.After; +import org.junit.Before; + +/**Testcase for TikaEntityProcessor + * @version $Id$ + * @since solr 1.5 + */ +public class TestTikaEntityProcessor extends AbstractDataImportHandlerTest { + + @Before + public void setUp() throws Exception { + super.setUp(); + } + + @After + public void tearDown() throws Exception { + super.tearDown(); + } + + public String getSchemaFile() { + return "dataimport-schema-no-unique-key.xml"; + } + + public String getSolrConfigFile() { + return "dataimport-solrconfig.xml"; + } + + public void testIndexingWithTikaEntityProcessor() throws Exception { + String conf = + "" + + " " + + " " + + " " + + " " + + " " + + " " + + " " + + " " + + ""; + super.runFullImport(conf); + assertQ(req("*:*"), "//*[@numFound='1']"); + } +} diff --git a/solr/contrib/dataimporthandler/src/extras/test/resources/solr/conf/dataimport-schema-no-unique-key.xml b/solr/contrib/dataimporthandler/src/extras/test/resources/solr/conf/dataimport-schema-no-unique-key.xml new file mode 100644 index 00000000000..0be581f386a --- /dev/null +++ b/solr/contrib/dataimporthandler/src/extras/test/resources/solr/conf/dataimport-schema-no-unique-key.xml @@ -0,0 +1,203 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + text + + + + + diff --git a/solr/contrib/dataimporthandler/src/extras/test/resources/solr/conf/dataimport-solrconfig.xml b/solr/contrib/dataimporthandler/src/extras/test/resources/solr/conf/dataimport-solrconfig.xml new file mode 100644 index 00000000000..4b5a06ede92 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/extras/test/resources/solr/conf/dataimport-solrconfig.xml @@ -0,0 +1,404 @@ + + + + + + ${solr.abortOnConfigurationError:true} + + + ${solr.data.dir:./solr/data} + + + + + false + + 10 + + + + 32 + 2147483647 + 10000 + 1000 + 10000 + + + + + + + + + + + 
single + + + + + false + 32 + 10 + + + 2147483647 + 10000 + + + false + + + + + + + + + 100000 + + + + + + + 1024 + + + + + + + + + + + + + true + + + + + + + + 50 + + + 200 + + + + + + + + + solr 0 10 + rocks 0 10 + static newSearcher warming query from solrconfig.xml + + + + + + + + + + + false + + + 4 + + + + + + + + + + + + + + + + + + + + + + + explicit + + + + + + + + + + + + explicit + + + + + + + + + + + + *:* + + + + + + diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTest.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTest.java new file mode 100644 index 00000000000..2572a10427f --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTest.java @@ -0,0 +1,245 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.handler.dataimport; + +import org.apache.solr.core.SolrCore; +import org.apache.solr.request.LocalSolrQueryRequest; +import org.apache.solr.util.AbstractSolrTestCase; +import org.apache.solr.common.util.NamedList; + +import java.io.IOException; +import java.io.File; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + *

    + * Abstract base class for DataImportHandler tests + *

    + *

    + * This API is experimental and subject to change + * + * @version $Id$ + * @since solr 1.3 + */ +public abstract class AbstractDataImportHandlerTest extends + AbstractSolrTestCase { + + @Override + public void setUp() throws Exception { + super.setUp(); + } + + @Override + public void tearDown() throws Exception { + // remove dataimport.properties + File f = new File("solr/conf/dataimport.properties"); + log.info("Looking for dataimport.properties at: " + f.getAbsolutePath()); + if (f.exists()) { + log.info("Deleting dataimport.properties"); + if (!f.delete()) + log.warn("Could not delete dataimport.properties"); + } + super.tearDown(); + } + + protected String loadDataConfig(String dataConfigFileName) { + try { + SolrCore core = h.getCore(); + return SolrWriter.getResourceAsString(core.getResourceLoader() + .openResource(dataConfigFileName)); + } catch (IOException e) { + e.printStackTrace(); + return null; + } + } + + protected void runFullImport(String dataConfig) throws Exception { + LocalSolrQueryRequest request = lrf.makeRequest("command", "full-import", + "debug", "on", "clean", "true", "commit", "true", "dataConfig", + dataConfig); + h.query("/dataimport", request); + } + + protected void runDeltaImport(String dataConfig) throws Exception { + LocalSolrQueryRequest request = lrf.makeRequest("command", "delta-import", + "debug", "on", "clean", "false", "commit", "true", "dataConfig", + dataConfig); + h.query("/dataimport", request); + } + + /** + * Runs a full-import using the given dataConfig and the provided request parameters. + * + * By default, debug=on, clean=true and commit=true are passed which can be overridden. 
+ * + * @param dataConfig the data-config xml as a string + * @param extraParams any extra request parameters needed to be passed to DataImportHandler + * @throws Exception in case of any error + */ + protected void runFullImport(String dataConfig, Map extraParams) throws Exception { + HashMap params = new HashMap(); + params.put("command", "full-import"); + params.put("debug", "on"); + params.put("dataConfig", dataConfig); + params.put("clean", "true"); + params.put("commit", "true"); + params.putAll(extraParams); + NamedList l = new NamedList(); + for (Map.Entry e : params.entrySet()) { + l.add(e.getKey(),e.getValue()); + } + LocalSolrQueryRequest request = new LocalSolrQueryRequest(h.getCore(), l); + h.query("/dataimport", request); + } + + /** + * Helper for creating a Context instance. Useful for testing Transformers + */ + @SuppressWarnings("unchecked") + public static TestContext getContext(DataConfig.Entity parentEntity, + VariableResolverImpl resolver, DataSource parentDataSource, + String currProcess, final List> entityFields, + final Map entityAttrs) { + if (resolver == null) resolver = new VariableResolverImpl(); + final Context delegate = new ContextImpl(parentEntity, resolver, + parentDataSource, currProcess, + new HashMap(), null, null); + return new TestContext(entityAttrs, delegate, entityFields, parentEntity == null); + } + + /** + * Strings at even index are keys, odd-index strings are values in the + * returned map + */ + @SuppressWarnings("unchecked") + public static Map createMap(Object... 
args) { + Map result = new HashMap(); + + if (args == null || args.length == 0) + return result; + + for (int i = 0; i < args.length - 1; i += 2) + result.put(args[i], args[i + 1]); + + return result; + } + + static class TestContext extends Context { + private final Map entityAttrs; + private final Context delegate; + private final List> entityFields; + private final boolean root; + String script,scriptlang; + + public TestContext(Map entityAttrs, Context delegate, + List> entityFields, boolean root) { + this.entityAttrs = entityAttrs; + this.delegate = delegate; + this.entityFields = entityFields; + this.root = root; + } + + public String getEntityAttribute(String name) { + return entityAttrs == null ? delegate.getEntityAttribute(name) : entityAttrs.get(name); + } + + public String getResolvedEntityAttribute(String name) { + return entityAttrs == null ? delegate.getResolvedEntityAttribute(name) : + delegate.getVariableResolver().replaceTokens(entityAttrs.get(name)); + } + + public List> getAllEntityFields() { + return entityFields == null ? 
delegate.getAllEntityFields() + : entityFields; + } + + public VariableResolver getVariableResolver() { + return delegate.getVariableResolver(); + } + + public DataSource getDataSource() { + return delegate.getDataSource(); + } + + public boolean isRootEntity() { + return root; + } + + public String currentProcess() { + return delegate.currentProcess(); + } + + public Map getRequestParameters() { + return delegate.getRequestParameters(); + } + + public EntityProcessor getEntityProcessor() { + return null; + } + + public void setSessionAttribute(String name, Object val, String scope) { + delegate.setSessionAttribute(name, val, scope); + } + + public Object getSessionAttribute(String name, String scope) { + return delegate.getSessionAttribute(name, scope); + } + + public Context getParentContext() { + return delegate.getParentContext(); + } + + public DataSource getDataSource(String name) { + return delegate.getDataSource(name); + } + + public SolrCore getSolrCore() { + return delegate.getSolrCore(); + } + + public Map getStats() { + return delegate.getStats(); + } + + + public String getScript() { + return script == null ? delegate.getScript() : script; + } + + public String getScriptLanguage() { + return scriptlang == null ? 
delegate.getScriptLanguage() : scriptlang; + } + + public void deleteDoc(String id) { + + } + + public void deleteDocByQuery(String query) { + + } + + public Object resolve(String var) { + return delegate.resolve(var); + } + + public String replaceTokens(String template) { + return delegate.replaceTokens(template); + } + } +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinContentStreamDataSource.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinContentStreamDataSource.java new file mode 100644 index 00000000000..cf37ec6a373 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinContentStreamDataSource.java @@ -0,0 +1,68 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import org.apache.solr.common.util.ContentStream; +import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE; + +import java.io.InputStream; +import java.io.IOException; +import java.util.Properties; +/** + *

    A data source implementation which can be used to read binary stream from content streams.

    Refer to http://wiki.apache.org/solr/DataImportHandler for more + * details.

    + *

    + * This API is experimental and may change in the future. + * + * @version $Id$ + * @since solr 1.5 + */ + +public class BinContentStreamDataSource extends DataSource { + private ContextImpl context; + private ContentStream contentStream; + private InputStream in; + + + public void init(Context context, Properties initProps) { + this.context = (ContextImpl) context; + } + + public InputStream getData(String query) { + contentStream = context.getDocBuilder().requestParameters.contentStream; + if (contentStream == null) + throw new DataImportHandlerException(SEVERE, "No stream available. The request has no body"); + try { + return in = contentStream.getStream(); + } catch (IOException e) { + DataImportHandlerException.wrapAndThrow(SEVERE, e); + return null; + } + } + + public void close() { + if (contentStream != null) { + try { + if (in == null) in = contentStream.getStream(); + in.close(); + } catch (IOException e) { + /*no op*/ + } + } + } +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinFileDataSource.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinFileDataSource.java new file mode 100644 index 00000000000..769ab637330 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinFileDataSource.java @@ -0,0 +1,63 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import static org.apache.solr.handler.dataimport.DataImportHandlerException.wrapAndThrow; +import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE; + +import java.io.InputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.util.Properties; +/** + *

    + * A DataSource which reads from local files + *

    + *

    + *

    + * Refer to http://wiki.apache.org/solr/DataImportHandler + * for more details. + *

    + *

    + * This API is experimental and may change in the future. + * + * @version $Id$ + * @since solr 1.5 + */ + +public class BinFileDataSource extends DataSource{ + protected String basePath; + public void init(Context context, Properties initProps) { + basePath = initProps.getProperty(FileDataSource.BASE_PATH); + } + + public InputStream getData(String query) { + File f = FileDataSource.getFile(basePath,query); + try { + return new FileInputStream(f); + } catch (FileNotFoundException e) { + wrapAndThrow(SEVERE,e,"Unable to open file "+f.getAbsolutePath()); + return null; + } + } + + public void close() { + + } +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinURLDataSource.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinURLDataSource.java new file mode 100644 index 00000000000..be78fb614d9 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinURLDataSource.java @@ -0,0 +1,101 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.handler.dataimport; + +import static org.apache.solr.handler.dataimport.DataImportHandlerException.*; +import static org.apache.solr.handler.dataimport.URLDataSource.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.InputStream; +import java.net.URL; +import java.net.URLConnection; +import java.util.Properties; +/** + *

    A data source implementation which can be used to read binary streams using HTTP.

    Refer to http://wiki.apache.org/solr/DataImportHandler for more + * details.

    + *

    + * This API is experimental and may change in the future. + * + * @version $Id$ + * @since solr 1.5 + */ +public class BinURLDataSource extends DataSource{ + private static final Logger LOG = LoggerFactory.getLogger(BinURLDataSource.class); + + private String baseUrl; + private int connectionTimeout = CONNECTION_TIMEOUT; + + private int readTimeout = READ_TIMEOUT; + + private Context context; + + private Properties initProps; + + public BinURLDataSource() { } + + public void init(Context context, Properties initProps) { + this.context = context; + this.initProps = initProps; + + baseUrl = getInitPropWithReplacements(BASE_URL); + String cTimeout = getInitPropWithReplacements(CONNECTION_TIMEOUT_FIELD_NAME); + String rTimeout = getInitPropWithReplacements(READ_TIMEOUT_FIELD_NAME); + if (cTimeout != null) { + try { + connectionTimeout = Integer.parseInt(cTimeout); + } catch (NumberFormatException e) { + LOG.warn("Invalid connection timeout: " + cTimeout); + } + } + if (rTimeout != null) { + try { + readTimeout = Integer.parseInt(rTimeout); + } catch (NumberFormatException e) { + LOG.warn("Invalid read timeout: " + rTimeout); + } + } + } + + public InputStream getData(String query) { + URL url = null; + try { + if (URIMETHOD.matcher(query).find()) url = new URL(query); + else url = new URL(baseUrl + query); + LOG.debug("Accessing URL: " + url.toString()); + URLConnection conn = url.openConnection(); + conn.setConnectTimeout(connectionTimeout); + conn.setReadTimeout(readTimeout); + return conn.getInputStream(); + } catch (Exception e) { + LOG.error("Exception thrown while getting data", e); + wrapAndThrow (SEVERE, e, "Exception in invoking url " + url); + return null;//unreachable + } + } + + public void close() { } + + private String getInitPropWithReplacements(String propertyName) { + final String expr = initProps.getProperty(propertyName); + if (expr == null) { + return null; + } + return context.replaceTokens(expr); + } +} diff --git 
a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/CachedSqlEntityProcessor.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/CachedSqlEntityProcessor.java new file mode 100644 index 00000000000..69b7b2b6f18 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/CachedSqlEntityProcessor.java @@ -0,0 +1,79 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** + * This class enables caching of data obtained from the DB to avoid too many sql + * queries + *

    + *

    + * Refer to http://wiki.apache.org/solr/DataImportHandler + * for more details. + *

    + *

    + * This API is experimental and subject to change + * + * @version $Id$ + * @since solr 1.3 + */ +public class CachedSqlEntityProcessor extends SqlEntityProcessor { + private boolean isFirst; + + @SuppressWarnings("unchecked") + public void init(Context context) { + super.init(context); + super.cacheInit(); + isFirst = true; + } + + public Map nextRow() { + if (dataSourceRowCache != null) + return getFromRowCacheTransformed(); + if (!isFirst) + return null; + String query = context.replaceTokens(context.getEntityAttribute("query")); + isFirst = false; + if (simpleCache != null) { + return getSimpleCacheData(query); + } else { + return getIdCacheData(query); + } + + } + + protected List> getAllNonCachedRows() { + List> rows = new ArrayList>(); + String q = getQuery(); + initQuery(context.replaceTokens(q)); + if (rowIterator == null) + return rows; + while (rowIterator.hasNext()) { + Map arow = rowIterator.next(); + if (arow == null) { + break; + } else { + rows.add(arow); + } + } + return rows; + } +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/ClobTransformer.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/ClobTransformer.java new file mode 100644 index 00000000000..e28c7a2eb20 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/ClobTransformer.java @@ -0,0 +1,84 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import static org.apache.solr.handler.dataimport.HTMLStripTransformer.TRUE; + +import java.io.IOException; +import java.io.Reader; +import java.sql.Clob; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** + * Transformer instance which converts a Clob to a String. + *

    + * Refer to http://wiki.apache.org/solr/DataImportHandler + * for more details. + *

    + * This API is experimental and subject to change + * + * @version $Id$ + * @since solr 1.4 + */ +public class ClobTransformer extends Transformer { + public Object transformRow(Map aRow, Context context) { + for (Map map : context.getAllEntityFields()) { + if (!TRUE.equals(map.get(CLOB))) continue; + String column = map.get(DataImporter.COLUMN); + String srcCol = map.get(RegexTransformer.SRC_COL_NAME); + if (srcCol == null) + srcCol = column; + Object o = aRow.get(srcCol); + if (o instanceof List) { + List inputs = (List) o; + List results = new ArrayList(); + for (Object input : inputs) { + if (input instanceof Clob) { + Clob clob = (Clob) input; + results.add(readFromClob(clob)); + } + } + aRow.put(column, results); + } else { + if (o instanceof Clob) { + Clob clob = (Clob) o; + aRow.put(column, readFromClob(clob)); + } + } + } + return aRow; + } + + private String readFromClob(Clob clob) { + Reader reader = FieldReaderDataSource.readCharStream(clob); + StringBuilder sb = new StringBuilder(); + char[] buf = new char[1024]; + int len; + try { + while ((len = reader.read(buf)) != -1) { + sb.append(buf, 0, len); + } + } catch (IOException e) { + DataImportHandlerException.wrapAndThrow(DataImportHandlerException.SEVERE, e); + } + return sb.toString(); + } + + public static final String CLOB = "clob"; +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/ContentStreamDataSource.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/ContentStreamDataSource.java new file mode 100644 index 00000000000..3b55fd6cf5e --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/ContentStreamDataSource.java @@ -0,0 +1,67 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import org.apache.solr.common.util.ContentStream; +import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE; + +import java.io.IOException; +import java.io.Reader; +import java.util.Properties; + +/** + * A DataSource implementation which reads from the ContentStream of a POST request + *

    + * Refer to http://wiki.apache.org/solr/DataImportHandler + * for more details. + *

    + * This API is experimental and may change in the future. + * + * @version $Id$ + * @since solr 1.4 + */ +public class ContentStreamDataSource extends DataSource { + private ContextImpl context; + private ContentStream contentStream; + private Reader reader; + + public void init(Context context, Properties initProps) { + this.context = (ContextImpl) context; + } + + public Reader getData(String query) { + contentStream = context.getDocBuilder().requestParameters.contentStream; + if (contentStream == null) + throw new DataImportHandlerException(SEVERE, "No stream available. The request has no body"); + try { + return reader = contentStream.getReader(); + } catch (IOException e) { + DataImportHandlerException.wrapAndThrow(SEVERE, e); + return null; + } + } + + public void close() { + if (contentStream != null) { + try { + if (reader == null) reader = contentStream.getReader(); + reader.close(); + } catch (IOException e) { + } + } + } +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/Context.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/Context.java new file mode 100644 index 00000000000..290b202b2d5 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/Context.java @@ -0,0 +1,226 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.handler.dataimport; + +import org.apache.solr.core.SolrCore; + +import java.util.List; +import java.util.Map; + +/** + *

    + * This abstract class gives access to all available objects. So any + * component implemented by a user can have the full power of DataImportHandler + *

    + *

    + * Refer to http://wiki.apache.org/solr/DataImportHandler + * for more details. + *

    + *

    + * This API is experimental and subject to change + * + * @version $Id$ + * @since solr 1.3 + */ +public abstract class Context { + public static final String FULL_DUMP = "FULL_DUMP", DELTA_DUMP = "DELTA_DUMP", FIND_DELTA = "FIND_DELTA"; + + /** + * An object stored in entity scope is valid only for the current entity for the current document only. + */ + public static final String SCOPE_ENTITY = "entity"; + + /** + * An object stored in global scope is available for the current import only but across entities and documents. + */ + public static final String SCOPE_GLOBAL = "global"; + + /** + * An object stored in document scope is available for the current document only but across entities. + */ + public static final String SCOPE_DOC = "document"; + + /** + * An object stored in 'solrcore' scope is available across imports, entities and documents throughout the life of + * a solr core. A solr core unload or reload will destroy this data. + */ + public static final String SCOPE_SOLR_CORE = "solrcore"; + + /** + * Get the value of any attribute put into this entity + * + * @param name name of the attribute eg: 'name' + * @return value of named attribute in entity + */ + public abstract String getEntityAttribute(String name); + + /** + * Get the value of any attribute put into this entity after resolving all variables found in the attribute value + * @param name name of the attribute + * @return value of the named attribute after resolving all variables + */ + public abstract String getResolvedEntityAttribute(String name); + + /** + * Returns all the fields put into an entity. each item (which is a map ) in + * the list corresponds to one field. 
each if the map contains the attribute + * names and values in a field + * + * @return all fields in an entity + */ + public abstract List> getAllEntityFields(); + + /** + * Returns the VariableResolver used in this entity which can be used to + * resolve the tokens in ${} + * + * @return a VariableResolver instance + * @see org.apache.solr.handler.dataimport.VariableResolver + */ + + public abstract VariableResolver getVariableResolver(); + + /** + * Gets the datasource instance defined for this entity. Do not close() this instance. + * Transformers should use the getDataSource(String name) method. + * + * @return a new DataSource instance as configured for the current entity + * @see org.apache.solr.handler.dataimport.DataSource + * @see #getDataSource(String) + */ + public abstract DataSource getDataSource(); + + /** + * Gets a new DataSource instance with a name. Ensure that you close() this after use + * because this is created just for this method call. + * + * @param name Name of the dataSource as defined in the dataSource tag + * @return a new DataSource instance + * @see org.apache.solr.handler.dataimport.DataSource + */ + public abstract DataSource getDataSource(String name); + + /** + * Returns the instance of EntityProcessor used for this entity + * + * @return instance of EntityProcessor used for the current entity + * @see org.apache.solr.handler.dataimport.EntityProcessor + */ + public abstract EntityProcessor getEntityProcessor(); + + /** + * Store values in a certain name and scope (entity, document,global) + * + * @param name the key + * @param val the value + * @param scope the scope in which the given key, value pair is to be stored + */ + public abstract void setSessionAttribute(String name, Object val, String scope); + + /** + * get a value by name in the given scope (entity, document,global) + * + * @param name the key + * @param scope the scope from which the value is to be retreived + * @return the object stored in the given scope with the 
given key + */ + public abstract Object getSessionAttribute(String name, String scope); + + /** + * Get the context instance for the parent entity. works only in the full dump + * If the current entity is rootmost a null is returned + * + * @return parent entity's Context + */ + public abstract Context getParentContext(); + + /** + * The request parameters passed over HTTP for this command the values in the + * map are either String(for single valued parameters) or List (for + * multi-valued parameters) + * + * @return the request parameters passed in the URL to initiate this process + */ + public abstract Map getRequestParameters(); + + /** + * Returns if the current entity is the root entity + * + * @return true if current entity is the root entity, false otherwise + */ + public abstract boolean isRootEntity(); + + /** + * Returns the current process FULL_DUMP, DELTA_DUMP, FIND_DELTA + * + * @return the type of the current running process + */ + public abstract String currentProcess(); + + /** + * Exposing the actual SolrCore to the components + * + * @return the core + */ + public abstract SolrCore getSolrCore(); + + /** + * Makes available some basic running statistics such as "docCount", + * "deletedDocCount", "rowCount", "queryCount" and "skipDocCount" + * + * @return a Map containing running statistics of the current import + */ + public abstract Map getStats(); + + /** + * Returns the text specified in the script tag in the data-config.xml + */ + public abstract String getScript(); + + /** + * Returns the language of the script as specified in the script tag in data-config.xml + */ + public abstract String getScriptLanguage(); + + /**delete a document by id + * @param id + */ + public abstract void deleteDoc(String id); + + /**delete documents by query + * @param query + */ + public abstract void deleteDocByQuery(String query); + + /**Use this directly to resolve variable + * @param var the variable neme + * @return the resolved value + */ + public abstract 
Object resolve(String var); + + /** Resolve variables in a template + * @param template + * + * @return The string w/ variables resolved + */ + public abstract String replaceTokens(String template); + + static final ThreadLocal CURRENT_CONTEXT = new ThreadLocal(); + +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/ContextImpl.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/ContextImpl.java new file mode 100644 index 00000000000..6dfa48276c9 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/ContextImpl.java @@ -0,0 +1,237 @@ +package org.apache.solr.handler.dataimport; +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +import org.apache.solr.core.SolrCore; + +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +/** + *

    + * An implementation for the Context + *

    + * This API is experimental and subject to change + * + * @version $Id$ + * @since solr 1.3 + */ +public class ContextImpl extends Context { + private DataConfig.Entity entity; + + private ContextImpl parent; + + private VariableResolverImpl resolver; + + private DataSource ds; + + private String currProcess; + + private Map requestParams; + + private DataImporter dataImporter; + + private Map entitySession, globalSession; + + DocBuilder.DocWrapper doc; + + DocBuilder docBuilder; + + + public ContextImpl(DataConfig.Entity entity, VariableResolverImpl resolver, + DataSource ds, String currProcess, + Map global, ContextImpl parentContext, DocBuilder docBuilder) { + this.entity = entity; + this.docBuilder = docBuilder; + this.resolver = resolver; + this.ds = ds; + this.currProcess = currProcess; + if (docBuilder != null) { + this.requestParams = docBuilder.requestParameters.requestParams; + dataImporter = docBuilder.dataImporter; + } + globalSession = global; + parent = parentContext; + } + + public String getEntityAttribute(String name) { + return entity == null ? null : entity.allAttributes.get(name); + } + + public String getResolvedEntityAttribute(String name) { + return entity == null ? null : resolver.replaceTokens(entity.allAttributes.get(name)); + } + + public List> getAllEntityFields() { + return entity == null ? 
Collections.EMPTY_LIST : entity.allFieldsList; + } + + public VariableResolver getVariableResolver() { + return resolver; + } + + public DataSource getDataSource() { + if (ds != null) return ds; + if(entity == null) return null; + if (entity.dataSrc == null) { + entity.dataSrc = dataImporter.getDataSourceInstance(entity, entity.dataSource, this); + } + if (entity.dataSrc != null && docBuilder != null && docBuilder.verboseDebug && + Context.FULL_DUMP.equals(currentProcess())) { + //debug is not yet implemented properly for deltas + entity.dataSrc = docBuilder.writer.getDebugLogger().wrapDs(entity.dataSrc); + } + return entity.dataSrc; + } + + public DataSource getDataSource(String name) { + return dataImporter.getDataSourceInstance(entity, name, this); + } + + public boolean isRootEntity() { + return entity.isDocRoot; + } + + public String currentProcess() { + return currProcess; + } + + public Map getRequestParameters() { + return requestParams; + } + + public EntityProcessor getEntityProcessor() { + return entity == null ? 
null : entity.processor; + } + + public void setSessionAttribute(String name, Object val, String scope) { + if(name == null) return; + if (Context.SCOPE_ENTITY.equals(scope)) { + if (entitySession == null) + entitySession = new ConcurrentHashMap(); + + putVal(name, val,entitySession); + } else if (Context.SCOPE_GLOBAL.equals(scope)) { + if (globalSession != null) { + putVal(name, val,globalSession); + } + } else if (Context.SCOPE_DOC.equals(scope)) { + DocBuilder.DocWrapper doc = getDocument(); + if (doc != null) + doc.setSessionAttribute(name, val); + } else if (SCOPE_SOLR_CORE.equals(scope)){ + if(dataImporter != null) { + putVal(name, val,dataImporter.getCoreScopeSession()); + } + } + } + + private void putVal(String name, Object val, Map map) { + if(val == null) map.remove(name); + else entitySession.put(name, val); + } + + public Object getSessionAttribute(String name, String scope) { + if (Context.SCOPE_ENTITY.equals(scope)) { + if (entitySession == null) + return null; + return entitySession.get(name); + } else if (Context.SCOPE_GLOBAL.equals(scope)) { + if (globalSession != null) { + return globalSession.get(name); + } + } else if (Context.SCOPE_DOC.equals(scope)) { + DocBuilder.DocWrapper doc = getDocument(); + return doc == null ? null: doc.getSessionAttribute(name); + } else if (SCOPE_SOLR_CORE.equals(scope)){ + return dataImporter == null ? null : dataImporter.getCoreScopeSession().get(name); + } + return null; + } + + public Context getParentContext() { + return parent; + } + + private DocBuilder.DocWrapper getDocument() { + ContextImpl c = this; + while (true) { + if (c.doc != null) + return c.doc; + if (c.parent != null) + c = c.parent; + else + return null; + } + } + + public void setDoc(DocBuilder.DocWrapper docWrapper) { + this.doc = docWrapper; + } + + + public SolrCore getSolrCore() { + return dataImporter == null ? null : dataImporter.getCore(); + } + + + public Map getStats() { + return docBuilder != null ? 
docBuilder.importStatistics.getStatsSnapshot() : Collections.emptyMap(); + } + + public String getScript() { + if(dataImporter != null) { + DataConfig.Script script = dataImporter.getConfig().script; + return script == null ? null : script.text; + } + return null; + } + + public String getScriptLanguage() { + if (dataImporter != null) { + DataConfig.Script script = dataImporter.getConfig().script; + return script == null ? null : script.language; + } + return null; + } + + public void deleteDoc(String id) { + if(docBuilder != null){ + docBuilder.writer.deleteDoc(id); + } + } + + public void deleteDocByQuery(String query) { + if(docBuilder != null){ + docBuilder.writer.deleteByQuery(query); + } + } + + DocBuilder getDocBuilder(){ + return docBuilder; + } + public Object resolve(String var) { + return resolver.resolve(var); + } + + public String replaceTokens(String template) { + return resolver.replaceTokens(template); + } +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataConfig.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataConfig.java new file mode 100644 index 00000000000..5a1380283e3 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataConfig.java @@ -0,0 +1,371 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import org.w3c.dom.Element; +import org.w3c.dom.NamedNodeMap; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; +import org.apache.solr.schema.SchemaField; +import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +/** + *

    + * Mapping for data-config.xml + *

    + *

    + *

    + * Refer to http://wiki.apache.org/solr/DataImportHandler + * for more details. + *

    + *

    + * This API is experimental and subject to change + * + * @version $Id$ + * @since solr 1.3 + */ +public class DataConfig { + static final Logger LOG = LoggerFactory.getLogger(DataConfig.class); + + public Document document; + + public List> functions = new ArrayList>(); + + public Script script; + + public Map dataSources = new HashMap(); + + public Map lowerNameVsSchemaField = new HashMap(); + + boolean isMultiThreaded = false; + + public static class Document { + // TODO - remove from here and add it to entity + public String deleteQuery; + + public List entities = new ArrayList(); + + public String onImportStart, onImportEnd; + + public Document() { + } + + public Document(Element element) { + this.deleteQuery = getStringAttribute(element, "deleteQuery", null); + this.onImportStart = getStringAttribute(element, "onImportStart", null); + this.onImportEnd = getStringAttribute(element, "onImportEnd", null); + List l = getChildNodes(element, "entity"); + for (Element e : l) + entities.add(new Entity(e)); + } + } + + public static class Entity { + public String name; + + public String pk; + + public String pkMappingFromSchema; + + public String dataSource; + + public Map allAttributes; + + public String proc; + + public String docRoot; + + public boolean isDocRoot = false; + + public List fields = new ArrayList(); + + public List> allFieldsList = new ArrayList>(); + + public List entities; + + public Entity parentEntity; + + public EntityProcessorWrapper processor; + + @SuppressWarnings("unchecked") + public DataSource dataSrc; + + public Map> colNameVsField = new HashMap>(); + + public Entity() { + } + + public Entity(Element element) { + name = getStringAttribute(element, NAME, null); + if(name == null){ + LOG.warn("Entity does not have a name"); + name= ""+System.nanoTime(); + } + if(name.indexOf(".") != -1){ + throw new DataImportHandlerException(SEVERE, "Entity name must not have period (.): '" + name); + } + if (RESERVED_WORDS.contains(name)) { + throw 
new DataImportHandlerException(SEVERE, "Entity name : '" + name + + "' is a reserved keyword. Reserved words are: " + RESERVED_WORDS); + } + pk = getStringAttribute(element, "pk", null); + docRoot = getStringAttribute(element, ROOT_ENTITY, null); + proc = getStringAttribute(element, PROCESSOR, null); + dataSource = getStringAttribute(element, DataImporter.DATA_SRC, null); + allAttributes = getAllAttributes(element); + List n = getChildNodes(element, "field"); + for (Element elem : n) { + Field field = new Field(elem); + fields.add(field); + List l = colNameVsField.get(field.column); + if(l == null) l = new ArrayList(); + boolean alreadyFound = false; + for (Field f : l) { + if(f.getName().equals(field.getName())) { + alreadyFound = true; + break; + } + } + if(!alreadyFound) l.add(field); + colNameVsField.put(field.column, l); + } + n = getChildNodes(element, "entity"); + if (!n.isEmpty()) + entities = new ArrayList(); + for (Element elem : n) + entities.add(new Entity(elem)); + + } + + public void clearCache() { + if (entities != null) { + for (Entity entity : entities) + entity.clearCache(); + } + try { + processor.close(); + } catch (Exception e) { + /*no op*/ + } + processor = null; + if (dataSrc != null) + dataSrc.close(); + dataSrc = null; + } + + public String getPk(){ + return pk == null ? 
pkMappingFromSchema : pk; + } + } + + public static class Script { + public String language; + + public String text; + + public Script() { + } + + public Script(Element e) { + this.language = getStringAttribute(e, "language", "JavaScript"); + StringBuilder buffer = new StringBuilder(); + String script = getTxt(e, buffer); + if (script != null) + this.text = script.trim(); + } + } + + public static class Field { + + public String column; + + public String name; + + public Float boost = 1.0f; + + public boolean toWrite = true; + + public boolean multiValued = false; + + boolean dynamicName; + + + public Map allAttributes = new HashMap() { + public String put(String key, String value) { + if (super.containsKey(key)) + return super.get(key); + return super.put(key, value); + } + }; + + public Field() { + } + + public Field(Element e) { + this.name = getStringAttribute(e, DataImporter.NAME, null); + this.column = getStringAttribute(e, DataImporter.COLUMN, null); + if (column == null) { + throw new DataImportHandlerException(SEVERE, "Field must have a column attribute"); + } + this.boost = Float.parseFloat(getStringAttribute(e, "boost", "1.0f")); + allAttributes.putAll(getAllAttributes(e)); + } + + public String getName() { + return name == null ? 
column : name; + } + + public Entity entity; + + } + + public void readFromXml(Element e) { + List n = getChildNodes(e, "document"); + if (n.isEmpty()) { + throw new DataImportHandlerException(SEVERE, "DataImportHandler " + + "configuration file must have one node."); + } + document = new Document(n.get(0)); + + n = getChildNodes(e, SCRIPT); + if (!n.isEmpty()) { + script = new Script(n.get(0)); + } + + // Add the provided evaluators + n = getChildNodes(e, FUNCTION); + if (!n.isEmpty()) { + for (Element element : n) { + String func = getStringAttribute(element, NAME, null); + String clz = getStringAttribute(element, CLASS, null); + if (func == null || clz == null){ + throw new DataImportHandlerException( + SEVERE, + " must have a 'name' and 'class' attributes"); + } else { + functions.add(getAllAttributes(element)); + } + } + } + n = getChildNodes(e, DATA_SRC); + if (!n.isEmpty()) { + for (Element element : n) { + Properties p = new Properties(); + HashMap attrs = getAllAttributes(element); + for (Map.Entry entry : attrs.entrySet()) { + p.setProperty(entry.getKey(), entry.getValue()); + } + dataSources.put(p.getProperty("name"), p); + } + } + if(dataSources.get(null) == null){ + for (Properties properties : dataSources.values()) { + dataSources.put(null,properties); + break; + } + } + } + + private static String getStringAttribute(Element e, String name, String def) { + String r = e.getAttribute(name); + if (r == null || "".equals(r.trim())) + r = def; + return r; + } + + private static HashMap getAllAttributes(Element e) { + HashMap m = new HashMap(); + NamedNodeMap nnm = e.getAttributes(); + for (int i = 0; i < nnm.getLength(); i++) { + m.put(nnm.item(i).getNodeName(), nnm.item(i).getNodeValue()); + } + return m; + } + + public static String getTxt(Node elem, StringBuilder buffer) { + if (elem.getNodeType() != Node.CDATA_SECTION_NODE) { + NodeList childs = elem.getChildNodes(); + for (int i = 0; i < childs.getLength(); i++) { + Node child = childs.item(i); + 
short childType = child.getNodeType(); + if (childType != Node.COMMENT_NODE + && childType != Node.PROCESSING_INSTRUCTION_NODE) { + getTxt(child, buffer); + } + } + } else { + buffer.append(elem.getNodeValue()); + } + + return buffer.toString(); + } + + public static List getChildNodes(Element e, String byName) { + List result = new ArrayList(); + NodeList l = e.getChildNodes(); + for (int i = 0; i < l.getLength(); i++) { + if (e.equals(l.item(i).getParentNode()) + && byName.equals(l.item(i).getNodeName())) + result.add((Element) l.item(i)); + } + return result; + } + + public void clearCaches() { + for (Entity entity : document.entities) + entity.clearCache(); + } + + public static final String SCRIPT = "script"; + + public static final String NAME = "name"; + + public static final String PROCESSOR = "processor"; + + /** + * @deprecated use IMPORTER_NS_SHORT instead + */ + @Deprecated + public static final String IMPORTER_NS = "dataimporter"; + + public static final String IMPORTER_NS_SHORT = "dih"; + + public static final String ROOT_ENTITY = "rootEntity"; + + public static final String FUNCTION = "function"; + + public static final String CLASS = "class"; + + public static final String DATA_SRC = "dataSource"; + + private static final Set RESERVED_WORDS = new HashSet(); + static{ + RESERVED_WORDS.add(IMPORTER_NS); + RESERVED_WORDS.add(IMPORTER_NS_SHORT); + RESERVED_WORDS.add("request"); + RESERVED_WORDS.add("delta"); + RESERVED_WORDS.add("functions"); + RESERVED_WORDS.add("session"); + RESERVED_WORDS.add(SolrWriter.LAST_INDEX_KEY); + } + +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataImportHandler.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataImportHandler.java new file mode 100644 index 00000000000..946206a5f7b --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataImportHandler.java @@ -0,0 +1,360 @@ +/** + * 
Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import static org.apache.solr.handler.dataimport.DataImporter.IMPORT_CMD; +import org.apache.solr.common.SolrException; +import org.apache.solr.common.SolrInputDocument; +import org.apache.solr.common.params.CommonParams; +import org.apache.solr.common.params.ModifiableSolrParams; +import org.apache.solr.common.params.SolrParams; +import org.apache.solr.common.params.UpdateParams; +import org.apache.solr.common.util.ContentStreamBase; +import org.apache.solr.common.util.NamedList; +import org.apache.solr.common.util.ContentStream; +import org.apache.solr.core.SolrConfig; +import org.apache.solr.core.SolrCore; +import org.apache.solr.core.SolrResourceLoader; +import org.apache.solr.handler.RequestHandlerBase; +import org.apache.solr.handler.RequestHandlerUtils; +import org.apache.solr.response.RawResponseWriter; +import org.apache.solr.request.SolrQueryRequest; +import org.apache.solr.response.SolrQueryResponse; +import org.apache.solr.request.SolrRequestHandler; +import org.apache.solr.update.processor.UpdateRequestProcessor; +import org.apache.solr.update.processor.UpdateRequestProcessorChain; +import 
org.apache.solr.util.plugin.SolrCoreAware; + +import java.util.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + *

/**
 * <p>
 * Solr Request Handler for data import from databases and REST data sources.
 * </p>
 * <p>
 * It is configured in solrconfig.xml
 * </p>
 * <p>
 * Refer to http://wiki.apache.org/solr/DataImportHandler
 * for more details.
 * </p>
 * <p>
 * <b>This API is experimental and subject to change</b>
 *
 * @version $Id$
 * @since solr 1.3
 */
public class DataImportHandler extends RequestHandlerBase implements
        SolrCoreAware {

  private static final Logger LOG = LoggerFactory.getLogger(DataImportHandler.class);

  // Holds the parsed data-config and drives full/delta imports; recreated on
  // config reload and nulled out when the config is invalid.
  private DataImporter importer;

  // Datasource properties parsed from "datasource" entries in the handler's
  // defaults (see processConfiguration), keyed by datasource name.
  private Map dataSources = new HashMap();

  // Documents captured during a debug-mode run; added to the response and
  // then reset to null after each debug request.
  private List debugDocuments;

  // Debug mode is allowed unless explicitly disabled via enableDebug=no.
  private boolean debugEnabled = true;

  // Registered handler name minus any leading '/'; passed to SolrWriter.
  private String myName = "dataimport";

  // Session map shared across importer re-creations for this core.
  private Map coreScopeSession = new HashMap();

  @Override
  @SuppressWarnings("unchecked")
  public void init(NamedList args) {
    super.init(args);
  }

  /**
   * SolrCoreAware callback: discovers the name this handler is registered
   * under and, when a "config" location is present in the defaults, builds
   * the {@link DataImporter} from that file.
   */
  @SuppressWarnings("unchecked")
  public void inform(SolrCore core) {
    try {
      //hack to get the name of this handler
      for (Map.Entry<String, SolrRequestHandler> e : core.getRequestHandlers().entrySet()) {
        SolrRequestHandler handler = e.getValue();
        //this will not work if startup=lazy is set
        if (this == handler) {
          String name = e.getKey();
          if (name.startsWith("/")) {
            myName = name.substring(1);
          }
          // some users may have '/' in the handler name. replace with '_'
          myName = myName.replaceAll("/", "_");
        }
      }
      String debug = (String) initArgs.get(ENABLE_DEBUG);
      if (debug != null && "no".equals(debug))
        debugEnabled = false;
      NamedList defaults = (NamedList) initArgs.get("defaults");
      if (defaults != null) {
        String configLoc = (String) defaults.get("config");
        if (configLoc != null && configLoc.length() != 0) {
          processConfiguration(defaults);

          importer = new DataImporter(SolrWriter.getResourceAsString(core
                  .getResourceLoader().openResource(configLoc)), core,
                  dataSources, coreScopeSession);
        }
      }
    } catch (Throwable e) {
      // Record the error for core-level reporting, then fail hard so a DIH
      // with a broken config does not appear usable.
      SolrConfig.severeErrors.add(e);
      LOG.error(DataImporter.MSG.LOAD_EXP, e);
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
              DataImporter.MSG.INVALID_CONFIG, e);
    }
  }

  /**
   * Dispatches DIH commands (show-config, full-import, delta-import, import,
   * abort, reload-config) and reports importer status in the response.
   * Import commands normally run asynchronously; debug mode and content
   * streams force a synchronous run.
   */
  @Override
  @SuppressWarnings("unchecked")
  public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp)
          throws Exception {
    rsp.setHttpCaching(false);
    SolrParams params = req.getParams();
    DataImporter.RequestParams requestParams = new DataImporter.RequestParams(getParamsMap(params));
    String command = requestParams.command;
    // Only the first content stream (if any) is used as the import payload.
    Iterable<ContentStream> streams = req.getContentStreams();
    if (streams != null) {
      for (ContentStream stream : streams) {
        requestParams.contentStream = stream;
        break;
      }
    }
    if (DataImporter.SHOW_CONF_CMD.equals(command)) {
      // Modify incoming request params to add wt=raw
      ModifiableSolrParams rawParams = new ModifiableSolrParams(req.getParams());
      rawParams.set(CommonParams.WT, "raw");
      req.setParams(rawParams);
      // NOTE(review): `defaults` is not declared in this method or as a
      // visible field of this class — presumably this should read the config
      // location from the handler defaults; TODO confirm against the
      // original (un-garbled) source.
      String dataConfigFile = defaults.get("config");
      ContentStreamBase content = new ContentStreamBase.StringStream(SolrWriter
              .getResourceAsString(req.getCore().getResourceLoader().openResource(
                      dataConfigFile)));
      rsp.add(RawResponseWriter.CONTENT, content);
      return;
    }

    rsp.add("initArgs", initArgs);
    String message = "";

    if (command != null)
      rsp.add("command", command);

    if (requestParams.debug && (importer == null || !importer.isBusy())) {
      // Reload the data-config.xml
      importer = null;
      if (requestParams.dataConfig != null) {
        try {
          processConfiguration((NamedList) initArgs.get("defaults"));
          importer = new DataImporter(requestParams.dataConfig, req.getCore()
                  , dataSources, coreScopeSession);
        } catch (RuntimeException e) {
          rsp.add("exception", DebugLogger.getStacktraceString(e));
          importer = null;
          return;
        }
      } else {
        inform(req.getCore());
      }
      message = DataImporter.MSG.CONFIG_RELOADED;
    }

    // If importer is still null
    if (importer == null) {
      rsp.add("status", DataImporter.MSG.NO_INIT);
      return;
    }

    if (command != null && DataImporter.ABORT_CMD.equals(command)) {
      importer.runCmd(requestParams, null);
    } else if (importer.isBusy()) {
      message = DataImporter.MSG.CMD_RUNNING;
    } else if (command != null) {
      if (DataImporter.FULL_IMPORT_CMD.equals(command)
              || DataImporter.DELTA_IMPORT_CMD.equals(command) ||
              IMPORT_CMD.equals(command)) {

        UpdateRequestProcessorChain processorChain =
                req.getCore().getUpdateProcessingChain(params.get(UpdateParams.UPDATE_PROCESSOR));
        UpdateRequestProcessor processor = processorChain.createProcessor(req, rsp);
        SolrResourceLoader loader = req.getCore().getResourceLoader();
        SolrWriter sw = getSolrWriter(processor, loader, requestParams);

        if (requestParams.debug) {
          if (debugEnabled) {
            // Synchronous request for the debug mode
            importer.runCmd(requestParams, sw);
            rsp.add("mode", "debug");
            rsp.add("documents", debugDocuments);
            if (sw.debugLogger != null)
              rsp.add("verbose-output", sw.debugLogger.output);
            debugDocuments = null;
          } else {
            message = DataImporter.MSG.DEBUG_NOT_ENABLED;
          }
        } else {
          // Asynchronous request for normal mode
          if (requestParams.contentStream == null && !requestParams.syncMode) {
            importer.runAsync(requestParams, sw);
          } else {
            importer.runCmd(requestParams, sw);
          }
        }
      } else if (DataImporter.RELOAD_CONF_CMD.equals(command)) {
        importer = null;
        inform(req.getCore());
        message = DataImporter.MSG.CONFIG_RELOADED;
      }
    }
    rsp.add("status", importer.isBusy() ? "busy" : "idle");
    rsp.add("importResponse", message);
    rsp.add("statusMessages", importer.getStatusMessages());

    RequestHandlerUtils.addExperimentalFormatWarning(rsp);
  }

  /**
   * Flattens SolrParams into a plain Map: single-valued parameters map to a
   * String, multi-valued ones to a List of Strings.
   */
  private Map getParamsMap(SolrParams params) {
    Iterator<String> names = params.getParameterNamesIterator();
    Map result = new HashMap();
    while (names.hasNext()) {
      String s = names.next();
      String[] val = params.getParams(s);
      if (val == null || val.length < 1)
        continue;
      if (val.length == 1)
        result.put(s, val[0]);
      else
        result.put(s, Arrays.asList(val));
    }
    return result;
  }

  /**
   * Parses "datasource" entries out of the handler defaults into
   * {@link #dataSources}, keyed by each datasource's "name" attribute.
   * Stops at the first null-named entry.
   */
  @SuppressWarnings("unchecked")
  private void processConfiguration(NamedList defaults) {
    if (defaults == null) {
      LOG.info("No configuration specified in solrconfig.xml for DataImportHandler");
      return;
    }

    LOG.info("Processing configuration from solrconfig.xml: " + defaults);

    dataSources = new HashMap();

    int position = 0;

    while (position < defaults.size()) {
      if (defaults.getName(position) == null)
        break;

      String name = defaults.getName(position);
      if (name.equals("datasource")) {
        NamedList dsConfig = (NamedList) defaults.getVal(position);
        Properties props = new Properties();
        for (int i = 0; i < dsConfig.size(); i++)
          props.put(dsConfig.getName(i), dsConfig.getVal(i));
        LOG.info("Adding properties to datasource: " + props);
        dataSources.put((String) dsConfig.get("name"), props);
      }
      position++;
    }
  }

  /**
   * Builds the SolrWriter used by an import run. In debug mode the returned
   * writer additionally records each uploaded document into
   * {@link #debugDocuments} so it can be echoed in the response.
   */
  private SolrWriter getSolrWriter(final UpdateRequestProcessor processor,
                                   final SolrResourceLoader loader, final DataImporter.RequestParams requestParams) {

    return new SolrWriter(processor, loader.getConfigDir(), myName) {

      @Override
      public boolean upload(SolrInputDocument document) {
        try {
          if (requestParams.debug) {
            if (debugDocuments == null)
              debugDocuments = new ArrayList();
            debugDocuments.add(document);
          }
          return super.upload(document);
        } catch (RuntimeException e) {
          // A single bad document must not abort the whole import.
          LOG.error("Exception while adding: " + document, e);
          return false;
        }
      }
    };
  }

  /**
   * Merges importer status, per-run statistics, cumulative statistics and the
   * base handler statistics into one NamedList for JMX / stats.jsp.
   */
  @Override
  @SuppressWarnings("unchecked")
  public NamedList getStatistics() {
    if (importer == null)
      return super.getStatistics();

    DocBuilder.Statistics cumulative = importer.cumulativeStatistics;
    NamedList result = new NamedList();

    result.add("Status", importer.getStatus().toString());

    if (importer.docBuilder != null) {
      DocBuilder.Statistics running = importer.docBuilder.importStatistics;
      result.add("Documents Processed", running.docCount);
      result.add("Requests made to DataSource", running.queryCount);
      result.add("Rows Fetched", running.rowsCount);
      result.add("Documents Deleted", running.deletedDocCount);
      result.add("Documents Skipped", running.skipDocCount);
    }

    result.add(DataImporter.MSG.TOTAL_DOC_PROCESSED, cumulative.docCount);
    result.add(DataImporter.MSG.TOTAL_QUERIES_EXECUTED, cumulative.queryCount);
    result.add(DataImporter.MSG.TOTAL_ROWS_EXECUTED, cumulative.rowsCount);
    result.add(DataImporter.MSG.TOTAL_DOCS_DELETED, cumulative.deletedDocCount);
    result.add(DataImporter.MSG.TOTAL_DOCS_SKIPPED, cumulative.skipDocCount);

    NamedList requestStatistics = super.getStatistics();
    if (requestStatistics != null) {
      for (int i = 0; i < requestStatistics.size(); i++) {
        result.add(requestStatistics.getName(i), requestStatistics.getVal(i));
      }
    }

    return result;
  }

  // //////////////////////SolrInfoMBeans methods //////////////////////

  @Override
  public String getDescription() {
    return DataImporter.MSG.JMX_DESC;
  }

  @Override
  public String getSourceId() {
    return "$Id$";
  }

  @Override
  public String getVersion() {
    return "1.0";
  }

  @Override
  public String getSource() {
    return "$URL$";
  }

  /** Init-arg name that disables debug mode when set to "no". */
  public static final String ENABLE_DEBUG = "enableDebug";
}
a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataImportHandlerException.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataImportHandlerException.java new file mode 100644 index 00000000000..0ffeb2c25c6 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataImportHandlerException.java @@ -0,0 +1,78 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.handler.dataimport; + +/** + *

    Exception class for all DataImportHandler exceptions

    + *

    + * This API is experimental and subject to change + *

    + * $Id$ + * + * @since solr 1.3 + */ +public class DataImportHandlerException extends RuntimeException { + private int errCode; + + public boolean debugged = false; + + public static final int SEVERE = 500, WARN = 400, SKIP = 300, SKIP_ROW =301; + + public DataImportHandlerException(int err) { + super(); + errCode = err; + } + + public DataImportHandlerException(int err, String message) { + super(message + (SolrWriter.getDocCount() == null ? "" : MSG + SolrWriter.getDocCount())); + errCode = err; + } + + public DataImportHandlerException(int err, String message, Throwable cause) { + super(message + (SolrWriter.getDocCount() == null ? "" : MSG + SolrWriter.getDocCount()), cause); + errCode = err; + } + + public DataImportHandlerException(int err, Throwable cause) { + super(cause); + errCode = err; + } + + public int getErrCode() { + return errCode; + } + + public static void wrapAndThrow(int err, Exception e) { + if (e instanceof DataImportHandlerException) { + throw (DataImportHandlerException) e; + } else { + throw new DataImportHandlerException(err, e); + } + } + + public static void wrapAndThrow(int err, Exception e, String msg) { + if (e instanceof DataImportHandlerException) { + throw (DataImportHandlerException) e; + } else { + throw new DataImportHandlerException(err, msg, e); + } + } + + + public static final String MSG = " Processing Document # "; +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataImporter.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataImporter.java new file mode 100644 index 00000000000..0b33119e21f --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataImporter.java @@ -0,0 +1,583 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.handler.dataimport; + +import org.apache.solr.core.SolrConfig; +import org.apache.solr.core.SolrCore; +import org.apache.solr.schema.IndexSchema; +import org.apache.solr.schema.SchemaField; +import org.apache.solr.common.util.ContentStream; +import static org.apache.solr.handler.dataimport.DataImportHandlerException.wrapAndThrow; +import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.w3c.dom.Document; +import org.w3c.dom.Element; +import org.w3c.dom.NodeList; +import org.xml.sax.InputSource; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import java.io.StringReader; +import java.text.SimpleDateFormat; +import java.util.*; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.locks.ReentrantLock; +import java.util.concurrent.ConcurrentHashMap; + +/** + *

/**
 * <p>Stores all configuration information for pulling and indexing data.</p>
 * <p><b>This API is experimental and subject to change</b></p>
 *
 * @version $Id$
 * @since solr 1.3
 */
public class DataImporter {

  public enum Status {
    IDLE, RUNNING_FULL_DUMP, RUNNING_DELTA_DUMP, JOB_FAILED
  }

  private static final Logger LOG = LoggerFactory.getLogger(DataImporter.class);

  // Current importer state; RUNNING_* while an import executes.
  private Status status = Status.IDLE;

  // Parsed representation of data-config.xml.
  private DataConfig config;

  // Wall-clock start of the current import; consumed by delta queries.
  private Date indexStartTime;

  // General-purpose key/value store, see store()/retrieve().
  private Properties store = new Properties();

  // Datasource properties supplied by the handler (solrconfig.xml), keyed by
  // datasource name; consulted before the ones in data-config.xml.
  private Map<String, Properties> dataSourceProps = new HashMap<String, Properties>();

  private IndexSchema schema;

  // Builder for the in-progress (or most recent) import run.
  public DocBuilder docBuilder;

  // Statistics accumulated across all non-debug runs.
  public DocBuilder.Statistics cumulativeStatistics = new DocBuilder.Statistics();

  private SolrCore core;

  // Guards against concurrent imports; tryLock'd in runCmd.
  private ReentrantLock importLock = new ReentrantLock();

  // Session state that survives importer re-creation within one core.
  private final Map coreScopeSession;

  /**
   * Only for testing purposes
   */
  DataImporter() {
    coreScopeSession = new ConcurrentHashMap();
  }

  /**
   * Parses the given data-config XML, indexes schema fields for
   * case-insensitive lookup, then initializes and verifies every root entity.
   */
  DataImporter(String dataConfig, SolrCore core, Map ds, Map session) {
    if (dataConfig == null)
      throw new DataImportHandlerException(SEVERE,
              "Configuration not found");
    this.core = core;
    this.schema = core.getSchema();
    dataSourceProps = ds;
    if (session == null)
      session = new HashMap();
    coreScopeSession = session;
    loadDataConfig(dataConfig);

    for (Map.Entry<String, SchemaField> entry : schema.getFields().entrySet()) {
      config.lowerNameVsSchemaField.put(entry.getKey().toLowerCase(), entry.getValue());
    }

    for (DataConfig.Entity e : config.document.entities) {
      Map<String, DataConfig.Field> fields = new HashMap<String, DataConfig.Field>();
      initEntity(e, fields, false);
      verifyWithSchema(fields);
      identifyPk(e);
    }
  }

  /**
   * Logs (does not fail) mismatches between the data-config fields and the
   * Solr schema: required schema fields missing from the config, and config
   * fields with no schema counterpart.
   */
  private void verifyWithSchema(Map<String, DataConfig.Field> fields) {
    Map<String, SchemaField> schemaFields = schema.getFields();
    for (Map.Entry<String, SchemaField> entry : schemaFields.entrySet()) {
      SchemaField sf = entry.getValue();
      if (!fields.containsKey(sf.getName())) {
        if (sf.isRequired()) {
          LOG
                  .info(sf.getName()
                          + " is a required field in SolrSchema . But not found in DataConfig");
        }
      }
    }
    for (Map.Entry<String, DataConfig.Field> entry : fields.entrySet()) {
      DataConfig.Field fld = entry.getValue();
      SchemaField field = schema.getFieldOrNull(fld.getName());
      if (field == null) {
        // fall back to a case-insensitive match before reporting
        field = config.lowerNameVsSchemaField.get(fld.getName().toLowerCase());
        if (field == null) {
          LOG.info("The field :" + fld.getName() + " present in DataConfig does not have a counterpart in Solr Schema");
        }
      }
    }

  }

  /**
   * Used by tests
   */
  void loadAndInit(String configStr) {
    loadDataConfig(configStr);
    Map<String, DataConfig.Field> fields = new HashMap<String, DataConfig.Field>();
    for (DataConfig.Entity entity : config.document.entities) {
      initEntity(entity, fields, false);
    }
  }

  /**
   * Maps the schema uniqueKey to the entity column that feeds it, so delta
   * imports know which column identifies a document.
   */
  private void identifyPk(DataConfig.Entity entity) {
    SchemaField uniqueKey = schema.getUniqueKeyField();
    String schemaPk = "";
    if (uniqueKey != null)
      schemaPk = uniqueKey.getName();
    else return;
    //if no fields are mentioned . solr uniqueKey is same as dih 'pk'
    entity.pkMappingFromSchema = schemaPk;
    for (DataConfig.Field field : entity.fields) {
      if (field.getName().equals(schemaPk)) {
        entity.pkMappingFromSchema = field.column;
        //get the corresponding column mapping for the solr uniqueKey
        // But if there are multiple columns mapping to the solr uniqueKey, it will fail
        // so , in one off cases we may need pk
        break;
      }
    }

  }

  /**
   * Parses the data-config XML string into {@link #config}. XInclude is
   * enabled when the parser supports it. Any failure is recorded as a severe
   * core error and rethrown.
   */
  private void loadDataConfig(String configFile) {

    try {
      DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
      try {
        dbf.setXIncludeAware(true);
        dbf.setNamespaceAware(true);
      } catch (UnsupportedOperationException e) {
        LOG.warn("XML parser doesn't support XInclude option");
      }
      DocumentBuilder builder = dbf.newDocumentBuilder();
      Document document = builder.parse(new InputSource(new StringReader(
              configFile)));

      config = new DataConfig();
      NodeList elems = document.getElementsByTagName("dataConfig");
      if (elems == null || elems.getLength() == 0) {
        // NOTE(review): markup appears stripped from this message by the
        // extraction; the original presumably named the <dataConfig> element.
        throw new DataImportHandlerException(SEVERE, "the root node '' is missing");
      }
      config.readFromXml((Element) elems.item(0));
      LOG.info("Data Configuration loaded successfully");
    } catch (Exception e) {
      SolrConfig.severeErrors.add(e);
      throw new DataImportHandlerException(SEVERE,
              "Exception occurred while initializing context", e);
    }
  }

  /**
   * Recursively initializes an entity and its children: marks the document
   * root, validates the "threads" attribute, resolves each field against the
   * schema (copying multiValued/type/indexed/stored/defaultValue attributes),
   * and freezes the attribute collections.
   */
  private void initEntity(DataConfig.Entity e,
                          Map<String, DataConfig.Field> fields, boolean docRootFound) {
    e.allAttributes.put(DATA_SRC, e.dataSource);

    if (!docRootFound && !"false".equals(e.docRoot)) {
      // if in this chain no document root is found()
      e.isDocRoot = true;
    }
    if (e.allAttributes.get("threads") != null) {
      if (docRootFound) throw new DataImportHandlerException(DataImportHandlerException.SEVERE, "'threads' not allowed below rootEntity ");
      config.isMultiThreaded = true;
    }

    if (e.fields != null) {
      for (DataConfig.Field f : e.fields) {
        if (schema != null) {
          // templated field names are resolved later, at runtime
          if (f.name != null && f.name.contains("${")) {
            f.dynamicName = true;
            continue;
          }
          SchemaField schemaField = schema.getFieldOrNull(f.getName());
          if (schemaField == null) {
            schemaField = config.lowerNameVsSchemaField.get(f.getName().toLowerCase());
            if (schemaField != null) f.name = schemaField.getName();
          }
          if (schemaField != null) {
            f.multiValued = schemaField.multiValued();
            f.allAttributes.put(MULTI_VALUED, Boolean.toString(schemaField
                    .multiValued()));
            f.allAttributes.put(TYPE, schemaField.getType().getTypeName());
            f.allAttributes.put("indexed", Boolean.toString(schemaField.indexed()));
            f.allAttributes.put("stored", Boolean.toString(schemaField.stored()));
            f.allAttributes.put("defaultValue", schemaField.getDefaultValue());
          } else {
            f.toWrite = false;
          }
        }
        fields.put(f.getName(), f);
        f.entity = e;
        f.allAttributes.put("boost", f.boost.toString());
        f.allAttributes.put("toWrite", Boolean.toString(f.toWrite));
        e.allFieldsList.add(Collections.unmodifiableMap(f.allAttributes));
      }
    }
    e.allFieldsList = Collections.unmodifiableList(e.allFieldsList);
    e.allAttributes = Collections.unmodifiableMap(e.allAttributes);

    if (e.entities == null)
      return;
    for (DataConfig.Entity e1 : e.entities) {
      e1.parentEntity = e;
      initEntity(e1, fields, e.isDocRoot || docRootFound);
    }

  }

  DataConfig getConfig() {
    return config;
  }

  Date getIndexStartTime() {
    return indexStartTime;
  }

  void setIndexStartTime(Date indextStartTime) {
    this.indexStartTime = indextStartTime;
  }

  void store(Object key, Object value) {
    store.put(key, value);
  }

  Object retrieve(Object key) {
    return store.get(key);
  }

  /**
   * Creates and initializes the DataSource for the given entity. Lookup
   * order: request-supplied properties by name, data-config by name, then
   * the default (null-named) datasource from each. "rows"/"start" request
   * parameters are translated into a maxRows property.
   */
  DataSource getDataSourceInstance(DataConfig.Entity key, String name, Context ctx) {
    Properties p = dataSourceProps.get(name);
    if (p == null)
      p = config.dataSources.get(name);
    if (p == null)
      p = dataSourceProps.get(null);// for default data source
    if (p == null)
      p = config.dataSources.get(null);
    if (p == null)
      throw new DataImportHandlerException(SEVERE,
              "No dataSource :" + name + " available for entity :"
                      + key.name);
    String type = p.getProperty(TYPE);
    DataSource dataSrc = null;
    if (type == null) {
      // JDBC is the implicit default datasource type
      dataSrc = new JdbcDataSource();
    } else {
      try {
        dataSrc = (DataSource) DocBuilder.loadClass(type, getCore()).newInstance();
      } catch (Exception e) {
        wrapAndThrow(SEVERE, e, "Invalid type for data source: " + type);
      }
    }
    try {
      Properties copyProps = new Properties();
      copyProps.putAll(p);
      Map map = ctx.getRequestParameters();
      if (map.containsKey("rows")) {
        int rows = Integer.parseInt((String) map.get("rows"));
        if (map.containsKey("start")) {
          rows += Integer.parseInt((String) map.get("start"));
        }
        copyProps.setProperty("maxRows", String.valueOf(rows));
      }
      dataSrc.init(ctx, copyProps);
    } catch (Exception e) {
      wrapAndThrow(SEVERE, e, "Failed to initialize DataSource: " + key.dataSource);
    }
    return dataSrc;
  }

  public Status getStatus() {
    return status;
  }

  public void setStatus(Status status) {
    this.status = status;
  }

  /** @return true while an import holds {@link #importLock}. */
  public boolean isBusy() {
    return importLock.isLocked();
  }

  /**
   * Runs a full import synchronously. Non-debug runs are folded into the
   * cumulative statistics; the status is always reset to IDLE afterwards.
   */
  public void doFullImport(SolrWriter writer, RequestParams requestParams) {
    LOG.info("Starting Full Import");
    setStatus(Status.RUNNING_FULL_DUMP);

    setIndexStartTime(new Date());

    try {
      docBuilder = new DocBuilder(this, writer, requestParams);
      docBuilder.execute();
      if (!requestParams.debug)
        cumulativeStatistics.add(docBuilder.importStatistics);
    } catch (Throwable t) {
      // NOTE(review): if the DocBuilder constructor itself threw, docBuilder
      // may still refer to the previous run here — TODO confirm intended.
      LOG.error("Full Import failed", t);
      docBuilder.rollback();
    } finally {
      setStatus(Status.IDLE);
      config.clearCaches();
      DocBuilder.INSTANCE.set(null);
    }

  }

  /**
   * Runs a delta import synchronously; same lifecycle handling as
   * {@link #doFullImport}.
   */
  public void doDeltaImport(SolrWriter writer, RequestParams requestParams) {
    LOG.info("Starting Delta Import");
    setStatus(Status.RUNNING_DELTA_DUMP);

    try {
      setIndexStartTime(new Date());
      docBuilder = new DocBuilder(this, writer, requestParams);
      docBuilder.execute();
      if (!requestParams.debug)
        cumulativeStatistics.add(docBuilder.importStatistics);
    } catch (Throwable t) {
      LOG.error("Delta Import Failed", t);
      docBuilder.rollback();
    } finally {
      setStatus(Status.IDLE);
      config.clearCaches();
      DocBuilder.INSTANCE.set(null);
    }

  }

  /** Runs {@link #runCmd} on a freshly spawned thread (fire-and-forget). */
  public void runAsync(final RequestParams reqParams, final SolrWriter sw) {
    new Thread() {
      @Override
      public void run() {
        runCmd(reqParams, sw);
      }
    }.start();
  }

  /**
   * Executes the requested command. "abort" bypasses the lock and signals the
   * active DocBuilder; import commands are skipped (with a warning) when
   * another import already holds the lock.
   */
  void runCmd(RequestParams reqParams, SolrWriter sw) {
    String command = reqParams.command;
    if (command.equals(ABORT_CMD)) {
      if (docBuilder != null) {
        docBuilder.abort();
      }
      return;
    }
    if (!importLock.tryLock()) {
      LOG.warn("Import command failed . another import is running");
      return;
    }
    try {
      if (FULL_IMPORT_CMD.equals(command) || IMPORT_CMD.equals(command)) {
        doFullImport(sw, reqParams);
      } else if (command.equals(DELTA_IMPORT_CMD)) {
        doDeltaImport(sw, reqParams);
      }
    } finally {
      importLock.unlock();
    }
  }

  /**
   * Snapshots the status messages stored under {@link #STATUS_MSGS} into a
   * plain ordered String map.
   */
  @SuppressWarnings("unchecked")
  Map getStatusMessages() {
    //this map object is a Collections.synchronizedMap(new LinkedHashMap()). if we
    // synchronize on the object it must be safe to iterate through the map
    Map statusMessages = (Map) retrieve(STATUS_MSGS);
    Map result = new LinkedHashMap();
    if (statusMessages != null) {
      synchronized (statusMessages) {
        for (Object o : statusMessages.entrySet()) {
          Map.Entry e = (Map.Entry) o;
          //the toString is taken because some of the Objects create the data lazily when toString() is called
          result.put((String) e.getKey(), e.getValue().toString());
        }
      }
    }
    return result;

  }

  DocBuilder getDocBuilder() {
    return docBuilder;
  }

  // Per-thread query counter used by import statistics.
  static final ThreadLocal<AtomicLong> QUERY_COUNT = new ThreadLocal<AtomicLong>() {
    protected AtomicLong initialValue() {
      return new AtomicLong();
    }
  };

  // SimpleDateFormat is not thread-safe, hence one instance per thread.
  static final ThreadLocal<SimpleDateFormat> DATE_TIME_FORMAT = new ThreadLocal<SimpleDateFormat>() {
    @Override
    protected SimpleDateFormat initialValue() {
      return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    }
  };

  /** User-visible message constants shared across the DIH classes. */
  static final class MSG {
    public static final String NO_CONFIG_FOUND = "Configuration not found";

    public static final String NO_INIT = "DataImportHandler started. Not Initialized. No commands can be run";

    public static final String INVALID_CONFIG = "FATAL: Could not create importer. DataImporter config invalid";

    public static final String LOAD_EXP = "Exception while loading DataImporter";

    public static final String JMX_DESC = "Manage data import from databases to Solr";

    public static final String CMD_RUNNING = "A command is still running...";

    // NOTE(review): markup appears stripped by extraction; the original
    // presumably named the <str name="enableDebug"> element here.
    public static final String DEBUG_NOT_ENABLED = "Debug not enabled. Add a tag true in solrconfig.xml";

    // NOTE(review): "sucessfully" is misspelled in this user-visible message;
    // left byte-identical here (fixing it would change runtime output).
    public static final String CONFIG_RELOADED = "Configuration Re-loaded sucessfully";

    public static final String TOTAL_DOC_PROCESSED = "Total Documents Processed";

    public static final String TOTAL_FAILED_DOCS = "Total Documents Failed";

    public static final String TOTAL_QUERIES_EXECUTED = "Total Requests made to DataSource";

    public static final String TOTAL_ROWS_EXECUTED = "Total Rows Fetched";

    public static final String TOTAL_DOCS_DELETED = "Total Documents Deleted";

    public static final String TOTAL_DOCS_SKIPPED = "Total Documents Skipped";
  }

  /**
   * Per-request parameters parsed from the HTTP request map. Debug mode
   * rewrites several defaults (rows=10, commit=false, clean=false).
   */
  static final class RequestParams {
    public String command = null;

    public boolean debug = false;

    public boolean verbose = false;

    public boolean syncMode = false;

    public boolean commit = true;

    public boolean optimize = true;

    public int start = 0;

    public long rows = Integer.MAX_VALUE;

    public boolean clean = true;

    public List entities;

    public Map requestParams;

    public String dataConfig;

    public ContentStream contentStream;

    public RequestParams() {
    }

    public RequestParams(Map requestParams) {
      if (requestParams.containsKey("command"))
        command = (String) requestParams.get("command");

      if ("on".equals(requestParams.get("debug"))) {
        debug = true;
        rows = 10;
        // Set default values suitable for debug mode
        commit = false;
        clean = false;
        verbose = "true".equals(requestParams.get("verbose"))
                || "on".equals(requestParams.get("verbose"));
      }
      syncMode = "true".equals(requestParams.get("synchronous"));
      // delta-import / import default to NOT cleaning the index first
      if (DELTA_IMPORT_CMD.equals(command) || IMPORT_CMD.equals(command)) {
        clean = false;
      }
      if (requestParams.containsKey("commit"))
        commit = Boolean.parseBoolean((String) requestParams.get("commit"));
      if (requestParams.containsKey("start"))
        start = Integer.parseInt((String) requestParams.get("start"));
      if (requestParams.containsKey("rows"))
        rows = Integer.parseInt((String) requestParams.get("rows"));
      if (requestParams.containsKey("clean"))
        clean = Boolean.parseBoolean((String) requestParams.get("clean"));
      if (requestParams.containsKey("optimize")) {
        optimize = Boolean.parseBoolean((String) requestParams.get("optimize"));
        // an optimize implies a commit
        if (optimize)
          commit = true;
      }

      Object o = requestParams.get("entity");

      if (o != null && o instanceof String) {
        entities = new ArrayList();
        entities.add((String) o);
      } else if (o != null && o instanceof List) {
        entities = (List) requestParams.get("entity");
      }

      dataConfig = (String) requestParams.get("dataConfig");
      if (dataConfig != null && dataConfig.trim().length() == 0) {
        // Empty data-config param is not valid, change it to null
        dataConfig = null;
      }

      this.requestParams = requestParams;
    }
  }

  IndexSchema getSchema() {
    return schema;
  }

  Map getCoreScopeSession() {
    return coreScopeSession;
  }

  SolrCore getCore() {
    return core;
  }

  public static final String COLUMN = "column";

  public static final String TYPE = "type";

  public static final String DATA_SRC = "dataSource";

  public static final String MULTI_VALUED = "multiValued";

  public static final String NAME = "name";

  public static final String STATUS_MSGS = "status-messages";

  public static final String FULL_IMPORT_CMD = "full-import";

  public static final String IMPORT_CMD = "import";

  public static final String DELTA_IMPORT_CMD = "delta-import";

  public static final String ABORT_CMD = "abort";

  public static final String DEBUG_MODE = "debug";

  public static final String RELOAD_CONF_CMD = "reload-config";

  public static final String SHOW_CONF_CMD = "show-config";
}
b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataSource.java @@ -0,0 +1,72 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.handler.dataimport; + +import java.util.Properties; + +/** + *

/**
 * <p>
 * Provides data from a source with a given query.
 * </p>
 * <p>
 * Implementation of this abstract class must provide a default no-arg constructor
 * </p>
 * <p>
 * Refer to http://wiki.apache.org/solr/DataImportHandler
 * for more details.
 * </p>
 * <p>
 * <b>This API is experimental and may change in the future.</b>
 *
 * @version $Id$
 * @since solr 1.3
 */
public abstract class DataSource<T> {
  // NOTE(review): the type parameter <T> was stripped by the extraction
  // (getData below returns T); restored here from the visible usage.

  /**
   * Initializes the DataSource with the <code>Context</code> and
   * initialization properties.
   * <p>
   * This is invoked by the <code>DataImporter</code> after creating an
   * instance of this class.
   *
   * @param context   the current context
   * @param initProps datasource initialization properties
   */
  public abstract void init(Context context, Properties initProps);

  /**
   * Get records for the given query. The return type depends on the
   * implementation .
   *
   * @param query The query string. It can be a SQL for JdbcDataSource or a URL
   *              for HttpDataSource or a file location for FileDataSource or a custom
   *              format for your own custom DataSource.
   * @return Depends on the implementation. For instance JdbcDataSource returns
   *         an Iterator&lt;Map&lt;String, Object&gt;&gt;
   */
  public abstract T getData(String query);

  /**
   * Cleans up resources of this DataSource after use.
   */
  public abstract void close();
}
+ */ + +package org.apache.solr.handler.dataimport; + +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.*; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + *

    + * Transformer instance which creates Date instances out of Strings. + *

    + *

    + *

    + * Refer to http://wiki.apache.org/solr/DataImportHandler + * for more details. + *

    + *

    + * This API is experimental and subject to change + * + * @version $Id$ + * @since solr 1.3 + */ +public class DateFormatTransformer extends Transformer { + private Map fmtCache = new HashMap(); + private static final Logger LOG = LoggerFactory + .getLogger(DateFormatTransformer.class); + + @SuppressWarnings("unchecked") + public Object transformRow(Map aRow, Context context) { + + for (Map map : context.getAllEntityFields()) { + Locale locale = Locale.getDefault(); + String customLocale = map.get("locale"); + if(customLocale != null){ + locale = new Locale(customLocale); + } + + String fmt = map.get(DATE_TIME_FMT); + if (fmt == null) + continue; + String column = map.get(DataImporter.COLUMN); + String srcCol = map.get(RegexTransformer.SRC_COL_NAME); + if (srcCol == null) + srcCol = column; + try { + Object o = aRow.get(srcCol); + if (o instanceof List) { + List inputs = (List) o; + List results = new ArrayList(); + for (Object input : inputs) { + results.add(process(input, fmt, locale)); + } + aRow.put(column, results); + } else { + if (o != null) { + aRow.put(column, process(o, fmt, locale)); + } + } + } catch (ParseException e) { + LOG.warn("Could not parse a Date field ", e); + } + } + return aRow; + } + + private Date process(Object value, String format, Locale locale) throws ParseException { + if (value == null) return null; + String strVal = value.toString().trim(); + if (strVal.length() == 0) + return null; + SimpleDateFormat fmt = fmtCache.get(format); + if (fmt == null) { + fmt = new SimpleDateFormat(format, locale); + fmtCache.put(format, fmt); + } + return fmt.parse(strVal); + } + + public static final String DATE_TIME_FMT = "dateTimeFormat"; +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DebugLogger.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DebugLogger.java new file mode 100644 index 00000000000..77c1ea7669e --- /dev/null +++ 
b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DebugLogger.java @@ -0,0 +1,281 @@ +package org.apache.solr.handler.dataimport; +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.solr.common.util.NamedList; + +import java.io.PrintWriter; +import java.io.StringWriter; +import java.text.MessageFormat; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.Stack; + +/** + *

    + * Implements most of the interactive development functionality + *

    + *

    + *

    + * Refer to http://wiki.apache.org/solr/DataImportHandler + * for more details. + *

    + *

 * This API is experimental and subject to change
 *
 * @version $Id$
 * @since solr 1.3
 */
class DebugLogger {
  // Stack of currently-open scopes (document / entity / transformer). A
  // sentinel DebugInfo (type -1) is pushed in the constructor and must never
  // be popped; the anonymous subclass below enforces that.
  private Stack<DebugInfo> debugStack;

  // Root of the debug output tree; points at the sentinel's list.
  NamedList output;
  private final SolrWriter writer;

  // Visual separator written between rows in the debug output.
  private static final String LINE = "---------------------------------------------";

  // Header emitted before each row; {0} is the 1-based row counter.
  private MessageFormat fmt = new MessageFormat(
      "----------- row #{0}-------------");

  // Toggled by DISABLE_LOGGING / ENABLE_LOGGING events (used to honour the
  // "start" request parameter); entity start/end events are always processed
  // so the scope stack stays balanced.
  boolean enabled = true;

  public DebugLogger(SolrWriter solrWriter) {
    writer = solrWriter;
    output = new NamedList();
    debugStack = new Stack<DebugInfo>() {

      public DebugInfo pop() {
        // Guard the sentinel: popping it means the event stream was
        // unbalanced (an END without a matching START).
        if (size() == 1)
          throw new DataImportHandlerException(
              DataImportHandlerException.SEVERE, "Stack is becoming empty");
        return super.pop();
      }
    };
    debugStack.push(new DebugInfo(null, -1, null));
    output = debugStack.peek().lst;
  }

  /** Top of the scope stack, or null if (unexpectedly) empty. */
  private DebugInfo peekStack() {
    return debugStack.isEmpty() ? null : debugStack.peek();
  }

  /**
   * Entry point for debug events (constants defined on SolrWriter). START_*
   * events push a scope, END_* events pop back to the matching scope, and
   * row/exception events record data into the current scope's NamedList.
   *
   * @param event one of the SolrWriter event constants
   * @param name  entity/transformer name, or a metadata key for ENTITY_META
   * @param row   event payload: a row Map/List, an Exception, or a meta value
   */
  public void log(int event, String name, Object row) {
    if (event == SolrWriter.DISABLE_LOGGING) {
      enabled = false;
      return;
    } else if (event == SolrWriter.ENABLE_LOGGING) {
      enabled = true;
      return;
    }

    // While disabled, still track entity scopes so the stack stays balanced.
    if (!enabled && event != SolrWriter.START_ENTITY
        && event != SolrWriter.END_ENTITY) {
      return;
    }

    if (event == SolrWriter.START_DOC) {
      debugStack.push(new DebugInfo(null, SolrWriter.START_DOC, peekStack()));
    } else if (SolrWriter.START_ENTITY == event) {
      debugStack
          .push(new DebugInfo(name, SolrWriter.START_ENTITY, peekStack()));
    } else if (SolrWriter.ENTITY_OUT == event
        || SolrWriter.PRE_TRANSFORMER_ROW == event) {
      // Record a row only when the current scope is an entity or document.
      if (debugStack.peek().type == SolrWriter.START_ENTITY
          || debugStack.peek().type == SolrWriter.START_DOC) {
        debugStack.peek().lst.add(null, fmt.format(new Object[]{++debugStack
            .peek().rowCount}));
        addToNamedList(debugStack.peek().lst, row);
        debugStack.peek().lst.add(null, LINE);
      }
    } else if (event == SolrWriter.ROW_END) {
      popAllTransformers();
    } else if (SolrWriter.END_ENTITY == event) {
      // Unwind any transformer scopes left open inside the entity.
      while (debugStack.pop().type != SolrWriter.START_ENTITY)
        ;
    } else if (SolrWriter.END_DOC == event) {
      while (debugStack.pop().type != SolrWriter.START_DOC)
        ;
    } else if (event == SolrWriter.TRANSFORMER_EXCEPTION) {
      debugStack.push(new DebugInfo(name, event, peekStack()));
      debugStack.peek().lst.add("EXCEPTION",
          getStacktraceString((Exception) row));
    } else if (SolrWriter.TRANSFORMED_ROW == event) {
      debugStack.push(new DebugInfo(name, event, peekStack()));
      debugStack.peek().lst.add(null, LINE);
      addToNamedList(debugStack.peek().lst, row);
      debugStack.peek().lst.add(null, LINE);
      if (row instanceof DataImportHandlerException) {
        // Mark as already reported so ENTITY_EXCEPTION won't log it twice.
        DataImportHandlerException dataImportHandlerException = (DataImportHandlerException) row;
        dataImportHandlerException.debugged = true;
      }
    } else if (SolrWriter.ENTITY_META == event) {
      popAllTransformers();
      debugStack.peek().lst.add(name, row);
    } else if (SolrWriter.ENTITY_EXCEPTION == event) {
      if (row instanceof DataImportHandlerException) {
        DataImportHandlerException dihe = (DataImportHandlerException) row;
        if (dihe.debugged)
          return;
        dihe.debugged = true;
      }

      popAllTransformers();
      debugStack.peek().lst.add("EXCEPTION",
          getStacktraceString((Exception) row));
    }
  }

  /** Pops transformer scopes until an entity or document scope is on top. */
  private void popAllTransformers() {
    while (true) {
      int type = debugStack.peek().type;
      if (type == SolrWriter.START_DOC || type == SolrWriter.START_ENTITY)
        break;
      debugStack.pop();
    }
  }

  /**
   * Flattens a row (a Map, or a List of Maps) into the given NamedList.
   */
  private void addToNamedList(NamedList nl, Object row) {
    if (row instanceof List) {
      List list = (List) row;
      NamedList l = new NamedList();
      nl.add(null, l);
      // NOTE(review): entries go into nl, not the nested list l created
      // above — l stays empty. Looks intentional in upstream; confirm.
      for (Object o : list) {
        Map<String, Object> map = (Map<String, Object>) o;
        for (Map.Entry<String, Object> entry : map.entrySet())
          nl.add(entry.getKey(), entry.getValue());
      }
    } else if (row instanceof Map) {
      Map<String, Object> map = (Map<String, Object>) row;
      for (Map.Entry<String, Object> entry : map.entrySet())
        nl.add(entry.getKey(), entry.getValue());
    }
  }

  /**
   * Wraps a DataSource so every query is logged with its elapsed time, and
   * failures are recorded as ENTITY_EXCEPTION events before propagating.
   */
  DataSource wrapDs(final DataSource ds) {
    return new DataSource() {
      public void init(Context context, Properties initProps) {
        ds.init(context, initProps);
      }

      public void close() {
        ds.close();
      }

      public Object getData(String query) {
        writer.log(SolrWriter.ENTITY_META, "query", query);
        long start = System.currentTimeMillis();
        try {
          return ds.getData(query);
        } catch (DataImportHandlerException de) {
          writer.log(SolrWriter.ENTITY_EXCEPTION,
              null, de);
          throw de;
        } catch (Exception e) {
          writer.log(SolrWriter.ENTITY_EXCEPTION,
              null, e);
          DataImportHandlerException de = new DataImportHandlerException(
              DataImportHandlerException.SEVERE, "", e);
          de.debugged = true;
          throw de;
        } finally {
          writer.log(SolrWriter.ENTITY_META, "time-taken", DocBuilder
              .getTimeElapsedSince(start));
        }
      }
    };
  }

  /**
   * Wraps a Transformer so the row is logged before and after the transform,
   * and transformer failures are recorded before propagating.
   */
  Transformer wrapTransformer(final Transformer t) {
    return new Transformer() {
      public Object transformRow(Map<String, Object> row, Context context) {
        writer.log(SolrWriter.PRE_TRANSFORMER_ROW, null, row);
        String tName = getTransformerName(t);
        Object result = null;
        try {
          result = t.transformRow(row, context);
          writer.log(SolrWriter.TRANSFORMED_ROW, tName, result);
        } catch (DataImportHandlerException de) {
          writer.log(SolrWriter.TRANSFORMER_EXCEPTION, tName, de);
          de.debugged = true;
          throw de;
        } catch (Exception e) {
          writer.log(SolrWriter.TRANSFORMER_EXCEPTION, tName, e);
          DataImportHandlerException de = new DataImportHandlerException(DataImportHandlerException.SEVERE, "", e);
          de.debugged = true;
          throw de;
        }
        return result;
      }
    };
  }

  /** Renders an exception's full stack trace as a String. */
  public static String getStacktraceString(Exception e) {
    StringWriter sw = new StringWriter();
    e.printStackTrace(new PrintWriter(sw));
    return sw.toString();
  }

  /**
   * Human-readable name for a transformer: the reflected target for
   * ReflectionTransformer, "script:&lt;fn&gt;" for ScriptTransformer, the
   * simple name for built-in (same-package) transformers, else the FQCN.
   */
  static String getTransformerName(Transformer t) {
    Class transClass = t.getClass();
    if (t instanceof EntityProcessorWrapper.ReflectionTransformer) {
      return ((EntityProcessorWrapper.ReflectionTransformer) t).trans;
    }
    if (t instanceof ScriptTransformer) {
      ScriptTransformer scriptTransformer = (ScriptTransformer) t;
      return "script:" + scriptTransformer.getFunctionName();
    }
    if (transClass.getPackage().equals(DebugLogger.class.getPackage())) {
      return transClass.getSimpleName();
    } else {
      return transClass.getName();
    }
  }

  /** One open scope (document, entity, or transformer) in the debug tree. */
  private static class DebugInfo {
    String name;

    int tCount, rowCount;

    // The NamedList this scope writes into; attached to the parent's list.
    NamedList lst;

    // One of the SolrWriter event constants (or -1 for the sentinel).
    int type;

    DebugInfo parent;

    public DebugInfo(String name, int type, DebugInfo parent) {
      this.name = name;
      this.type = type;
      this.parent = parent;
      lst = new NamedList();
      if (parent != null) {
        String displayName = null;
        if (type == SolrWriter.START_ENTITY) {
          displayName = "entity:" + name;
        } else if (type == SolrWriter.TRANSFORMED_ROW
            || type == SolrWriter.TRANSFORMER_EXCEPTION) {
          displayName = "transformer:" + name;
        } else if (type == SolrWriter.START_DOC) {
          this.name = displayName = "document#" + SolrWriter.getDocCount();
        }
        parent.lst.add(displayName, lst);
      }
    }
  }

}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.handler.dataimport; + +import org.apache.solr.common.SolrInputDocument; +import org.apache.solr.core.SolrCore; +import static org.apache.solr.handler.dataimport.SolrWriter.LAST_INDEX_KEY; +import static org.apache.solr.handler.dataimport.DataImportHandlerException.*; +import static org.apache.solr.handler.dataimport.DataImportHandlerException.wrapAndThrow; +import org.apache.solr.schema.SchemaField; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.*; + +/** + *

    DocBuilder is responsible for creating Solr documents out of the given configuration. It also maintains + * statistics information. It depends on the EntityProcessor implementations to fetch data.

    + *

    + * This API is experimental and subject to change + * + * @version $Id$ + * @since solr 1.3 + */ +public class DocBuilder { + + private static final Logger LOG = LoggerFactory.getLogger(DocBuilder.class); + + private static final Date EPOCH = new Date(0); + + DataImporter dataImporter; + + private DataConfig.Document document; + + private DataConfig.Entity root; + + @SuppressWarnings("unchecked") + private Map statusMessages = Collections.synchronizedMap(new LinkedHashMap()); + + public Statistics importStatistics = new Statistics(); + + SolrWriter writer; + + DataImporter.RequestParams requestParameters; + + boolean verboseDebug = false; + + Map session = new ConcurrentHashMap(); + + static final ThreadLocal INSTANCE = new ThreadLocal(); + Map functionsNamespace; + private Properties persistedProperties; + + public DocBuilder(DataImporter dataImporter, SolrWriter writer, DataImporter.RequestParams reqParams) { + INSTANCE.set(this); + this.dataImporter = dataImporter; + this.writer = writer; + DataImporter.QUERY_COUNT.set(importStatistics.queryCount); + requestParameters = reqParams; + verboseDebug = requestParameters.debug && requestParameters.verbose; + functionsNamespace = EvaluatorBag.getFunctionsNamespace(this.dataImporter.getConfig().functions, this); + persistedProperties = writer.readIndexerProperties(); + } + + public VariableResolverImpl getVariableResolver() { + try { + VariableResolverImpl resolver = null; + if(dataImporter != null && dataImporter.getCore() != null){ + resolver = new VariableResolverImpl(dataImporter.getCore().getResourceLoader().getCoreProperties()); + } else resolver = new VariableResolverImpl(); + Map indexerNamespace = new HashMap(); + if (persistedProperties.getProperty(LAST_INDEX_TIME) != null) { + indexerNamespace.put(LAST_INDEX_TIME, persistedProperties.getProperty(LAST_INDEX_TIME)); + } else { + // set epoch + indexerNamespace.put(LAST_INDEX_TIME, DataImporter.DATE_TIME_FORMAT.get().format(EPOCH)); + } + 
indexerNamespace.put(INDEX_START_TIME, dataImporter.getIndexStartTime()); + indexerNamespace.put("request", requestParameters.requestParams); + indexerNamespace.put("functions", functionsNamespace); + for (DataConfig.Entity entity : dataImporter.getConfig().document.entities) { + String key = entity.name + "." + SolrWriter.LAST_INDEX_KEY; + String lastIndex = persistedProperties.getProperty(key); + if (lastIndex != null) { + indexerNamespace.put(key, lastIndex); + } else { + indexerNamespace.put(key, DataImporter.DATE_TIME_FORMAT.get().format(EPOCH)); + } + } + resolver.addNamespace(DataConfig.IMPORTER_NS_SHORT, indexerNamespace); + resolver.addNamespace(DataConfig.IMPORTER_NS, indexerNamespace); + return resolver; + } catch (Exception e) { + wrapAndThrow(SEVERE, e); + // unreachable statement + return null; + } + } + + private void invokeEventListener(String className) { + try { + EventListener listener = (EventListener) loadClass(className, dataImporter.getCore()).newInstance(); + notifyListener(listener); + } catch (Exception e) { + wrapAndThrow(SEVERE, e, "Unable to load class : " + className); + } + } + + private void notifyListener(EventListener listener) { + String currentProcess; + if (dataImporter.getStatus() == DataImporter.Status.RUNNING_DELTA_DUMP) { + currentProcess = Context.DELTA_DUMP; + } else { + currentProcess = Context.FULL_DUMP; + } + listener.onEvent(new ContextImpl(null, getVariableResolver(), null, currentProcess, session, null, this)); + } + + @SuppressWarnings("unchecked") + public void execute() { + dataImporter.store(DataImporter.STATUS_MSGS, statusMessages); + document = dataImporter.getConfig().document; + final AtomicLong startTime = new AtomicLong(System.currentTimeMillis()); + statusMessages.put(TIME_ELAPSED, new Object() { + public String toString() { + return getTimeElapsedSince(startTime.get()); + } + }); + + statusMessages.put(DataImporter.MSG.TOTAL_QUERIES_EXECUTED, + importStatistics.queryCount); + 
statusMessages.put(DataImporter.MSG.TOTAL_ROWS_EXECUTED, + importStatistics.rowsCount); + statusMessages.put(DataImporter.MSG.TOTAL_DOC_PROCESSED, + importStatistics.docCount); + statusMessages.put(DataImporter.MSG.TOTAL_DOCS_SKIPPED, + importStatistics.skipDocCount); + + List entities = requestParameters.entities; + + // Trigger onImportStart + if (document.onImportStart != null) { + invokeEventListener(document.onImportStart); + } + AtomicBoolean fullCleanDone = new AtomicBoolean(false); + //we must not do a delete of *:* multiple times if there are multiple root entities to be run + Properties lastIndexTimeProps = new Properties(); + lastIndexTimeProps.setProperty(LAST_INDEX_KEY, + DataImporter.DATE_TIME_FORMAT.get().format(dataImporter.getIndexStartTime())); + for (DataConfig.Entity e : document.entities) { + if (entities != null && !entities.contains(e.name)) + continue; + lastIndexTimeProps.setProperty(e.name + "." + LAST_INDEX_KEY, + DataImporter.DATE_TIME_FORMAT.get().format(new Date())); + root = e; + String delQuery = e.allAttributes.get("preImportDeleteQuery"); + if (dataImporter.getStatus() == DataImporter.Status.RUNNING_DELTA_DUMP) { + cleanByQuery(delQuery, fullCleanDone); + doDelta(); + delQuery = e.allAttributes.get("postImportDeleteQuery"); + if (delQuery != null) { + fullCleanDone.set(false); + cleanByQuery(delQuery, fullCleanDone); + } + } else { + cleanByQuery(delQuery, fullCleanDone); + doFullDump(); + delQuery = e.allAttributes.get("postImportDeleteQuery"); + if (delQuery != null) { + fullCleanDone.set(false); + cleanByQuery(delQuery, fullCleanDone); + } + } + statusMessages.remove(DataImporter.MSG.TOTAL_DOC_PROCESSED); + } + + if (stop.get()) { + // Dont commit if aborted using command=abort + statusMessages.put("Aborted", DataImporter.DATE_TIME_FORMAT.get().format(new Date())); + rollback(); + } else { + // Do not commit unnecessarily if this is a delta-import and no documents were created or deleted + if (!requestParameters.clean) { + if 
(importStatistics.docCount.get() > 0 || importStatistics.deletedDocCount.get() > 0) { + finish(lastIndexTimeProps); + } + } else { + // Finished operation normally, commit now + finish(lastIndexTimeProps); + } + if (document.onImportEnd != null) { + invokeEventListener(document.onImportEnd); + } + } + + statusMessages.remove(TIME_ELAPSED); + statusMessages.put(DataImporter.MSG.TOTAL_DOC_PROCESSED, ""+ importStatistics.docCount.get()); + if(importStatistics.failedDocCount.get() > 0) + statusMessages.put(DataImporter.MSG.TOTAL_FAILED_DOCS, ""+ importStatistics.failedDocCount.get()); + + statusMessages.put("Time taken ", getTimeElapsedSince(startTime.get())); + LOG.info("Time taken = " + getTimeElapsedSince(startTime.get())); + } + + @SuppressWarnings("unchecked") + private void finish(Properties lastIndexTimeProps) { + LOG.info("Import completed successfully"); + statusMessages.put("", "Indexing completed. Added/Updated: " + + importStatistics.docCount + " documents. Deleted " + + importStatistics.deletedDocCount + " documents."); + if(requestParameters.commit) { + writer.commit(requestParameters.optimize); + addStatusMessage("Committed"); + if (requestParameters.optimize) + addStatusMessage("Optimized"); + } + try { + writer.persist(lastIndexTimeProps); + } catch (Exception e) { + LOG.error("Could not write property file", e); + statusMessages.put("error", "Could not write property file. Delta imports will not work. " + + "Make sure your conf directory is writable"); + } + } + + void rollback() { + writer.rollback(); + statusMessages.put("", "Indexing failed. 
Rolled back all changes."); + addStatusMessage("Rolledback"); + } + + @SuppressWarnings("unchecked") + private void doFullDump() { + addStatusMessage("Full Dump Started"); + if(dataImporter.getConfig().isMultiThreaded && !verboseDebug){ + try { + LOG.info("running multithreaded full-import"); + new EntityRunner(root,null).run(null,Context.FULL_DUMP,null); + } catch (Exception e) { + LOG.error("error in import", e); + } + } else { + buildDocument(getVariableResolver(), null, null, root, true, null); + } + + } + + @SuppressWarnings("unchecked") + private void doDelta() { + addStatusMessage("Delta Dump started"); + VariableResolverImpl resolver = getVariableResolver(); + + if (document.deleteQuery != null) { + writer.deleteByQuery(document.deleteQuery); + } + + addStatusMessage("Identifying Delta"); + LOG.info("Starting delta collection."); + Set> deletedKeys = new HashSet>(); + Set> allPks = collectDelta(root, resolver, deletedKeys); + if (stop.get()) + return; + addStatusMessage("Deltas Obtained"); + addStatusMessage("Building documents"); + if (!deletedKeys.isEmpty()) { + allPks.removeAll(deletedKeys); + deleteAll(deletedKeys); + // Make sure that documents are not re-created + } + deletedKeys = null; + + statusMessages.put("Total Changed Documents", allPks.size()); + VariableResolverImpl vri = getVariableResolver(); + Iterator> pkIter = allPks.iterator(); + while (pkIter.hasNext()) { + Map map = pkIter.next(); + vri.addNamespace(DataConfig.IMPORTER_NS_SHORT + ".delta", map); + buildDocument(vri, null, map, root, true, null); + pkIter.remove(); + // check for abort + if (stop.get()) + break; + } + + if (!stop.get()) { + LOG.info("Delta Import completed successfully"); + } + } + + private void deleteAll(Set> deletedKeys) { + LOG.info("Deleting stale documents "); + Iterator> iter = deletedKeys.iterator(); + while (iter.hasNext()) { + Map map = iter.next(); + Object key = map.get(root.getPk()); + if(key == null) { + LOG.warn("no key was available for deleteted pk 
query"); + continue; + } + writer.deleteDoc(key); + importStatistics.deletedDocCount.incrementAndGet(); + iter.remove(); + } + } + Executor executorSvc = new ThreadPoolExecutor( + 0, + Integer.MAX_VALUE, + 5, TimeUnit.SECONDS, // terminate idle threads after 5 sec + new SynchronousQueue() // directly hand off tasks + ); + + @SuppressWarnings("unchecked") + public void addStatusMessage(String msg) { + statusMessages.put(msg, DataImporter.DATE_TIME_FORMAT.get().format(new Date())); + } + EntityRunner createRunner(DataConfig.Entity entity, EntityRunner parent){ + return new EntityRunner(entity, parent); + } + + /**This class is a just a structure to hold runtime information of one entity + * + */ + class EntityRunner { + final DataConfig.Entity entity; + private EntityProcessor entityProcessor; + private final List entityProcessorWrapper = new ArrayList(); + private DocWrapper docWrapper; + private volatile boolean entityInitialized ; + String currentProcess; + final ThreadLocal currentEntityProcWrapper = new ThreadLocal(); + + private ContextImpl context; + final EntityRunner parent; + final AtomicBoolean entityEnded = new AtomicBoolean(false); + private Exception exception; + + public EntityRunner(DataConfig.Entity entity, EntityRunner parent) { + this.parent = parent; + this.entity = entity; + if (entity.proc == null) { + entityProcessor = new SqlEntityProcessor(); + } else { + try { + entityProcessor = (EntityProcessor) loadClass(entity.proc, dataImporter.getCore()) + .newInstance(); + } catch (Exception e) { + wrapAndThrow(SEVERE, e, + "Unable to load EntityProcessor implementation for entity:" + entity.name); + } + } + int threads = 1; + if (entity.allAttributes.get("threads") != null) { + threads = Integer.parseInt(entity.allAttributes.get("threads")); + } + for (int i = 0; i < threads; i++) { + entityProcessorWrapper.add(new ThreadedEntityProcessorWrapper(entityProcessor, DocBuilder.this, this, getVariableResolver())); + } + context = new ThreadedContext(this, 
DocBuilder.this); + } + + + public void run(DocWrapper docWrapper, final String currProcess, final EntityRow rows) throws Exception { + entityInitialized = false; + this.docWrapper = docWrapper; + this.currentProcess = currProcess; + entityEnded.set(false); + try { + if(entityProcessorWrapper.size() <= 1){ + runAThread(entityProcessorWrapper.get(0), rows, currProcess); + } else { + final CountDownLatch latch = new CountDownLatch(entityProcessorWrapper.size()); + for (final ThreadedEntityProcessorWrapper processorWrapper : entityProcessorWrapper) { + Runnable runnable = new Runnable() { + public void run() { + try { + runAThread(processorWrapper, rows, currProcess); + }catch(Exception e) { + entityEnded.set(true); + exception = e; + } finally { + latch.countDown(); + } + } + }; + executorSvc.execute(runnable); + } + try { + latch.await(); + } catch (InterruptedException e) { + //TODO + } + Exception copy = exception; + if(copy != null){ + exception = null; + throw copy; + } + } + } finally { + entityProcessor.destroy(); + } + + + } + + private void runAThread(ThreadedEntityProcessorWrapper epw, EntityRow rows, String currProcess) throws Exception { + currentEntityProcWrapper.set(epw); + epw.threadedInit(context); + initEntity(); + try { + epw.init(rows); + DocWrapper docWrapper = this.docWrapper; + Context.CURRENT_CONTEXT.set(context); + for (; ;) { + if(DocBuilder.this.stop.get()) break; + try { + Map arow = epw.nextRow(); + if (arow == null) { + break; + } else { + importStatistics.rowsCount.incrementAndGet(); + if (docWrapper == null && entity.isDocRoot) { + docWrapper = new DocWrapper(); + context.setDoc(docWrapper); + DataConfig.Entity e = entity.parentEntity; + for (EntityRow row = rows; row != null&& e !=null; row = row.tail,e=e.parentEntity) { + addFields(e, docWrapper, row.row, epw.resolver); + } + } + if (docWrapper != null) { + handleSpecialCommands(arow, docWrapper); + addFields(entity, docWrapper, arow, epw.resolver); + } + if (entity.entities != null) 
{ + EntityRow nextRow = new EntityRow(arow, rows, entity.name); + for (DataConfig.Entity e : entity.entities) { + epw.children.get(e).run(docWrapper,currProcess,nextRow); + } + } + } + if (entity.isDocRoot) { + LOG.info("a row on docroot" + docWrapper); + if (!docWrapper.isEmpty()) { + LOG.info("adding a doc "+docWrapper); + boolean result = writer.upload(docWrapper); + docWrapper = null; + if (result){ + importStatistics.docCount.incrementAndGet(); + } else { + importStatistics.failedDocCount.incrementAndGet(); + } + } + } + } catch (DataImportHandlerException dihe) { + exception = dihe; + if(dihe.getErrCode() == SKIP_ROW || dihe.getErrCode() == SKIP) { + importStatistics.skipDocCount.getAndIncrement(); + exception = null;//should not propogate up + continue; + } + if (entity.isDocRoot) { + if (dihe.getErrCode() == DataImportHandlerException.SKIP) { + importStatistics.skipDocCount.getAndIncrement(); + exception = null;//should not propogate up + } else { + LOG.error("Exception while processing: " + + entity.name + " document : " + docWrapper, dihe); + } + if (dihe.getErrCode() == DataImportHandlerException.SEVERE) + throw dihe; + } else { + //if this is not the docRoot then the execution has happened in the same thread. so propogate up, + // it will be handled at the docroot + entityEnded.set(true); + throw dihe; + } + entityEnded.set(true); + } + } + } finally { + epw.destroy(); + currentEntityProcWrapper.remove(); + Context.CURRENT_CONTEXT.remove(); + } + } + + private void initEntity() { + if (!entityInitialized) { + synchronized (this) { + if (!entityInitialized) { + entityProcessor.init(context); + entityInitialized = true; + } + } + } + } + } + + /**A reverse linked list . 
+ * + */ + static class EntityRow { + final Map row; + final EntityRow tail; + final String name; + + EntityRow(Map row, EntityRow tail, String name) { + this.row = row; + this.tail = tail; + this.name = name; + } + } + + @SuppressWarnings("unchecked") + private void buildDocument(VariableResolverImpl vr, DocWrapper doc, + Map pk, DataConfig.Entity entity, boolean isRoot, + ContextImpl parentCtx) { + + EntityProcessorWrapper entityProcessor = getEntityProcessor(entity); + + ContextImpl ctx = new ContextImpl(entity, vr, null, + pk == null ? Context.FULL_DUMP : Context.DELTA_DUMP, + session, parentCtx, this); + entityProcessor.init(ctx); + Context.CURRENT_CONTEXT.set(ctx); + + if (requestParameters.start > 0) { + writer.log(SolrWriter.DISABLE_LOGGING, null, null); + } + + if (verboseDebug) { + writer.log(SolrWriter.START_ENTITY, entity.name, null); + } + + int seenDocCount = 0; + + try { + while (true) { + if (stop.get()) + return; + if(importStatistics.docCount.get() > (requestParameters.start + requestParameters.rows)) break; + try { + seenDocCount++; + + if (seenDocCount > requestParameters.start) { + writer.log(SolrWriter.ENABLE_LOGGING, null, null); + } + + if (verboseDebug && entity.isDocRoot) { + writer.log(SolrWriter.START_DOC, entity.name, null); + } + if (doc == null && entity.isDocRoot) { + doc = new DocWrapper(); + ctx.setDoc(doc); + DataConfig.Entity e = entity; + while (e.parentEntity != null) { + addFields(e.parentEntity, doc, (Map) vr + .resolve(e.parentEntity.name), vr); + e = e.parentEntity; + } + } + + Map arow = entityProcessor.nextRow(); + if (arow == null) { + break; + } + + // Support for start parameter in debug mode + if (entity.isDocRoot) { + if (seenDocCount <= requestParameters.start) + continue; + if (seenDocCount > requestParameters.start + requestParameters.rows) { + LOG.info("Indexing stopped at docCount = " + importStatistics.docCount); + break; + } + } + + if (verboseDebug) { + writer.log(SolrWriter.ENTITY_OUT, entity.name, arow); + 
} + importStatistics.rowsCount.incrementAndGet(); + if (doc != null) { + handleSpecialCommands(arow, doc); + addFields(entity, doc, arow, vr); + } + if (entity.entities != null) { + vr.addNamespace(entity.name, arow); + for (DataConfig.Entity child : entity.entities) { + buildDocument(vr, doc, null, child, false, ctx); + } + vr.removeNamespace(entity.name); + } + /*The child entities would have changed the CURRENT_CONTEXT. So when they are done, set it back to the old. + * + */ + Context.CURRENT_CONTEXT.set(ctx); + + if (entity.isDocRoot) { + if (stop.get()) + return; + if (!doc.isEmpty()) { + boolean result = writer.upload(doc); + doc = null; + if (result){ + importStatistics.docCount.incrementAndGet(); + } else { + importStatistics.failedDocCount.incrementAndGet(); + } + } + } + + } catch (DataImportHandlerException e) { + if (verboseDebug) { + writer.log(SolrWriter.ENTITY_EXCEPTION, entity.name, e); + } + if(e.getErrCode() == DataImportHandlerException.SKIP_ROW){ + continue; + } + if (isRoot) { + if (e.getErrCode() == DataImportHandlerException.SKIP) { + importStatistics.skipDocCount.getAndIncrement(); + doc = null; + } else { + LOG.error("Exception while processing: " + + entity.name + " document : " + doc, e); + } + if (e.getErrCode() == DataImportHandlerException.SEVERE) + throw e; + } else + throw e; + } catch (Throwable t) { + if (verboseDebug) { + writer.log(SolrWriter.ENTITY_EXCEPTION, entity.name, t); + } + throw new DataImportHandlerException(DataImportHandlerException.SEVERE, t); + } finally { + if (verboseDebug) { + writer.log(SolrWriter.ROW_END, entity.name, null); + if (entity.isDocRoot) + writer.log(SolrWriter.END_DOC, null, null); + Context.CURRENT_CONTEXT.remove(); + } + } + } + } finally { + if (verboseDebug) { + writer.log(SolrWriter.END_ENTITY, null, null); + } + entityProcessor.destroy(); + } + } + + static class DocWrapper extends SolrInputDocument { + //final SolrInputDocument solrDocument = new SolrInputDocument(); + Map session; + + 
public void setSessionAttribute(String key, Object val){ + if(session == null) session = new HashMap(); + session.put(key, val); + } + + public Object getSessionAttribute(String key) { + return session == null ? null : session.get(key); + } + } + + private void handleSpecialCommands(Map arow, DocWrapper doc) { + Object value = arow.get("$deleteDocById"); + if (value != null) { + if (value instanceof Collection) { + Collection collection = (Collection) value; + for (Object o : collection) { + writer.deleteDoc(o.toString()); + } + } else { + writer.deleteDoc(value); + } + } + value = arow.get("$deleteDocByQuery"); + if (value != null) { + if (value instanceof Collection) { + Collection collection = (Collection) value; + for (Object o : collection) { + writer.deleteByQuery(o.toString()); + } + } else { + writer.deleteByQuery(value.toString()); + } + } + value = arow.get("$docBoost"); + if (value != null) { + float value1 = 1.0f; + if (value instanceof Number) { + value1 = ((Number) value).floatValue(); + } else { + value1 = Float.parseFloat(value.toString()); + } + doc.setDocumentBoost(value1); + } + + value = arow.get("$skipDoc"); + if (value != null) { + if (Boolean.parseBoolean(value.toString())) { + throw new DataImportHandlerException(DataImportHandlerException.SKIP, + "Document skipped :" + arow); + } + } + + value = arow.get("$skipRow"); + if (value != null) { + if (Boolean.parseBoolean(value.toString())) { + throw new DataImportHandlerException(DataImportHandlerException.SKIP_ROW); + } + } + } + + @SuppressWarnings("unchecked") + private void addFields(DataConfig.Entity entity, DocWrapper doc, + Map arow, VariableResolver vr) { + for (Map.Entry entry : arow.entrySet()) { + String key = entry.getKey(); + Object value = entry.getValue(); + if (value == null) continue; + if (key.startsWith("$")) continue; + List field = entity.colNameVsField.get(key); + if (field == null && dataImporter.getSchema() != null) { + // This can be a dynamic field or a field which does 
not have an entry in data-config ( an implicit field) + SchemaField sf = dataImporter.getSchema().getFieldOrNull(key); + if (sf == null) { + sf = dataImporter.getConfig().lowerNameVsSchemaField.get(key.toLowerCase()); + } + if (sf != null) { + addFieldToDoc(entry.getValue(), sf.getName(), 1.0f, sf.multiValued(), doc); + } + //else do nothing. if we add it it may fail + } else { + if (field != null) { + for (DataConfig.Field f : field) { + String name = f.getName(); + if(f.dynamicName){ + name = vr.replaceTokens(name); + } + if (f.toWrite) addFieldToDoc(entry.getValue(), name, f.boost, f.multiValued, doc); + } + } + } + } + } + + private void addFieldToDoc(Object value, String name, float boost, boolean multiValued, DocWrapper doc) { + if (value instanceof Collection) { + Collection collection = (Collection) value; + if (multiValued) { + for (Object o : collection) { + if (o != null) + doc.addField(name, o, boost); + } + } else { + if (doc.getField(name) == null) + for (Object o : collection) { + if (o != null) { + doc.addField(name, o, boost); + break; + } + } + } + } else if (multiValued) { + if (value != null) { + doc.addField(name, value, boost); + } + } else { + if (doc.getField(name) == null && value != null) + doc.addField(name, value, boost); + } + } + + private EntityProcessorWrapper getEntityProcessor(DataConfig.Entity entity) { + if (entity.processor != null) + return entity.processor; + EntityProcessor entityProcessor = null; + if (entity.proc == null) { + entityProcessor = new SqlEntityProcessor(); + } else { + try { + entityProcessor = (EntityProcessor) loadClass(entity.proc, dataImporter.getCore()) + .newInstance(); + } catch (Exception e) { + wrapAndThrow (SEVERE,e, + "Unable to load EntityProcessor implementation for entity:" + entity.name); + } + } + return entity.processor = new EntityProcessorWrapper(entityProcessor, this); + } + + /** + *

    Collects unique keys of all Solr documents for which one or more source tables have been changed since the last + * indexed time.

    Note: In our definition, the unique key of a Solr document is the primary key of the top level + * entity (unless skipped using docRoot=false) in the Solr document in data-config.xml

    + * + * @return an iterator to the list of keys for which Solr documents should be updated. + */ + @SuppressWarnings("unchecked") + public Set> collectDelta(DataConfig.Entity entity, VariableResolverImpl resolver, + Set> deletedRows) { + //someone called abort + if (stop.get()) + return new HashSet(); + + EntityProcessor entityProcessor = getEntityProcessor(entity); + ContextImpl context1 = new ContextImpl(entity, resolver, null, Context.FIND_DELTA, session, null, this); + entityProcessor.init(context1); + + Set> myModifiedPks = new HashSet>(); + + if (entity.entities != null) { + + for (DataConfig.Entity entity1 : entity.entities) { + //this ensures that we start from the leaf nodes + myModifiedPks.addAll(collectDelta(entity1, resolver, deletedRows)); + //someone called abort + if (stop.get()) + return new HashSet(); + } + + } + // identifying the modified rows for this entity + + Set> deltaSet = new HashSet>(); + LOG.info("Running ModifiedRowKey() for Entity: " + entity.name); + //get the modified rows in this entity + while (true) { + Map row = entityProcessor.nextModifiedRowKey(); + + if (row == null) + break; + + deltaSet.add(row); + importStatistics.rowsCount.incrementAndGet(); + // check for abort + if (stop.get()) + return new HashSet(); + } + //get the deleted rows for this entity + Set> deletedSet = new HashSet>(); + Set> deltaRemoveSet = new HashSet>(); + while (true) { + Map row = entityProcessor.nextDeletedRowKey(); + if (row == null) + break; + + //Check to see if this delete is in the current delta set + for (Map modifiedRow : deltaSet) { + if (modifiedRow.get(entity.getPk()).equals(row.get(entity.getPk()))) { + deltaRemoveSet.add(modifiedRow); + } + } + + deletedSet.add(row); + importStatistics.rowsCount.incrementAndGet(); + // check for abort + if (stop.get()) + return new HashSet(); + } + + //asymmetric Set difference + deltaSet.removeAll(deltaRemoveSet); + + LOG.info("Completed ModifiedRowKey for Entity: " + entity.name + " rows obtained : " 
+ deltaSet.size()); + LOG.info("Completed DeletedRowKey for Entity: " + entity.name + " rows obtained : " + deletedSet.size()); + + myModifiedPks.addAll(deltaSet); + Set> parentKeyList = new HashSet>(); + //all that we have captured is useless (in a sub-entity) if no rows in the parent is modified because of these + //propogate up the changes in the chain + if (entity.parentEntity != null) { + // identifying deleted rows with deltas + + for (Map row : myModifiedPks) { + getModifiedParentRows(resolver.addNamespace(entity.name, row), entity.name, entityProcessor, parentKeyList); + // check for abort + if (stop.get()) + return new HashSet(); + } + // running the same for deletedrows + for (Map row : deletedSet) { + getModifiedParentRows(resolver.addNamespace(entity.name, row), entity.name, entityProcessor, parentKeyList); + // check for abort + if (stop.get()) + return new HashSet(); + } + } + LOG.info("Completed parentDeltaQuery for Entity: " + entity.name); + if (entity.isDocRoot) + deletedRows.addAll(deletedSet); + + return entity.isDocRoot ? myModifiedPks : new HashSet>( + parentKeyList); + } + + private void getModifiedParentRows(VariableResolverImpl resolver, + String entity, EntityProcessor entityProcessor, + Set> parentKeyList) { + try { + while (true) { + Map parentRow = entityProcessor + .nextModifiedParentRowKey(); + if (parentRow == null) + break; + + parentKeyList.add(parentRow); + importStatistics.rowsCount.incrementAndGet(); + // check for abort + if (stop.get()) + return; + } + + } finally { + resolver.removeNamespace(entity); + } + } + + public void abort() { + stop.set(true); + } + + private AtomicBoolean stop = new AtomicBoolean(false); + + public static final String TIME_ELAPSED = "Time Elapsed"; + + static String getTimeElapsedSince(long l) { + l = System.currentTimeMillis() - l; + return (l / (60000 * 60)) % 60 + ":" + (l / 60000) % 60 + ":" + (l / 1000) + % 60 + "." 
+ l % 1000; + } + + @SuppressWarnings("unchecked") + static Class loadClass(String name, SolrCore core) throws ClassNotFoundException { + try { + return core != null ? + core.getResourceLoader().findClass(name) : + Class.forName(name); + } catch (Exception e) { + try { + String n = DocBuilder.class.getPackage().getName() + "." + name; + return core != null ? + core.getResourceLoader().findClass(n) : + Class.forName(n); + } catch (Exception e1) { + throw new ClassNotFoundException("Unable to load " + name + " or " + DocBuilder.class.getPackage().getName() + "." + name, e); + } + } + } + + public static class Statistics { + public AtomicLong docCount = new AtomicLong(); + + public AtomicLong deletedDocCount = new AtomicLong(); + + public AtomicLong failedDocCount = new AtomicLong(); + + public AtomicLong rowsCount = new AtomicLong(); + + public AtomicLong queryCount = new AtomicLong(); + + public AtomicLong skipDocCount = new AtomicLong(); + + public Statistics add(Statistics stats) { + this.docCount.addAndGet(stats.docCount.get()); + this.deletedDocCount.addAndGet(stats.deletedDocCount.get()); + this.rowsCount.addAndGet(stats.rowsCount.get()); + this.queryCount.addAndGet(stats.queryCount.get()); + + return this; + } + + public Map getStatsSnapshot() { + Map result = new HashMap(); + result.put("docCount", docCount.get()); + result.put("deletedDocCount", deletedDocCount.get()); + result.put("rowCount", rowsCount.get()); + result.put("queryCount", rowsCount.get()); + result.put("skipDocCount", skipDocCount.get()); + return result; + } + + } + + private void cleanByQuery(String delQuery, AtomicBoolean completeCleanDone) { + delQuery = getVariableResolver().replaceTokens(delQuery); + if (requestParameters.clean) { + if (delQuery == null && !completeCleanDone.get()) { + writer.doDeleteAll(); + completeCleanDone.set(true); + } else if (delQuery != null) { + writer.deleteByQuery(delQuery); + } + } + } + + public static final String LAST_INDEX_TIME = "last_index_time"; + 
public static final String INDEX_START_TIME = "index_start_time"; +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/EntityProcessor.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/EntityProcessor.java new file mode 100644 index 00000000000..be3fe49d33b --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/EntityProcessor.java @@ -0,0 +1,117 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import java.util.Map; + +/** + *

    + * An instance of entity processor serves an entity. It is reused throughout the + * import process. + *

    + *

    + *

    + * Implementations of this abstract class must provide a public no-args constructor. + *

    + *

    + *

    + * Refer to http://wiki.apache.org/solr/DataImportHandler + * for more details. + *

    + *

    + * This API is experimental and may change in the future. + * + * @version $Id$ + * @since solr 1.3 + */ +public abstract class EntityProcessor { + + /** + * This method is called when it starts processing an entity. When it comes + * back to the entity it is called again. So it can reset anything at that point. + * For a rootmost entity this is called only once for an ingestion. For sub-entities , this + * is called multiple once for each row from its parent entity + * + * @param context The current context + */ + public abstract void init(Context context); + + /** + * This method helps streaming the data for each row . The implementation + * would fetch as many rows as needed and gives one 'row' at a time. Only this + * method is used during a full import + * + * @return A 'row'. The 'key' for the map is the column name and the 'value' + * is the value of that column. If there are no more rows to be + * returned, return 'null' + */ + public abstract Map nextRow(); + + /** + * This is used for delta-import. It gives the pks of the changed rows in this + * entity + * + * @return the pk vs value of all changed rows + */ + public abstract Map nextModifiedRowKey(); + + /** + * This is used during delta-import. It gives the primary keys of the rows + * that are deleted from this entity. If this entity is the root entity, solr + * document is deleted. If this is a sub-entity, the Solr document is + * considered as 'changed' and will be recreated + * + * @return the pk vs value of all changed rows + */ + public abstract Map nextDeletedRowKey(); + + /** + * This is used during delta-import. This gives the primary keys and their + * values of all the rows changed in a parent entity due to changes in this + * entity. + * + * @return the pk vs value of all changed rows in the parent entity + */ + public abstract Map nextModifiedParentRowKey(); + + /** + * Invoked for each parent-row after the last row for this entity is processed. 
If this is the root-most + * entity, it will be called only once in the import, at the very end. + * + */ + public abstract void destroy(); + + /** + * Invoked after the transformers are invoked. EntityProcessors can add, remove or modify values + * added by Transformers in this method. + * + * @param r The transformed row + * @since solr 1.4 + */ + public void postTransform(Map r) { + } + + /** + * Invoked when the Entity processor is destroyed towards the end of import. + * + * @since solr 1.4 + */ + public void close() { + //no-op + } +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/EntityProcessorBase.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/EntityProcessorBase.java new file mode 100644 index 00000000000..db19a3c624d --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/EntityProcessorBase.java @@ -0,0 +1,288 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import static org.apache.solr.handler.dataimport.DataImportHandlerException.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +/** + *

    Base class for all implementations of EntityProcessor

    Most implementations of EntityProcessor + * extend this base class which provides common functionality.

    + *

    + * This API is experimental and subject to change + * + * @version $Id$ + * @since solr 1.3 + */ +public class EntityProcessorBase extends EntityProcessor { + private static final Logger log = LoggerFactory.getLogger(EntityProcessorBase.class); + + protected boolean isFirstInit = true; + + protected String entityName; + + protected Context context; + + protected Iterator> rowIterator; + + protected List transformers; + + protected String query; + + protected String onError = ABORT; + + + public void init(Context context) { + rowIterator = null; + this.context = context; + if (isFirstInit) { + firstInit(context); + } + query = null; + } + + /**first time init call. do one-time operations here + */ + protected void firstInit(Context context) { + entityName = context.getEntityAttribute("name"); + String s = context.getEntityAttribute(ON_ERROR); + if (s != null) onError = s; + isFirstInit = false; + } + + + protected Map getNext() { + try { + if (rowIterator == null) + return null; + if (rowIterator.hasNext()) + return rowIterator.next(); + query = null; + rowIterator = null; + return null; + } catch (Exception e) { + log.error("getNext() failed for query '" + query + "'", e); + query = null; + rowIterator = null; + wrapAndThrow(DataImportHandlerException.WARN, e); + return null; + } + } + + public Map nextModifiedRowKey() { + return null; + } + + public Map nextDeletedRowKey() { + return null; + } + + public Map nextModifiedParentRowKey() { + return null; + } + + /** + * For a simple implementation, this is the only method that the sub-class should implement. This is intended to + * stream rows one-by-one. Return null to signal end of rows + * + * @return a row where the key is the name of the field and value can be any Object or a Collection of objects. 
Return + * null to signal end of rows + */ + public Map nextRow() { + return null;// do not do anything + } + + + public void destroy() { + /*no op*/ + } + + /** + * Only used by cache implementations + */ + protected String cachePk; + + /** + * Only used by cache implementations + */ + protected String cacheVariableName; + + /** + * Only used by cache implementations + */ + protected Map>> simpleCache; + + /** + * Only used by cache implementations + */ + protected Map>>> cacheWithWhereClause; + + protected List> dataSourceRowCache; + + /** + * Only used by cache implementations + */ + protected void cacheInit() { + if (simpleCache != null || cacheWithWhereClause != null) + return; + String where = context.getEntityAttribute("where"); + + String cacheKey = context.getEntityAttribute(CACHE_KEY); + String lookupKey = context.getEntityAttribute(CACHE_LOOKUP); + if(cacheKey != null && lookupKey == null){ + throw new DataImportHandlerException(DataImportHandlerException.SEVERE, + "'cacheKey' is specified for the entity "+ entityName+" but 'cacheLookup' is missing" ); + + } + if (where == null && cacheKey == null) { + simpleCache = new HashMap>>(); + } else { + if (where != null) { + String[] splits = where.split("="); + cachePk = splits[0]; + cacheVariableName = splits[1].trim(); + } else { + cachePk = cacheKey; + cacheVariableName = lookupKey; + } + cacheWithWhereClause = new HashMap>>>(); + } + } + + /** + * If the where clause is present the cache is sql Vs Map of key Vs List of Rows. Only used by cache implementations. 
+ * + * @param query the query string for which cached data is to be returned + * + * @return the cached row corresponding to the given query after all variables have been resolved + */ + protected Map getIdCacheData(String query) { + Map>> rowIdVsRows = cacheWithWhereClause + .get(query); + List> rows = null; + Object key = context.resolve(cacheVariableName); + if (key == null) { + throw new DataImportHandlerException(DataImportHandlerException.WARN, + "The cache lookup value : " + cacheVariableName + " is resolved to be null in the entity :" + + context.getEntityAttribute("name")); + + } + if (rowIdVsRows != null) { + rows = rowIdVsRows.get(key); + if (rows == null) + return null; + dataSourceRowCache = new ArrayList>(rows); + return getFromRowCacheTransformed(); + } else { + rows = getAllNonCachedRows(); + if (rows.isEmpty()) { + return null; + } else { + rowIdVsRows = new HashMap>>(); + for (Map row : rows) { + Object k = row.get(cachePk); + if (k == null) { + throw new DataImportHandlerException(DataImportHandlerException.WARN, + "No value available for the cache key : " + cachePk + " in the entity : " + + context.getEntityAttribute("name")); + } + if (!k.getClass().equals(key.getClass())) { + throw new DataImportHandlerException(DataImportHandlerException.WARN, + "The key in the cache type : " + k.getClass().getName() + + "is not same as the lookup value type " + key.getClass().getName() + " in the entity " + + context.getEntityAttribute("name")); + } + if (rowIdVsRows.get(k) == null) + rowIdVsRows.put(k, new ArrayList>()); + rowIdVsRows.get(k).add(row); + } + cacheWithWhereClause.put(query, rowIdVsRows); + if (!rowIdVsRows.containsKey(key)) + return null; + dataSourceRowCache = new ArrayList>(rowIdVsRows.get(key)); + if (dataSourceRowCache.isEmpty()) { + dataSourceRowCache = null; + return null; + } + return getFromRowCacheTransformed(); + } + } + } + + /** + *

    Get all the rows from the datasource for the given query. Only used by cache implementations.

    This + * must be implemented by sub-classes which intend to provide a cached implementation + * + * @return the list of all rows fetched from the datasource. + */ + protected List> getAllNonCachedRows() { + return Collections.EMPTY_LIST; + } + + /** + * If where clause is not present the cache is a Map of query vs List of Rows. Only used by cache implementations. + * + * @param query string for which cached row is to be returned + * + * @return the cached row corresponding to the given query + */ + protected Map getSimpleCacheData(String query) { + List> rows = simpleCache.get(query); + if (rows != null) { + dataSourceRowCache = new ArrayList>(rows); + return getFromRowCacheTransformed(); + } else { + rows = getAllNonCachedRows(); + if (rows.isEmpty()) { + return null; + } else { + dataSourceRowCache = new ArrayList>(rows); + simpleCache.put(query, rows); + return getFromRowCacheTransformed(); + } + } + } + + protected Map getFromRowCacheTransformed() { + Map r = dataSourceRowCache.remove(0); + if (dataSourceRowCache.isEmpty()) + dataSourceRowCache = null; + return r; + } + + public static final String TRANSFORMER = "transformer"; + + public static final String TRANSFORM_ROW = "transformRow"; + + public static final String ON_ERROR = "onError"; + + public static final String ABORT = "abort"; + + public static final String CONTINUE = "continue"; + + public static final String SKIP = "skip"; + + public static final String SKIP_DOC = "$skipDoc"; + + public static final String CACHE_KEY = "cacheKey"; + + public static final String CACHE_LOOKUP = "cacheLookup"; + +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/EntityProcessorWrapper.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/EntityProcessorWrapper.java new file mode 100644 index 00000000000..2a30c6ceb53 --- /dev/null +++ 
b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/EntityProcessorWrapper.java @@ -0,0 +1,289 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import static org.apache.solr.handler.dataimport.DataImportHandlerException.*; +import static org.apache.solr.handler.dataimport.EntityProcessorBase.*; +import static org.apache.solr.handler.dataimport.EntityProcessorBase.SKIP; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.lang.reflect.Method; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +/** + * A Wrapper over EntityProcessor instance which performs transforms and handles multi-row outputs correctly. 
+ * + * @version $Id$ + * @since solr 1.4 + */ +public class EntityProcessorWrapper extends EntityProcessor { + private static final Logger log = LoggerFactory.getLogger(EntityProcessorWrapper.class); + + EntityProcessor delegate; + private DocBuilder docBuilder; + + String onError; + protected Context context; + protected VariableResolverImpl resolver; + String entityName; + + protected List transformers; + + protected List> rowcache; + + public EntityProcessorWrapper(EntityProcessor delegate, DocBuilder docBuilder) { + this.delegate = delegate; + this.docBuilder = docBuilder; + } + + public void init(Context context) { + rowcache = null; + this.context = context; + resolver = (VariableResolverImpl) context.getVariableResolver(); + //context has to be set correctly . keep the copy of the old one so that it can be restored in destroy + if (entityName == null) { + onError = resolver.replaceTokens(context.getEntityAttribute(ON_ERROR)); + if (onError == null) onError = ABORT; + entityName = context.getEntityAttribute(DataConfig.NAME); + } + delegate.init(context); + + } + + @SuppressWarnings("unchecked") + void loadTransformers() { + String transClasses = context.getEntityAttribute(TRANSFORMER); + + if (transClasses == null) { + transformers = Collections.EMPTY_LIST; + return; + } + + String[] transArr = transClasses.split(","); + transformers = new ArrayList() { + public boolean add(Transformer transformer) { + if (docBuilder != null && docBuilder.verboseDebug) { + transformer = docBuilder.writer.getDebugLogger().wrapTransformer(transformer); + } + return super.add(transformer); + } + }; + for (String aTransArr : transArr) { + String trans = aTransArr.trim(); + if (trans.startsWith("script:")) { + String functionName = trans.substring("script:".length()); + ScriptTransformer scriptTransformer = new ScriptTransformer(); + scriptTransformer.setFunctionName(functionName); + transformers.add(scriptTransformer); + continue; + } + try { + Class clazz = 
DocBuilder.loadClass(trans, context.getSolrCore()); + if (Transformer.class.isAssignableFrom(clazz)) { + transformers.add((Transformer) clazz.newInstance()); + } else { + Method meth = clazz.getMethod(TRANSFORM_ROW, Map.class); + transformers.add(new ReflectionTransformer(meth, clazz, trans)); + } + } catch (NoSuchMethodException nsme){ + String msg = "Transformer :" + + trans + + "does not implement Transformer interface or does not have a transformRow(Map m)method"; + log.error(msg); + wrapAndThrow(SEVERE, nsme,msg); + } catch (Exception e) { + log.error("Unable to load Transformer: " + aTransArr, e); + wrapAndThrow(SEVERE, e,"Unable to load Transformer: " + trans); + } + } + + } + + @SuppressWarnings("unchecked") + static class ReflectionTransformer extends Transformer { + final Method meth; + + final Class clazz; + + final String trans; + + final Object o; + + public ReflectionTransformer(Method meth, Class clazz, String trans) + throws Exception { + this.meth = meth; + this.clazz = clazz; + this.trans = trans; + o = clazz.newInstance(); + } + + public Object transformRow(Map aRow, Context context) { + try { + return meth.invoke(o, aRow); + } catch (Exception e) { + log.warn("method invocation failed on transformer : " + trans, e); + throw new DataImportHandlerException(WARN, e); + } + } + } + + protected Map getFromRowCache() { + Map r = rowcache.remove(0); + if (rowcache.isEmpty()) + rowcache = null; + return r; + } + + @SuppressWarnings("unchecked") + protected Map applyTransformer(Map row) { + if(row == null) return null; + if (transformers == null) + loadTransformers(); + if (transformers == Collections.EMPTY_LIST) + return row; + Map transformedRow = row; + List> rows = null; + boolean stopTransform = checkStopTransform(row); + VariableResolverImpl resolver = (VariableResolverImpl) context.getVariableResolver(); + for (Transformer t : transformers) { + if (stopTransform) break; + try { + if (rows != null) { + List> tmpRows = new ArrayList>(); + for (Map 
map : rows) { + resolver.addNamespace(entityName, map); + Object o = t.transformRow(map, context); + if (o == null) + continue; + if (o instanceof Map) { + Map oMap = (Map) o; + stopTransform = checkStopTransform(oMap); + tmpRows.add((Map) o); + } else if (o instanceof List) { + tmpRows.addAll((List) o); + } else { + log.error("Transformer must return Map or a List>"); + } + } + rows = tmpRows; + } else { + resolver.addNamespace(entityName, transformedRow); + Object o = t.transformRow(transformedRow, context); + if (o == null) + return null; + if (o instanceof Map) { + Map oMap = (Map) o; + stopTransform = checkStopTransform(oMap); + transformedRow = (Map) o; + } else if (o instanceof List) { + rows = (List) o; + } else { + log.error("Transformer must return Map or a List>"); + } + } + } catch (Exception e) { + log.warn("transformer threw error", e); + if (ABORT.equals(onError)) { + wrapAndThrow(SEVERE, e); + } else if (SKIP.equals(onError)) { + wrapAndThrow(DataImportHandlerException.SKIP, e); + } + // onError = continue + } + } + if (rows == null) { + return transformedRow; + } else { + rowcache = rows; + return getFromRowCache(); + } + + } + + private boolean checkStopTransform(Map oMap) { + return oMap.get("$stopTransform") != null + && Boolean.parseBoolean(oMap.get("$stopTransform").toString()); + } + + public Map nextRow() { + if (rowcache != null) { + return getFromRowCache(); + } + while (true) { + Map arow = null; + try { + arow = delegate.nextRow(); + } catch (Exception e) { + if(ABORT.equals(onError)){ + wrapAndThrow(SEVERE, e); + } else { + //SKIP is not really possible. 
If this calls the nextRow() again the Entityprocessor would be in an inconisttent state + log.error("Exception in entity : "+ entityName, e); + return null; + } + } + if (arow == null) { + return null; + } else { + arow = applyTransformer(arow); + if (arow != null) { + delegate.postTransform(arow); + return arow; + } + } + } + } + + public Map nextModifiedRowKey() { + Map row = delegate.nextModifiedRowKey(); + row = applyTransformer(row); + rowcache = null; + return row; + } + + public Map nextDeletedRowKey() { + Map row = delegate.nextDeletedRowKey(); + row = applyTransformer(row); + rowcache = null; + return row; + } + + public Map nextModifiedParentRowKey() { + return delegate.nextModifiedParentRowKey(); + } + + public void destroy() { + delegate.destroy(); + } + + public VariableResolverImpl getVariableResolver() { + return (VariableResolverImpl) context.getVariableResolver(); + } + + public Context getContext() { + return context; + } + + @Override + public void close() { + delegate.close(); + } +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/Evaluator.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/Evaluator.java new file mode 100644 index 00000000000..9f6d11c372d --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/Evaluator.java @@ -0,0 +1,47 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +/** + *

    + * Pluggable functions for resolving variables + *

    + *

    + * Implementations of this abstract class must provide a public no-arg constructor. + *

    + *

    + * Refer to http://wiki.apache.org/solr/DataImportHandler + * for more details. + *

    + * This API is experimental and may change in the future. + * + * @version $Id$ + * @since solr 1.3 + */ +public abstract class Evaluator { + + /** + * Return a String after processing an expression and a VariableResolver + * + * @see org.apache.solr.handler.dataimport.VariableResolver + * @param expression string to be evaluated + * @param context instance + * @return the value of the given expression evaluated using the resolver + */ + public abstract String evaluate(String expression, Context context); +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/EvaluatorBag.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/EvaluatorBag.java new file mode 100644 index 00000000000..6e623f5eaee --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/EvaluatorBag.java @@ -0,0 +1,303 @@ +package org.apache.solr.handler.dataimport; +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import org.apache.solr.core.SolrCore; +import static org.apache.solr.handler.dataimport.DataConfig.CLASS; +import static org.apache.solr.handler.dataimport.DataConfig.NAME; +import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE; +import static org.apache.solr.handler.dataimport.DataImportHandlerException.wrapAndThrow; +import static org.apache.solr.handler.dataimport.DocBuilder.loadClass; +import org.apache.solr.util.DateMathParser; +import org.apache.solr.client.solrj.util.ClientUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.net.URLEncoder; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.*; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + *

    Holds definitions for evaluators provided by DataImportHandler

    Refer to http://wiki.apache.org/solr/DataImportHandler for more + * details.

    + *

    + * This API is experimental and may change in the future. + * + * @version $Id$ + * @since solr 1.3 + */ +public class EvaluatorBag { + private static final Logger LOG = LoggerFactory.getLogger(EvaluatorBag.class); + + public static final String DATE_FORMAT_EVALUATOR = "formatDate"; + + public static final String URL_ENCODE_EVALUATOR = "encodeUrl"; + + public static final String ESCAPE_SOLR_QUERY_CHARS = "escapeQueryChars"; + + public static final String SQL_ESCAPE_EVALUATOR = "escapeSql"; + static final Pattern FORMAT_METHOD = Pattern + .compile("^(\\w*?)\\((.*?)\\)$"); + + /** + *

    Returns an Evaluator instance meant to be used for escaping values in SQL queries.

    It + * escapes the value of the given expression by replacing all occurrences of single-quotes by two single-quotes and + * similarily for double-quotes

    + * + * @return an Evaluator instance capable of SQL-escaping expressions. + */ + public static Evaluator getSqlEscapingEvaluator() { + return new Evaluator() { + public String evaluate(String expression, Context context) { + List l = parseParams(expression, context.getVariableResolver()); + if (l.size() != 1) { + throw new DataImportHandlerException(SEVERE, "'escapeSql' must have at least one parameter "); + } + String s = l.get(0).toString(); + // escape single quote with two single quotes, double quote + // with two doule quotes, and backslash with double backslash. + // See: http://dev.mysql.com/doc/refman/4.1/en/mysql-real-escape-string.html + return s.replaceAll("'", "''").replaceAll("\"", "\"\"").replaceAll("\\\\", "\\\\\\\\"); + } + }; + } + + /** + *

    Returns an Evaluator instance meant to be used for escaping reserved characters in Solr + * queries

    + * + * @return an Evaluator instance capable of escaping reserved characters in solr queries. + * + * @see org.apache.solr.client.solrj.util.ClientUtils#escapeQueryChars(String) + */ + public static Evaluator getSolrQueryEscapingEvaluator() { + return new Evaluator() { + public String evaluate(String expression, Context context) { + List l = parseParams(expression, context.getVariableResolver()); + if (l.size() != 1) { + throw new DataImportHandlerException(SEVERE, "'escapeQueryChars' must have at least one parameter "); + } + String s = l.get(0).toString(); + return ClientUtils.escapeQueryChars(s); + } + }; + } + + /** + *

    Returns an Evaluator instance capable of URL-encoding expressions. The expressions are evaluated + * using a VariableResolver

    + * + * @return an Evaluator instance capable of URL-encoding expressions. + */ + public static Evaluator getUrlEvaluator() { + return new Evaluator() { + public String evaluate(String expression, Context context) { + List l = parseParams(expression, context.getVariableResolver()); + if (l.size() != 1) { + throw new DataImportHandlerException(SEVERE, "'encodeUrl' must have at least one parameter "); + } + String s = l.get(0).toString(); + + try { + return URLEncoder.encode(s.toString(), "UTF-8"); + } catch (Exception e) { + wrapAndThrow(SEVERE, e, "Unable to encode expression: " + expression + " with value: " + s); + return null; + } + } + }; + } + + /** + *

    Returns an Evaluator instance capable of formatting values using a given date format.

    + * The value to be formatted can be a entity.field or a date expression parsed with DateMathParser class. + * If the value is in a String, then it is assumed to be a datemath expression, otherwise it resolved using a + * VariableResolver instance

    + * + * @return an Evaluator instance capable of formatting values to a given date format + * + * @see DateMathParser + */ + public static Evaluator getDateFormatEvaluator() { + return new Evaluator() { + public String evaluate(String expression, Context context) { + List l = parseParams(expression, context.getVariableResolver()); + if (l.size() != 2) { + throw new DataImportHandlerException(SEVERE, "'formatDate()' must have two parameters "); + } + Object o = l.get(0); + Object format = l.get(1); + if (format instanceof VariableWrapper) { + VariableWrapper wrapper = (VariableWrapper) format; + o = wrapper.resolve(); + if (o == null) { + format = wrapper.varName; + LOG.warn("Deprecated syntax used. The syntax of formatDate has been changed to formatDate(, ''). " + + "The old syntax will stop working in Solr 1.5"); + } else { + format = o.toString(); + } + } + String dateFmt = format.toString(); + SimpleDateFormat fmt = new SimpleDateFormat(dateFmt); + Date date = null; + if (o instanceof VariableWrapper) { + VariableWrapper variableWrapper = (VariableWrapper) o; + Object variableval = variableWrapper.resolve(); + if (variableval instanceof Date) { + date = (Date) variableval; + } else { + String s = variableval.toString(); + try { + date = DataImporter.DATE_TIME_FORMAT.get().parse(s); + } catch (ParseException exp) { + wrapAndThrow(SEVERE, exp, "Invalid expression for date"); + } + } + } else { + String datemathfmt = o.toString(); + datemathfmt = datemathfmt.replaceAll("NOW", ""); + try { + date = dateMathParser.parseMath(datemathfmt); + } catch (ParseException e) { + wrapAndThrow(SEVERE, e, "Invalid expression for date"); + } + } + return fmt.format(date); + } + + }; + } + + static Map getFunctionsNamespace(final List> fn, DocBuilder docBuilder) { + final Map evaluators = new HashMap(); + evaluators.put(DATE_FORMAT_EVALUATOR, getDateFormatEvaluator()); + evaluators.put(SQL_ESCAPE_EVALUATOR, getSqlEscapingEvaluator()); + evaluators.put(URL_ENCODE_EVALUATOR, 
getUrlEvaluator()); + evaluators.put(ESCAPE_SOLR_QUERY_CHARS, getSolrQueryEscapingEvaluator()); + SolrCore core = docBuilder == null ? null : docBuilder.dataImporter.getCore(); + for (Map map : fn) { + try { + evaluators.put(map.get(NAME), (Evaluator) loadClass(map.get(CLASS), core).newInstance()); + } catch (Exception e) { + wrapAndThrow(SEVERE, e, "Unable to instantiate evaluator: " + map.get(CLASS)); + } + } + + return new HashMap() { + @Override + public String get(Object key) { + if (key == null) + return null; + Matcher m = FORMAT_METHOD.matcher((String) key); + if (!m.find()) + return null; + String fname = m.group(1); + Evaluator evaluator = evaluators.get(fname); + if (evaluator == null) + return null; + VariableResolverImpl vri = VariableResolverImpl.CURRENT_VARIABLE_RESOLVER.get(); + return evaluator.evaluate(m.group(2), Context.CURRENT_CONTEXT.get()); + } + + }; + } + + /** + * Parses a string of expression into separate params. The values are separated by commas. each value will be + * translated into one of the following: + * <ol> + * <li>If it is in single quotes the value will be translated to a String</li> + * <li>If is is not in quotes and is a number a it will be translated into a Double</li> + * <li>else it is a variable which can be resolved and it will be put in as an instance of VariableWrapper</li> + * </ol> + * + * @param expression the expression to be parsed + * @param vr the VariableResolver instance for resolving variables + * + * @return a List of objects which can either be a string, number or a variable wrapper + */ + public static List parseParams(String expression, VariableResolver vr) { + List result = new ArrayList(); + expression = expression.trim(); + String[] ss = expression.split(","); + for (int i = 0; i < ss.length; i++) { + ss[i] = ss[i].trim(); + if (ss[i].startsWith("'")) {//a string param has started + StringBuilder sb = new StringBuilder(); + while (true) { + sb.append(ss[i]); + if (ss[i].endsWith("'")) break; + i++; + 
if (i >= ss.length) + throw new DataImportHandlerException(SEVERE, "invalid string at " + ss[i - 1] + " in function params: " + expression); + sb.append(","); + } + String s = sb.substring(1, sb.length() - 1); + s = s.replaceAll("\\\\'", "'"); + result.add(s); + } else { + if (Character.isDigit(ss[i].charAt(0))) { + try { + Double doub = Double.parseDouble(ss[i]); + result.add(doub); + } catch (NumberFormatException e) { + if (vr.resolve(ss[i]) == null) { + wrapAndThrow( + SEVERE, e, "Invalid number :" + ss[i] + + "in parameters " + expression); + } + } + } else { + result.add(new VariableWrapper(ss[i], vr)); + } + } + } + return result; + } + + public static class VariableWrapper { + String varName; + VariableResolver vr; + + public VariableWrapper(String s, VariableResolver vr) { + this.varName = s; + this.vr = vr; + } + + public Object resolve() { + return vr.resolve(varName); + + } + + public String toString() { + Object o = vr.resolve(varName); + return o == null ? null : o.toString(); + + } + } + + static Pattern IN_SINGLE_QUOTES = Pattern.compile("^'(.*?)'$"); + + static DateMathParser dateMathParser = new DateMathParser(TimeZone + .getDefault(), Locale.getDefault()); + +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/EventListener.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/EventListener.java new file mode 100644 index 00000000000..5ac603cff9d --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/EventListener.java @@ -0,0 +1,36 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +/** + * Event listener for DataImportHandler + * + * This API is experimental and subject to change + * + * @version $Id$ + * @since solr 1.4 + */ +public interface EventListener { + + /** + * Event callback + * + * @param ctx the Context in which this event was called + */ + public void onEvent(Context ctx); + +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FieldReaderDataSource.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FieldReaderDataSource.java new file mode 100644 index 00000000000..8c08360ff94 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FieldReaderDataSource.java @@ -0,0 +1,136 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE; +import static org.apache.solr.handler.dataimport.DataImportHandlerException.wrapAndThrow; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.*; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; +import java.sql.Blob; +import java.sql.Clob; +import java.util.Properties; + +/** + * This can be useful for users who have a DB field containing xml and wish to use a nested XPathEntityProcessor + *

    + * The datasouce may be configured as follows + *

    + * + *

    + * The enity which uses this datasource must keep the url value as the variable name url="field-name" + *

    + * The fieldname must be resolvable from VariableResolver + *

    + * This may be used with any EntityProcessor which uses a DataSource eg:XPathEntityProcessor + *

    + * Supports String, BLOB, CLOB data types and there is an extra field (in the entity) 'encoding' for BLOB types + * + * @version $Id$ + * @since 1.4 + */ +public class FieldReaderDataSource extends DataSource { + private static final Logger LOG = LoggerFactory.getLogger(FieldReaderDataSource.class); + protected VariableResolver vr; + protected String dataField; + private String encoding; + private EntityProcessorWrapper entityProcessor; + + public void init(Context context, Properties initProps) { + dataField = context.getEntityAttribute("dataField"); + encoding = context.getEntityAttribute("encoding"); + entityProcessor = (EntityProcessorWrapper) context.getEntityProcessor(); + /*no op*/ + } + + public Reader getData(String query) { + Object o = entityProcessor.getVariableResolver().resolve(dataField); + if (o == null) { + throw new DataImportHandlerException (SEVERE, "No field available for name : " +dataField); + } + if (o instanceof String) { + return new StringReader((String) o); + } else if (o instanceof Clob) { + Clob clob = (Clob) o; + try { + //Most of the JDBC drivers have getCharacterStream defined as public + // so let us just check it + return readCharStream(clob); + } catch (Exception e) { + LOG.info("Unable to get data from CLOB"); + return null; + + } + + } else if (o instanceof Blob) { + Blob blob = (Blob) o; + try { + //Most of the JDBC drivers have getBinaryStream defined as public + // so let us just check it + Method m = blob.getClass().getDeclaredMethod("getBinaryStream"); + if (Modifier.isPublic(m.getModifiers())) { + return getReader(m, blob); + } else { + // force invoke + m.setAccessible(true); + return getReader(m, blob); + } + } catch (Exception e) { + LOG.info("Unable to get data from BLOB"); + return null; + + } + } else { + return new StringReader(o.toString()); + } + + } + + static Reader readCharStream(Clob clob) { + try { + Method m = clob.getClass().getDeclaredMethod("getCharacterStream"); + if 
(Modifier.isPublic(m.getModifiers())) { + return (Reader) m.invoke(clob); + } else { + // force invoke + m.setAccessible(true); + return (Reader) m.invoke(clob); + } + } catch (Exception e) { + wrapAndThrow(SEVERE, e,"Unable to get reader from clob"); + return null;//unreachable + } + } + + private Reader getReader(Method m, Blob blob) + throws IllegalAccessException, InvocationTargetException, UnsupportedEncodingException { + InputStream is = (InputStream) m.invoke(blob); + if (encoding == null) { + return (new InputStreamReader(is)); + } else { + return (new InputStreamReader(is, encoding)); + } + } + + public void close() { + + } +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FieldStreamDataSource.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FieldStreamDataSource.java new file mode 100644 index 00000000000..f476adf696c --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FieldStreamDataSource.java @@ -0,0 +1,95 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.handler.dataimport; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; +import java.sql.Blob; +import java.util.Properties; + +import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE; + + +/** + * This can be useful for users who have a DB field containing BLOBs which may be Rich documents + *

    + * The datasouce may be configured as follows + *

    + * + *

    + * The enity which uses this datasource must keep and attribute dataField + *

    + * The fieldname must be resolvable from VariableResolver + *

    + * This may be used with any EntityProcessor which uses a DataSource eg:TikaEntityProcessor + *

    + * + * @version $Id$ + * @since 1.5 + */ +public class FieldStreamDataSource extends DataSource { + private static final Logger LOG = LoggerFactory.getLogger(FieldReaderDataSource.class); + protected VariableResolver vr; + protected String dataField; + private EntityProcessorWrapper wrapper; + + public void init(Context context, Properties initProps) { + dataField = context.getEntityAttribute("dataField"); + wrapper = (EntityProcessorWrapper) context.getEntityProcessor(); + /*no op*/ + } + + public InputStream getData(String query) { + Object o = wrapper.getVariableResolver().resolve(dataField); + if (o == null) { + throw new DataImportHandlerException(SEVERE, "No field available for name : " + dataField); + } + if (o instanceof Blob) { + Blob blob = (Blob) o; + try { + //Most of the JDBC drivers have getBinaryStream defined as public + // so let us just check it + Method m = blob.getClass().getDeclaredMethod("getBinaryStream"); + if (Modifier.isPublic(m.getModifiers())) { + return (InputStream) m.invoke(blob); + } else { + // force invoke + m.setAccessible(true); + return (InputStream) m.invoke(blob); + } + } catch (Exception e) { + LOG.info("Unable to get data from BLOB"); + return null; + + } + } else if (o instanceof byte[]) { + byte[] bytes = (byte[]) o; + return new ByteArrayInputStream(bytes); + } else { + throw new RuntimeException("unsupported type : " + o.getClass()); + } + + } + + public void close() { + } +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FileDataSource.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FileDataSource.java new file mode 100644 index 00000000000..daacdaec48d --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FileDataSource.java @@ -0,0 +1,136 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import java.io.*; +import java.util.Properties; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import static org.apache.solr.handler.dataimport.DataImportHandlerException.wrapAndThrow; +import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE; + +/** + *

    + * A DataSource which reads from local files + *

    + *

    + * The file is read with the default platform encoding. It can be overriden by + * specifying the encoding in solrconfig.xml + *

    + *

    + *

    + * Refer to http://wiki.apache.org/solr/DataImportHandler + * for more details. + *

    + *

    + * This API is experimental and may change in the future. + * + * @version $Id$ + * @since solr 1.3 + */ +public class FileDataSource extends DataSource { + public static final String BASE_PATH = "basePath"; + + /** + * The basePath for this data source + */ + protected String basePath; + + /** + * The encoding using which the given file should be read + */ + protected String encoding = null; + + private static final Logger LOG = LoggerFactory.getLogger(FileDataSource.class); + + public void init(Context context, Properties initProps) { + basePath = initProps.getProperty(BASE_PATH); + if (initProps.get(URLDataSource.ENCODING) != null) + encoding = initProps.getProperty(URLDataSource.ENCODING); + } + + /** + *

    + * Returns a reader for the given file. + *

    + *

    + * If the given file is not absolute, we try to construct an absolute path + * using basePath configuration. If that fails, then the relative path is + * tried. If file is not found a RuntimeException is thrown. + *

    + *

    + * It is the responsibility of the calling method to properly close the + * returned Reader + *

    + */ + public Reader getData(String query) { + File f = getFile(basePath,query); + try { + return openStream(f); + } catch (Exception e) { + wrapAndThrow(SEVERE,e,"Unable to open File : "+f.getAbsolutePath()); + return null; + } + } + + static File getFile(String basePath, String query) { + try { + File file0 = new File(query); + File file = file0; + + if (!file.isAbsolute()) + file = new File(basePath + query); + + if (file.isFile() && file.canRead()) { + LOG.debug("Accessing File: " + file.toString()); + return file; + } else if (file != file0) + if (file0.isFile() && file0.canRead()) { + LOG.debug("Accessing File0: " + file0.toString()); + return file0; + } + + throw new FileNotFoundException("Could not find file: " + query); + } catch (FileNotFoundException e) { + throw new RuntimeException(e); + } + } + + /** + * Open a {@link java.io.Reader} for the given file name + * + * @param file a {@link java.io.File} instance + * @return a Reader on the given file + * @throws FileNotFoundException if the File does not exist + * @throws UnsupportedEncodingException if the encoding is unsupported + * @since solr 1.4 + */ + protected Reader openStream(File file) throws FileNotFoundException, + UnsupportedEncodingException { + if (encoding == null) { + return new InputStreamReader(new FileInputStream(file)); + } else { + return new InputStreamReader(new FileInputStream(file), encoding); + } + } + + public void close() { + + } +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FileListEntityProcessor.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FileListEntityProcessor.java new file mode 100644 index 00000000000..7fa3ab1fdce --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FileListEntityProcessor.java @@ -0,0 +1,292 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import java.io.File; +import java.io.FilenameFilter; +import java.text.ParseException; +import java.util.*; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + *

    + * An EntityProcessor instance which can stream file names found in a given base + * directory matching patterns and returning rows containing file information. + *

    + *

    + *

    + * It supports querying a give base directory by matching: + *

      + *
    • regular expressions to file names
    • + *
    • excluding certain files based on regular expression
    • + *
    • last modification date (newer or older than a given date or time)
    • + *
    • size (bigger or smaller than size given in bytes)
    • + *
    • recursively iterating through sub-directories
    • + *
    + * Its output can be used along with FileDataSource to read from files in file + * systems. + *

    + *

    + *

    + * Refer to http://wiki.apache.org/solr/DataImportHandler + * for more details. + *

    + *

    + * This API is experimental and may change in the future. + * + * @version $Id$ + * @since solr 1.3 + */ +public class FileListEntityProcessor extends EntityProcessorBase { + /** + * A regex pattern to identify files given in data-config.xml after resolving any variables + */ + protected String fileName; + + /** + * The baseDir given in data-config.xml after resolving any variables + */ + protected String baseDir; + + /** + * A Regex pattern of excluded file names as given in data-config.xml after resolving any variables + */ + protected String excludes; + + /** + * The newerThan given in data-config as a {@link java.util.Date} + *

    + * Note: This variable is resolved just-in-time in the {@link #nextRow()} method. + *

    + */ + protected Date newerThan; + + /** + * The newerThan given in data-config as a {@link java.util.Date} + */ + protected Date olderThan; + + /** + * The biggerThan given in data-config as a long value + *

    + * Note: This variable is resolved just-in-time in the {@link #nextRow()} method. + *

    + */ + protected long biggerThan = -1; + + /** + * The smallerThan given in data-config as a long value + *

    + * Note: This variable is resolved just-in-time in the {@link #nextRow()} method. + *

    + */ + protected long smallerThan = -1; + + /** + * The recursive given in data-config. Default value is false. + */ + protected boolean recursive = false; + + private Pattern fileNamePattern, excludesPattern; + + public void init(Context context) { + super.init(context); + fileName = context.getEntityAttribute(FILE_NAME); + if (fileName != null) { + fileName = context.replaceTokens(fileName); + fileNamePattern = Pattern.compile(fileName); + } + baseDir = context.getEntityAttribute(BASE_DIR); + if (baseDir == null) + throw new DataImportHandlerException(DataImportHandlerException.SEVERE, + "'baseDir' is a required attribute"); + baseDir = context.replaceTokens(baseDir); + File dir = new File(baseDir); + if (!dir.isDirectory()) + throw new DataImportHandlerException(DataImportHandlerException.SEVERE, + "'baseDir' value: " + baseDir + " is not a directory"); + + String r = context.getEntityAttribute(RECURSIVE); + if (r != null) + recursive = Boolean.parseBoolean(r); + excludes = context.getEntityAttribute(EXCLUDES); + if (excludes != null) { + excludes = context.replaceTokens(excludes); + excludesPattern = Pattern.compile(excludes); + } + } + + /** + * Get the Date object corresponding to the given string. + * + * @param dateStr the date string. 
It can be a DateMath string or it may have a evaluator function + * @return a Date instance corresponding to the input string + */ + private Date getDate(String dateStr) { + if (dateStr == null) + return null; + + Matcher m = PLACE_HOLDER_PATTERN.matcher(dateStr); + if (m.find()) { + Object o = context.resolve(m.group(1)); + if (o instanceof Date) return (Date)o; + dateStr = (String) o; + } else { + dateStr = context.replaceTokens(dateStr); + } + m = EvaluatorBag.IN_SINGLE_QUOTES.matcher(dateStr); + if (m.find()) { + String expr = null; + expr = m.group(1).replaceAll("NOW", ""); + try { + return EvaluatorBag.dateMathParser.parseMath(expr); + } catch (ParseException exp) { + throw new DataImportHandlerException(DataImportHandlerException.SEVERE, + "Invalid expression for date", exp); + } + } + try { + return DataImporter.DATE_TIME_FORMAT.get().parse(dateStr); + } catch (ParseException exp) { + throw new DataImportHandlerException(DataImportHandlerException.SEVERE, + "Invalid expression for date", exp); + } + } + + /** + * Get the Long value for the given string after resolving any evaluator or variable. 
+ * + * @param sizeStr the size as a string + * @return the Long value corresponding to the given string + */ + private Long getSize(String sizeStr) { + if (sizeStr == null) + return null; + + Matcher m = PLACE_HOLDER_PATTERN.matcher(sizeStr); + if (m.find()) { + Object o = context.resolve(m.group(1)); + if (o instanceof Number) { + Number number = (Number) o; + return number.longValue(); + } + sizeStr = (String) o; + } else { + sizeStr = context.replaceTokens(sizeStr); + } + + return Long.parseLong(sizeStr); + } + + public Map nextRow() { + if (rowIterator != null) + return getNext(); + List> fileDetails = new ArrayList>(); + File dir = new File(baseDir); + + String dateStr = context.getEntityAttribute(NEWER_THAN); + newerThan = getDate(dateStr); + dateStr = context.getEntityAttribute(OLDER_THAN); + olderThan = getDate(dateStr); + String biggerThanStr = context.getEntityAttribute(BIGGER_THAN); + if (biggerThanStr != null) + biggerThan = getSize(biggerThanStr); + String smallerThanStr = context.getEntityAttribute(SMALLER_THAN); + if (smallerThanStr != null) + smallerThan = getSize(smallerThanStr); + + getFolderFiles(dir, fileDetails); + rowIterator = fileDetails.iterator(); + return getNext(); + } + + private void getFolderFiles(File dir, final List> fileDetails) { + // Fetch an array of file objects that pass the filter, however the + // returned array is never populated; accept() always returns false. + // Rather we make use of the fileDetails array which is populated as + // a side affect of the accept method. 
+ dir.list(new FilenameFilter() { + public boolean accept(File dir, String name) { + File fileObj = new File(dir, name); + if (fileObj.isDirectory()) { + if (recursive) getFolderFiles(fileObj, fileDetails); + } else if (fileNamePattern == null) { + addDetails(fileDetails, dir, name); + } else if (fileNamePattern.matcher(name).find()) { + if (excludesPattern != null && excludesPattern.matcher(name).find()) + return false; + addDetails(fileDetails, dir, name); + } + return false; + } + }); + } + + private void addDetails(List> files, File dir, String name) { + Map details = new HashMap(); + File aFile = new File(dir, name); + if (aFile.isDirectory()) return; + long sz = aFile.length(); + Date lastModified = new Date(aFile.lastModified()); + if (biggerThan != -1 && sz <= biggerThan) + return; + if (smallerThan != -1 && sz >= smallerThan) + return; + if (olderThan != null && lastModified.after(olderThan)) + return; + if (newerThan != null && lastModified.before(newerThan)) + return; + details.put(DIR, dir.getAbsolutePath()); + details.put(FILE, name); + details.put(ABSOLUTE_FILE, aFile.getAbsolutePath()); + details.put(SIZE, sz); + details.put(LAST_MODIFIED, lastModified); + files.add(details); + } + + public static final Pattern PLACE_HOLDER_PATTERN = Pattern + .compile("\\$\\{(.*?)\\}"); + + public static final String DIR = "fileDir"; + + public static final String FILE = "file"; + + public static final String ABSOLUTE_FILE = "fileAbsolutePath"; + + public static final String SIZE = "fileSize"; + + public static final String LAST_MODIFIED = "fileLastModified"; + + public static final String FILE_NAME = "fileName"; + + public static final String BASE_DIR = "baseDir"; + + public static final String EXCLUDES = "excludes"; + + public static final String NEWER_THAN = "newerThan"; + + public static final String OLDER_THAN = "olderThan"; + + public static final String BIGGER_THAN = "biggerThan"; + + public static final String SMALLER_THAN = "smallerThan"; + + public static 
final String RECURSIVE = "recursive"; + +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/HTMLStripTransformer.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/HTMLStripTransformer.java new file mode 100644 index 00000000000..9d2f7034fc0 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/HTMLStripTransformer.java @@ -0,0 +1,97 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import org.apache.solr.analysis.HTMLStripCharFilter; +import org.apache.lucene.analysis.CharReader; + +import java.io.IOException; +import java.io.StringReader; +import java.io.BufferedReader; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** + * A Transformer implementation which strip off HTML tags using org.apache.solr.analysis.HTMLStripReader This is useful + * in case you don't need this HTML anyway. 
+ * + * @version $Id$ + * @see org.apache.solr.analysis.HTMLStripCharFilter + * @since solr 1.4 + */ +public class HTMLStripTransformer extends Transformer { + + @Override + @SuppressWarnings("unchecked") + public Object transformRow(Map row, Context context) { + List> fields = context.getAllEntityFields(); + for (Map field : fields) { + String col = field.get(DataImporter.COLUMN); + String splitHTML = context.replaceTokens(field.get(STRIP_HTML)); + if (!TRUE.equals(splitHTML)) + continue; + Object tmpVal = row.get(col); + if (tmpVal == null) + continue; + + if (tmpVal instanceof List) { + List inputs = (List) tmpVal; + List results = new ArrayList(); + for (String input : inputs) { + if (input == null) + continue; + Object o = stripHTML(input, col); + if (o != null) + results.add(o); + } + row.put(col, results); + } else { + String value = tmpVal.toString(); + Object o = stripHTML(value, col); + if (o != null) + row.put(col, o); + } + } + return row; + } + + private Object stripHTML(String value, String column) { + StringBuilder out = new StringBuilder(); + StringReader strReader = new StringReader(value); + try { + HTMLStripCharFilter html = new HTMLStripCharFilter(CharReader.get(strReader.markSupported() ? 
strReader : new BufferedReader(strReader))); + char[] cbuf = new char[1024 * 10]; + while (true) { + int count = html.read(cbuf); + if (count == -1) + break; // end of stream mark is -1 + if (count > 0) + out.append(cbuf, 0, count); + } + html.close(); + } catch (IOException e) { + throw new DataImportHandlerException(DataImportHandlerException.SEVERE, + "Failed stripping HTML for column: " + column, e); + } + return out.toString(); + } + + public static final String STRIP_HTML = "stripHTML"; + + public static final String TRUE = "true"; +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/HttpDataSource.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/HttpDataSource.java new file mode 100644 index 00000000000..d69828683e8 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/HttpDataSource.java @@ -0,0 +1,51 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.handler.dataimport; + +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.Reader; +import java.net.URL; +import java.net.URLConnection; +import java.util.Properties; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + *

    + * A data source implementation which can be used to read character files using + * HTTP. + *

    + *

    + *

    + * Refer to http://wiki.apache.org/solr/DataImportHandler + * for more details. + *

    + *

    + * This API is experimental and may change in the future. + * + * @version $Id$ + * @since solr 1.3 + * @deprecated use {@link org.apache.solr.handler.dataimport.URLDataSource} instead + */ +@Deprecated +public class HttpDataSource extends URLDataSource { + +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/JdbcDataSource.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/JdbcDataSource.java new file mode 100644 index 00000000000..cb38e480bf0 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/JdbcDataSource.java @@ -0,0 +1,415 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import static org.apache.solr.handler.dataimport.DataImportHandlerException.wrapAndThrow; +import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.naming.InitialContext; +import java.sql.*; +import java.util.*; +import java.util.concurrent.Callable; + +/** + *

    A DataSource implementation which can fetch data using JDBC.

    Refer to http://wiki.apache.org/solr/DataImportHandler for more + * details.

    + *

    + * This API is experimental and may change in the future. + * + * @version $Id$ + * @since solr 1.3 + */ +public class JdbcDataSource extends + DataSource>> { + private static final Logger LOG = LoggerFactory.getLogger(JdbcDataSource.class); + + protected Callable factory; + + private long connLastUsed = 0; + + private Connection conn; + + private Map fieldNameVsType = new HashMap(); + + private boolean convertType = false; + + private int batchSize = FETCH_SIZE; + + private int maxRows = 0; + + public void init(Context context, Properties initProps) { + Object o = initProps.get(CONVERT_TYPE); + if (o != null) + convertType = Boolean.parseBoolean(o.toString()); + + factory = createConnectionFactory(context, initProps); + + String bsz = initProps.getProperty("batchSize"); + if (bsz != null) { + bsz = context.replaceTokens(bsz); + try { + batchSize = Integer.parseInt(bsz); + if (batchSize == -1) + batchSize = Integer.MIN_VALUE; + } catch (NumberFormatException e) { + LOG.warn("Invalid batch size: " + bsz); + } + } + + for (Map map : context.getAllEntityFields()) { + String n = map.get(DataImporter.COLUMN); + String t = map.get(DataImporter.TYPE); + if ("sint".equals(t) || "integer".equals(t)) + fieldNameVsType.put(n, Types.INTEGER); + else if ("slong".equals(t) || "long".equals(t)) + fieldNameVsType.put(n, Types.BIGINT); + else if ("float".equals(t) || "sfloat".equals(t)) + fieldNameVsType.put(n, Types.FLOAT); + else if ("double".equals(t) || "sdouble".equals(t)) + fieldNameVsType.put(n, Types.DOUBLE); + else if ("date".equals(t)) + fieldNameVsType.put(n, Types.DATE); + else if ("boolean".equals(t)) + fieldNameVsType.put(n, Types.BOOLEAN); + else if ("binary".equals(t)) + fieldNameVsType.put(n, Types.BLOB); + else + fieldNameVsType.put(n, Types.VARCHAR); + } + } + + protected Callable createConnectionFactory(final Context context, + final Properties initProps) { +// final VariableResolver resolver = context.getVariableResolver(); + resolveVariables(context, 
initProps); + final String jndiName = initProps.getProperty(JNDI_NAME); + final String url = initProps.getProperty(URL); + final String driver = initProps.getProperty(DRIVER); + + if (url == null && jndiName == null) + throw new DataImportHandlerException(SEVERE, + "JDBC URL or JNDI name has to be specified"); + + if (driver != null) { + try { + DocBuilder.loadClass(driver, context.getSolrCore()); + } catch (ClassNotFoundException e) { + wrapAndThrow(SEVERE, e, "Could not load driver: " + driver); + } + } else { + if(jndiName == null){ + throw new DataImportHandlerException(SEVERE, "One of driver or jndiName must be specified in the data source"); + } + } + + String s = initProps.getProperty("maxRows"); + if (s != null) { + maxRows = Integer.parseInt(s); + } + + return factory = new Callable() { + public Connection call() throws Exception { + LOG.info("Creating a connection for entity " + + context.getEntityAttribute(DataImporter.NAME) + " with URL: " + + url); + long start = System.currentTimeMillis(); + Connection c = null; + try { + if(url != null){ + c = DriverManager.getConnection(url, initProps); + } else if(jndiName != null){ + InitialContext ctx = new InitialContext(); + Object jndival = ctx.lookup(jndiName); + if (jndival instanceof javax.sql.DataSource) { + javax.sql.DataSource dataSource = (javax.sql.DataSource) jndival; + String user = (String) initProps.get("user"); + String pass = (String) initProps.get("password"); + if(user == null || user.trim().equals("")){ + c = dataSource.getConnection(); + } else { + c = dataSource.getConnection(user, pass); + } + } else { + throw new DataImportHandlerException(SEVERE, + "the jndi name : '"+jndiName +"' is not a valid javax.sql.DataSource"); + } + } + } catch (SQLException e) { + // DriverManager does not allow you to use a driver which is not loaded through + // the class loader of the class which is trying to make the connection. 
+ // This is a workaround for cases where the user puts the driver jar in the + // solr.home/lib or solr.home/core/lib directories. + Driver d = (Driver) DocBuilder.loadClass(driver, context.getSolrCore()).newInstance(); + c = d.connect(url, initProps); + } + if (c != null) { + if (Boolean.parseBoolean(initProps.getProperty("readOnly"))) { + c.setReadOnly(true); + // Add other sane defaults + c.setAutoCommit(true); + c.setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED); + c.setHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT); + } + if (!Boolean.parseBoolean(initProps.getProperty("autoCommit"))) { + c.setAutoCommit(false); + } + String transactionIsolation = initProps.getProperty("transactionIsolation"); + if ("TRANSACTION_READ_UNCOMMITTED".equals(transactionIsolation)) { + c.setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED); + } else if ("TRANSACTION_READ_COMMITTED".equals(transactionIsolation)) { + c.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); + } else if ("TRANSACTION_REPEATABLE_READ".equals(transactionIsolation)) { + c.setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ); + } else if ("TRANSACTION_SERIALIZABLE".equals(transactionIsolation)) { + c.setTransactionIsolation(Connection.TRANSACTION_SERIALIZABLE); + } else if ("TRANSACTION_NONE".equals(transactionIsolation)) { + c.setTransactionIsolation(Connection.TRANSACTION_NONE); + } + String holdability = initProps.getProperty("holdability"); + if ("CLOSE_CURSORS_AT_COMMIT".equals(holdability)) { + c.setHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT); + } else if ("HOLD_CURSORS_OVER_COMMIT".equals(holdability)) { + c.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); + } + } + LOG.info("Time taken for getConnection(): " + + (System.currentTimeMillis() - start)); + return c; + } + }; + } + + private void resolveVariables(Context ctx, Properties initProps) { + for (Map.Entry entry : initProps.entrySet()) { + if (entry.getValue() != null) { + 
entry.setValue(ctx.replaceTokens((String) entry.getValue())); + } + } + } + + public Iterator> getData(String query) { + ResultSetIterator r = new ResultSetIterator(query); + return r.getIterator(); + } + + private void logError(String msg, Exception e) { + LOG.warn(msg, e); + } + + private List readFieldNames(ResultSetMetaData metaData) + throws SQLException { + List colNames = new ArrayList(); + int count = metaData.getColumnCount(); + for (int i = 0; i < count; i++) { + colNames.add(metaData.getColumnLabel(i + 1)); + } + return colNames; + } + + private class ResultSetIterator { + ResultSet resultSet; + + Statement stmt = null; + + List colNames; + + Iterator> rSetIterator; + + public ResultSetIterator(String query) { + + try { + Connection c = getConnection(); + stmt = c.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); + stmt.setFetchSize(batchSize); + stmt.setMaxRows(maxRows); + LOG.debug("Executing SQL: " + query); + long start = System.currentTimeMillis(); + if (stmt.execute(query)) { + resultSet = stmt.getResultSet(); + } + LOG.trace("Time taken for sql :" + + (System.currentTimeMillis() - start)); + colNames = readFieldNames(resultSet.getMetaData()); + } catch (Exception e) { + wrapAndThrow(SEVERE, e, "Unable to execute query: " + query); + } + if (resultSet == null) { + rSetIterator = new ArrayList>().iterator(); + return; + } + + rSetIterator = new Iterator>() { + public boolean hasNext() { + return hasnext(); + } + + public Map next() { + return getARow(); + } + + public void remove() {/* do nothing */ + } + }; + } + + private Iterator> getIterator() { + return rSetIterator; + } + + private Map getARow() { + if (resultSet == null) + return null; + Map result = new HashMap(); + for (String colName : colNames) { + try { + if (!convertType) { + // Use underlying database's type information + result.put(colName, resultSet.getObject(colName)); + continue; + } + + Integer type = fieldNameVsType.get(colName); + if (type == null) + type 
= Types.VARCHAR; + switch (type) { + case Types.INTEGER: + result.put(colName, resultSet.getInt(colName)); + break; + case Types.FLOAT: + result.put(colName, resultSet.getFloat(colName)); + break; + case Types.BIGINT: + result.put(colName, resultSet.getLong(colName)); + break; + case Types.DOUBLE: + result.put(colName, resultSet.getDouble(colName)); + break; + case Types.DATE: + result.put(colName, resultSet.getDate(colName)); + break; + case Types.BOOLEAN: + result.put(colName, resultSet.getBoolean(colName)); + break; + case Types.BLOB: + result.put(colName, resultSet.getBytes(colName)); + break; + default: + result.put(colName, resultSet.getString(colName)); + break; + } + } catch (SQLException e) { + logError("Error reading data ", e); + wrapAndThrow(SEVERE, e, "Error reading data from database"); + } + } + return result; + } + + private boolean hasnext() { + if (resultSet == null) + return false; + try { + if (resultSet.next()) { + return true; + } else { + close(); + return false; + } + } catch (SQLException e) { + close(); + wrapAndThrow(SEVERE,e); + return false; + } + } + + private void close() { + try { + if (resultSet != null) + resultSet.close(); + if (stmt != null) + stmt.close(); + } catch (Exception e) { + logError("Exception while closing result set", e); + } finally { + resultSet = null; + stmt = null; + } + } + } + + private Connection getConnection() throws Exception { + long currTime = System.currentTimeMillis(); + if (currTime - connLastUsed > CONN_TIME_OUT) { + synchronized (this) { + Connection tmpConn = factory.call(); + closeConnection(); + connLastUsed = System.currentTimeMillis(); + return conn = tmpConn; + } + + } else { + connLastUsed = currTime; + return conn; + } + } + + protected void finalize() throws Throwable { + try { + if(!isClosed){ + LOG.error("JdbcDataSource was not closed prior to finalize(), indicates a bug -- POSSIBLE RESOURCE LEAK!!!"); + close(); + } + } finally { + super.finalize(); + } + } + + private boolean isClosed = 
false; + + public void close() { + try { + closeConnection(); + } finally { + isClosed = true; + } + } + + private void closeConnection() { + try { + if (conn != null) { + conn.close(); + } + } catch (Exception e) { + LOG.error("Ignoring Error when closing connection", e); + } + } + + private static final long CONN_TIME_OUT = 10 * 1000; // 10 seconds + + private static final int FETCH_SIZE = 500; + + public static final String URL = "url"; + + public static final String JNDI_NAME = "jndiName"; + + public static final String DRIVER = "driver"; + + public static final String CONVERT_TYPE = "convertType"; +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/LineEntityProcessor.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/LineEntityProcessor.java new file mode 100644 index 00000000000..0f1979f2cbd --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/LineEntityProcessor.java @@ -0,0 +1,156 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.handler.dataimport; + +import java.io.*; +import java.util.*; +import java.util.regex.Pattern; + +import org.apache.commons.io.IOUtils; + + +/** + *

    + * An EntityProcessor instance which can stream lines of text read from a + * datasource. Options allow lines to be explicitly skipped or included in the index. + *

    + *

    + *

    + * Attribute summary + *

      + *
    • url is the required location of the input file. If this value is + * relative, it is assumed to be relative to baseLoc.
    • + *
    • acceptLineRegex is an optional attribute that, if present, discards any + * line which does not match the regExp.
    • + *
    • skipLineRegex is an optional attribute that is applied after any + * acceptLineRegex and discards any line which matches this regExp.
    • + *
    + *

    + * Although envisioned for reading lines from a file or url, LineEntityProcessor may also be useful + * for dealing with change lists, where each line contains filenames which can be used by subsequent entities + * to parse content from those files. + *

    + *

    + * Refer to http://wiki.apache.org/solr/DataImportHandler + * for more details. + *

    + *

    + * This API is experimental and may change in the future. + * + * @version $Id$ + * @since solr 1.4 + */ +public class LineEntityProcessor extends EntityProcessorBase { + private Pattern acceptLineRegex, skipLineRegex; + private String url; + private BufferedReader reader; + + /** + * Parses each of the entity attributes. + */ + public void init(Context context) { + super.init(context); + String s; + + // init a regex to locate files from the input we want to index + s = context.getResolvedEntityAttribute(ACCEPT_LINE_REGEX); + if (s != null) { + acceptLineRegex = Pattern.compile(s); + } + + // init a regex to locate files from the input to be skipped + s = context.getResolvedEntityAttribute(SKIP_LINE_REGEX); + if (s != null) { + skipLineRegex = Pattern.compile(s); + } + + // the FileName is required. + url = context.getResolvedEntityAttribute(URL); + if (url == null) throw + new DataImportHandlerException(DataImportHandlerException.SEVERE, + "'"+ URL +"' is a required attribute"); + } + + + /** + * Reads lines from the url till it finds a lines that matches the + * optional acceptLineRegex and does not match the optional skipLineRegex. + * + * @return A row containing a minimum of one field "rawLine" or null to signal + * end of file. The rawLine is the as line as returned by readLine() + * from the url. However transformers can be used to create as + * many other fields as required. + */ + public Map nextRow() { + if (reader == null) { + reader = new BufferedReader((Reader) context.getDataSource().getData(url)); + } + + String line; + + while ( true ) { + // read a line from the input file + try { + line = reader.readLine(); + } + catch (IOException exp) { + throw new DataImportHandlerException(DataImportHandlerException.SEVERE, + "Problem reading from input", exp); + } + + if (line == null) return null; // end of input + + // First scan whole line to see if we want it + if (acceptLineRegex != null && ! 
acceptLineRegex.matcher(line).find()) continue; + if (skipLineRegex != null && skipLineRegex.matcher(line).find()) continue; + // Contruct the 'row' of fields + Map row = new HashMap(); + row.put("rawLine", line); + return row; + } + } + + @Override + public void destroy() { + if (reader != null) { + IOUtils.closeQuietly(reader); + } + reader= null; + super.destroy(); + } + + /** + * Holds the name of entity attribute that will be parsed to obtain + * the filename containing the changelist. + */ + public static final String URL = "url"; + + /** + * Holds the name of entity attribute that will be parsed to obtain + * the pattern to be used when checking to see if a line should + * be returned. + */ + public static final String ACCEPT_LINE_REGEX = "acceptLineRegex"; + + /** + * Holds the name of entity attribute that will be parsed to obtain + * the pattern to be used when checking to see if a line should + * be ignored. + */ + public static final String SKIP_LINE_REGEX = "skipLineRegex"; +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/LogTransformer.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/LogTransformer.java new file mode 100644 index 00000000000..9831290baea --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/LogTransformer.java @@ -0,0 +1,66 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Map; + +/** + * A Transformer implementation which logs messages in a given template format. + *

    + * Refer to http://wiki.apache.org/solr/DataImportHandler + * for more details. + *

    + * This API is experimental and may change in the future. + * + * @version $Id$ + * @since solr 1.4 + */ +public class LogTransformer extends Transformer { + Logger LOG = LoggerFactory.getLogger(LogTransformer.class); + + public Object transformRow(Map row, Context ctx) { + String expr = ctx.getEntityAttribute(LOG_TEMPLATE); + String level = ctx.replaceTokens(ctx.getEntityAttribute(LOG_LEVEL)); + + if (expr == null || level == null) return row; + + if ("info".equals(level)) { + if (LOG.isInfoEnabled()) + LOG.info(ctx.replaceTokens(expr)); + } else if ("trace".equals(level)) { + if (LOG.isTraceEnabled()) + LOG.trace(ctx.replaceTokens(expr)); + } else if ("warn".equals(level)) { + if (LOG.isWarnEnabled()) + LOG.warn(ctx.replaceTokens(expr)); + } else if ("error".equals(level)) { + if (LOG.isErrorEnabled()) + LOG.error(ctx.replaceTokens(expr)); + } else if ("debug".equals(level)) { + if (LOG.isDebugEnabled()) + LOG.debug(ctx.replaceTokens(expr)); + } + + return row; + } + + public static final String LOG_TEMPLATE = "logTemplate"; + public static final String LOG_LEVEL = "logLevel"; +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/MockDataSource.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/MockDataSource.java new file mode 100644 index 00000000000..7b747d72a56 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/MockDataSource.java @@ -0,0 +1,59 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.Properties; + +/** + *

    + * A mock DataSource implementation which can be used for testing. + *

    + *

    + * This API is experimental and may change in the future. + * + * @version $Id$ + * @since solr 1.3 + */ +public class MockDataSource extends + DataSource>> { + + private static Map>> cache = new HashMap>>(); + + public static void setIterator(String query, + Iterator> iter) { + cache.put(query, iter); + } + + public static void clearCache() { + cache.clear(); + } + + public void init(Context context, Properties initProps) { + } + + public Iterator> getData(String query) { + return cache.get(query); + } + + public void close() { + cache.clear(); + + } +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/NumberFormatTransformer.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/NumberFormatTransformer.java new file mode 100644 index 00000000000..00bad109615 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/NumberFormatTransformer.java @@ -0,0 +1,140 @@ +package org.apache.solr.handler.dataimport; +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import java.text.NumberFormat; +import java.text.ParseException; +import java.text.ParsePosition; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + *

    + * A Transformer instance which can extract numbers out of strings. It uses + * java.text.NumberFormat class to parse strings and supports + * Number, Integer, Currency and Percent styles as supported by + * java.text.NumberFormat with configurable locales. + *

    + *

    + *

    + * Refer to http://wiki.apache.org/solr/DataImportHandler + * for more details. + *

    + *

    + * This API is experimental and may change in the future. + * + * @version $Id$ + * @since solr 1.3 + */ +public class NumberFormatTransformer extends Transformer { + + private static final Pattern localeRegex = Pattern.compile("^([a-z]{2})-([A-Z]{2})$"); + + @SuppressWarnings("unchecked") + public Object transformRow(Map row, Context context) { + for (Map fld : context.getAllEntityFields()) { + String style = context.replaceTokens(fld.get(FORMAT_STYLE)); + if (style != null) { + String column = fld.get(DataImporter.COLUMN); + String srcCol = fld.get(RegexTransformer.SRC_COL_NAME); + Locale locale = null; + String localeStr = context.replaceTokens(fld.get(LOCALE)); + if (srcCol == null) + srcCol = column; + if (localeStr != null) { + Matcher matcher = localeRegex.matcher(localeStr); + if (matcher.find() && matcher.groupCount() == 2) { + locale = new Locale(matcher.group(1), matcher.group(2)); + } else { + throw new DataImportHandlerException(DataImportHandlerException.SEVERE, "Invalid Locale specified for field: " + fld); + } + } else { + locale = Locale.getDefault(); + } + + Object val = row.get(srcCol); + String styleSmall = style.toLowerCase(); + + if (val instanceof List) { + List inputs = (List) val; + List results = new ArrayList(); + for (String input : inputs) { + try { + results.add(process(input, styleSmall, locale)); + } catch (ParseException e) { + throw new DataImportHandlerException( + DataImportHandlerException.SEVERE, + "Failed to apply NumberFormat on column: " + column, e); + } + } + row.put(column, results); + } else { + if (val == null || val.toString().trim().equals("")) + continue; + try { + row.put(column, process(val.toString(), styleSmall, locale)); + } catch (ParseException e) { + throw new DataImportHandlerException( + DataImportHandlerException.SEVERE, + "Failed to apply NumberFormat on column: " + column, e); + } + } + } + } + return row; + } + + private Number process(String val, String style, Locale locale) throws ParseException 
{ + if (INTEGER.equals(style)) { + return parseNumber(val, NumberFormat.getIntegerInstance(locale)); + } else if (NUMBER.equals(style)) { + return parseNumber(val, NumberFormat.getNumberInstance(locale)); + } else if (CURRENCY.equals(style)) { + return parseNumber(val, NumberFormat.getCurrencyInstance(locale)); + } else if (PERCENT.equals(style)) { + return parseNumber(val, NumberFormat.getPercentInstance(locale)); + } + + return null; + } + + private Number parseNumber(String val, NumberFormat numFormat) throws ParseException { + ParsePosition parsePos = new ParsePosition(0); + Number num = numFormat.parse(val, parsePos); + if (parsePos.getIndex() != val.length()) { + throw new ParseException("illegal number format", parsePos.getIndex()); + } + return num; + } + + public static final String FORMAT_STYLE = "formatStyle"; + + public static final String LOCALE = "locale"; + + public static final String NUMBER = "number"; + + public static final String PERCENT = "percent"; + + public static final String INTEGER = "integer"; + + public static final String CURRENCY = "currency"; +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/PlainTextEntityProcessor.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/PlainTextEntityProcessor.java new file mode 100644 index 00000000000..ecb8ac270ae --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/PlainTextEntityProcessor.java @@ -0,0 +1,79 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE; +import static org.apache.solr.handler.dataimport.DataImportHandlerException.wrapAndThrow; +import static org.apache.solr.handler.dataimport.XPathEntityProcessor.URL; +import org.apache.commons.io.IOUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.Reader; +import java.io.StringWriter; +import java.util.HashMap; +import java.util.Map; + +/** + *

    An implementation of EntityProcessor which reads data from a url/file and give out a row which contains one String + * value. The name of the field is 'plainText'. + * + * @version $Id$ + * @since solr 1.4 + */ +public class PlainTextEntityProcessor extends EntityProcessorBase { + private static final Logger LOG = LoggerFactory.getLogger(PlainTextEntityProcessor.class); + private boolean ended = false; + + public void init(Context context) { + super.init(context); + ended = false; + } + + public Map nextRow() { + if (ended) return null; + DataSource ds = context.getDataSource(); + String url = context.replaceTokens(context.getEntityAttribute(URL)); + Reader r = null; + try { + r = ds.getData(url); + } catch (Exception e) { + wrapAndThrow(SEVERE, e, "Exception reading url : " + url); + } + StringWriter sw = new StringWriter(); + char[] buf = new char[1024]; + while (true) { + int len = 0; + try { + len = r.read(buf); + } catch (IOException e) { + IOUtils.closeQuietly(r); + wrapAndThrow(SEVERE, e, "Exception reading url : " + url); + } + if (len <= 0) break; + sw.append(new String(buf, 0, len)); + } + Map row = new HashMap(); + row.put(PLAIN_TEXT, sw.toString()); + ended = true; + IOUtils.closeQuietly(r); + return row; + } + + public static final String PLAIN_TEXT = "plainText"; +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/RegexTransformer.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/RegexTransformer.java new file mode 100644 index 00000000000..e86d90ad5d8 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/RegexTransformer.java @@ -0,0 +1,200 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import java.util.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + *

    + * A Transformer implementation which uses Regular Expressions to extract, split + * and replace data in fields. + *

    + *

    + *

    + * Refer to http://wiki.apache.org/solr/DataImportHandler + * for more details. + *

    + *

    + * This API is experimental and may change in the future. + * + * @version $Id$ + * @since solr 1.3 + */ +public class RegexTransformer extends Transformer { + private static final Logger LOG = LoggerFactory.getLogger(RegexTransformer.class); + + @SuppressWarnings("unchecked") + public Map transformRow(Map row, + Context ctx) { + List> fields = ctx.getAllEntityFields(); + for (Map field : fields) { + String col = field.get(DataImporter.COLUMN); + String reStr = ctx.replaceTokens(field.get(REGEX)); + String splitBy = ctx.replaceTokens(field.get(SPLIT_BY)); + String replaceWith = ctx.replaceTokens(field.get(REPLACE_WITH)); + String groupNames = ctx.replaceTokens(field.get(GROUP_NAMES)); + if (reStr != null || splitBy != null) { + String srcColName = field.get(SRC_COL_NAME); + if (srcColName == null) { + srcColName = col; + } + Object tmpVal = row.get(srcColName); + if (tmpVal == null) + continue; + + if (tmpVal instanceof List) { + List inputs = (List) tmpVal; + List results = new ArrayList(); + Map otherVars= null; + for (String input : inputs) { + Object o = process(col, reStr, splitBy, replaceWith, input, groupNames); + if (o != null){ + if (o instanceof Map) { + Map map = (Map) o; + for (Object e : map.entrySet()) { + Map.Entry entry = (Map.Entry) e; + List l = results; + if(!col.equals(entry.getKey())){ + if(otherVars == null) otherVars = new HashMap(); + l = otherVars.get(entry.getKey()); + if(l == null){ + l = new ArrayList(); + otherVars.put(entry.getKey(), l); + } + } + if (entry.getValue() instanceof Collection) { + l.addAll((Collection) entry.getValue()); + } else { + l.add(entry.getValue()); + } + } + } else { + if (o instanceof Collection) { + results.addAll((Collection) o); + } else { + results.add(o); + } + } + } + } + row.put(col, results); + if(otherVars != null) row.putAll(otherVars); + } else { + String value = tmpVal.toString(); + Object o = process(col, reStr, splitBy, replaceWith, value, groupNames); + if (o != null){ + if (o instanceof 
Map) { + row.putAll((Map) o); + } else{ + row.put(col, o); + } + } + } + } + } + return row; + } + + private Object process(String col, String reStr, String splitBy, + String replaceWith, String value, String groupNames) { + if (splitBy != null) { + return readBySplit(splitBy, value); + } else if (replaceWith != null) { + Pattern p = getPattern(reStr); + Matcher m = p.matcher(value); + return m.find()? m.replaceAll(replaceWith): null; + } else { + return readfromRegExp(reStr, value, col, groupNames); + } + } + + @SuppressWarnings("unchecked") + private List readBySplit(String splitBy, String value) { + String[] vals = value.split(splitBy); + List l = new ArrayList(); + l.addAll(Arrays.asList(vals)); + return l; + } + + @SuppressWarnings("unchecked") + private Object readfromRegExp(String reStr, String value, String columnName, String gNames) { + String[] groupNames = null; + if(gNames != null && gNames.trim().length() >0){ + groupNames = gNames.split(","); + } + Pattern regexp = getPattern(reStr); + Matcher m = regexp.matcher(value); + if (m.find() && m.groupCount() > 0) { + if (m.groupCount() > 1) { + List l = null; + Map map = null; + if(groupNames == null){ + l = new ArrayList(); + } else { + map = new HashMap(); + } + for (int i = 1; i <= m.groupCount(); i++) { + try { + if(l != null){ + l.add(m.group(i)); + } else if (map != null ){ + if(i <= groupNames.length){ + String nameOfGroup = groupNames[i-1]; + if(nameOfGroup != null && nameOfGroup.trim().length() >0){ + map.put(nameOfGroup, m.group(i)); + } + } + } + } catch (Exception e) { + LOG.warn("Parsing failed for field : " + columnName, e); + } + } + return l == null ? 
map: l; + } else { + return m.group(1); + } + } + + return null; + } + + private Pattern getPattern(String reStr) { + Pattern result = PATTERN_CACHE.get(reStr); + if (result == null) { + PATTERN_CACHE.put(reStr, result = Pattern.compile(reStr)); + } + return result; + } + + private HashMap PATTERN_CACHE = new HashMap(); + + public static final String REGEX = "regex"; + + public static final String REPLACE_WITH = "replaceWith"; + + public static final String SPLIT_BY = "splitBy"; + + public static final String SRC_COL_NAME = "sourceColName"; + + public static final String GROUP_NAMES = "groupNames"; + +} diff --git a/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/ScriptTransformer.java b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/ScriptTransformer.java new file mode 100644 index 00000000000..3cba38115ed --- /dev/null +++ b/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/ScriptTransformer.java @@ -0,0 +1,105 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.handler.dataimport; + +import static org.apache.solr.handler.dataimport.DataImportHandlerException.wrapAndThrow; +import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE; + +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.util.Map; + +/** + *

    + * A Transformer instance capable of executing functions written in scripting + * languages as a Transformer instance. + *

    + *

    + *

    + * Refer to http://wiki.apache.org/solr/DataImportHandler + * for more details. + *

    + *

    + * This API is experimental and may change in the future. + * + * @version $Id$ + * @since solr 1.3 + */ +public class ScriptTransformer extends Transformer { + private Object engine; + + private Method invokeFunctionMethod; + + private String functionName; + + public Object transformRow(Map row, Context context) { + try { + if (engine == null) + initEngine(context); + if (engine == null) + return row; + return invokeFunctionMethod.invoke(engine, functionName, new Object[]{ + row, context}); + } catch (DataImportHandlerException e) { + throw e; + } catch (InvocationTargetException e) { + wrapAndThrow(SEVERE,e, + "Could not invoke method :" + + functionName + + "\n "); + } catch (Exception e) { + wrapAndThrow(SEVERE,e, "Error invoking script for entity " + context.getEntityAttribute("name")); + } + //will not reach here + return null; + } + + private void initEngine(Context context) { + try { + String scriptText = context.getScript(); + String scriptLang = context.getScriptLanguage(); + if(scriptText == null ){ + throw new DataImportHandlerException(SEVERE, + " + + +

    DataImportHandler Development Console

    +<% + String handler = request.getParameter("handler"); // must be specified +%> +
    + + + + + + + + + + + + + + + + + +
    Handler: <%=handler%> change handler
    + + + + + + + + + +
    + + Verbose Commit Clean Start Row No. of Rows 
    +
    data config xml +
    +
    +
    + + + + + + + + +
    +
    + + + + +
    +
    + + + + + + +
    + +Return to Admin Page + + diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/MockInitialContextFactory.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/MockInitialContextFactory.java new file mode 100644 index 00000000000..e4ff3fc86bc --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/MockInitialContextFactory.java @@ -0,0 +1,62 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.handler.dataimport; + +import java.util.HashMap; +import java.util.Hashtable; +import java.util.Map; + +import javax.naming.NamingException; +import javax.naming.spi.InitialContextFactory; + +import org.easymock.EasyMock; +import org.easymock.IAnswer; +import org.easymock.IMocksControl; + +public class MockInitialContextFactory implements InitialContextFactory { + private static final Map objects = new HashMap(); + private final IMocksControl mockControl; + private final javax.naming.Context context; + + public MockInitialContextFactory() { + mockControl = EasyMock.createStrictControl(); + context = mockControl.createMock(javax.naming.Context.class); + + try { + EasyMock.expect(context.lookup((String) EasyMock.anyObject())).andAnswer( + new IAnswer() { + public Object answer() throws Throwable { + return objects.get(EasyMock.getCurrentArguments()[0]); + } + }).anyTimes(); + + } catch (NamingException e) { + throw new RuntimeException(e); + } + + mockControl.replay(); + } + + @SuppressWarnings("unchecked") + public javax.naming.Context getInitialContext(Hashtable env) { + return context; + } + + public static void bind(String name, Object obj) { + objects.put(name, obj); + } +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestCachedSqlEntityProcessor.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestCachedSqlEntityProcessor.java new file mode 100644 index 00000000000..633a17645bd --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestCachedSqlEntityProcessor.java @@ -0,0 +1,263 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import org.junit.Assert; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** + *

    + * Test for CachedSqlEntityProcessor + *

    + * + * @version $Id$ + * @since solr 1.3 + */ +public class TestCachedSqlEntityProcessor { + + @Test + public void withoutWhereClause() { + List fields = new ArrayList(); + fields.add(AbstractDataImportHandlerTest.createMap("column", "id")); + fields.add(AbstractDataImportHandlerTest.createMap("column", "desc")); + String q = "select * from x where id=${x.id}"; + Map entityAttrs = AbstractDataImportHandlerTest.createMap( + "query", q); + MockDataSource ds = new MockDataSource(); + VariableResolverImpl vr = new VariableResolverImpl(); + + vr.addNamespace("x", AbstractDataImportHandlerTest.createMap("id", 1)); + Context context = AbstractDataImportHandlerTest.getContext(null, vr, ds, Context.FULL_DUMP, fields, entityAttrs); + List> rows = new ArrayList>(); + rows.add(AbstractDataImportHandlerTest.createMap("id", 1, "desc", "one")); + rows.add(AbstractDataImportHandlerTest.createMap("id", 1, "desc", + "another one")); + MockDataSource.setIterator(vr.replaceTokens(q), rows.iterator()); + EntityProcessor csep = new EntityProcessorWrapper( new CachedSqlEntityProcessor(), null); + csep.init(context); + rows = new ArrayList>(); + while (true) { + Map r = csep.nextRow(); + if (r == null) + break; + rows.add(r); + } + Assert.assertEquals(2, rows.size()); + ds.close(); + csep.init(context); + rows = new ArrayList>(); + while (true) { + Map r = csep.nextRow(); + if (r == null) + break; + rows.add(r); + } + Assert.assertEquals(2, rows.size()); + Assert.assertEquals(2, rows.get(0).size()); + Assert.assertEquals(2, rows.get(1).size()); + } + + @Test + public void withoutWhereClauseWithTransformers() { + List fields = new ArrayList(); + fields.add(AbstractDataImportHandlerTest.createMap("column", "id")); + fields.add(AbstractDataImportHandlerTest.createMap("column", "desc")); + String q = "select * from x where id=${x.id}"; + Map entityAttrs = AbstractDataImportHandlerTest.createMap( + "query", q, "transformer", UppercaseTransformer.class.getName()); + MockDataSource ds = new 
MockDataSource(); + VariableResolverImpl vr = new VariableResolverImpl(); + + vr.addNamespace("x", AbstractDataImportHandlerTest.createMap("id", 1)); + Context context = AbstractDataImportHandlerTest.getContext(null, vr, ds, Context.FULL_DUMP, fields, entityAttrs); + List> rows = new ArrayList>(); + rows.add(AbstractDataImportHandlerTest.createMap("id", 1, "desc", "one")); + rows.add(AbstractDataImportHandlerTest.createMap("id", 1, "desc", + "another one")); + MockDataSource.setIterator(vr.replaceTokens(q), rows.iterator()); + EntityProcessor csep = new EntityProcessorWrapper( new CachedSqlEntityProcessor(), null); + csep.init(context); + rows = new ArrayList>(); + while (true) { + Map r = csep.nextRow(); + if (r == null) + break; + rows.add(r); + } + Assert.assertEquals(2, rows.size()); + ds.close(); + csep.init(context); + rows = new ArrayList>(); + while (true) { + Map r = csep.nextRow(); + if (r == null) + break; + rows.add(r); + Assert.assertEquals(r.get("desc").toString().toUpperCase(), r.get("desc")); + } + Assert.assertEquals(2, rows.size()); + Assert.assertEquals(2, rows.get(0).size()); + Assert.assertEquals(2, rows.get(1).size()); + } + + @Test + public void withoutWhereClauseWithMultiRowTransformer() { + List fields = new ArrayList(); + fields.add(AbstractDataImportHandlerTest.createMap("column", "id")); + fields.add(AbstractDataImportHandlerTest.createMap("column", "desc")); + String q = "select * from x where id=${x.id}"; + Map entityAttrs = AbstractDataImportHandlerTest.createMap( + "query", q, "transformer", DoubleTransformer.class.getName()); + MockDataSource ds = new MockDataSource(); + VariableResolverImpl vr = new VariableResolverImpl(); + + vr.addNamespace("x", AbstractDataImportHandlerTest.createMap("id", 1)); + Context context = AbstractDataImportHandlerTest.getContext(null, vr, ds, Context.FULL_DUMP, fields, entityAttrs); + List> rows = new ArrayList>(); + rows.add(AbstractDataImportHandlerTest.createMap("id", 1, "desc", "one")); + 
rows.add(AbstractDataImportHandlerTest.createMap("id", 1, "desc", + "another one")); + MockDataSource.setIterator(vr.replaceTokens(q), rows.iterator()); + EntityProcessor csep = new EntityProcessorWrapper( new CachedSqlEntityProcessor(), null); + csep.init(context); + rows = new ArrayList>(); + while (true) { + Map r = csep.nextRow(); + if (r == null) + break; + rows.add(r); + } + Assert.assertEquals(4, rows.size()); + ds.close(); + csep.init(context); + rows = new ArrayList>(); + while (true) { + Map r = csep.nextRow(); + if (r == null) + break; + rows.add(r); + } + Assert.assertEquals(4, rows.size()); + Assert.assertEquals(2, rows.get(0).size()); + Assert.assertEquals(2, rows.get(1).size()); + } + + public static class DoubleTransformer extends Transformer { + + public Object transformRow(Map row, Context context) { + List> rows = new ArrayList>(); + rows.add(row); + rows.add(row); + + return rows; + } + } + + public static class UppercaseTransformer extends Transformer { + + public Object transformRow(Map row, Context context) { + for (Map.Entry entry : row.entrySet()) { + Object val = entry.getValue(); + if (val instanceof String) { + String s = (String) val; + entry.setValue(s.toUpperCase()); + } + } + return row; + } + } + + @Test + public void withWhereClause() { + List fields = new ArrayList(); + fields.add(AbstractDataImportHandlerTest.createMap("column", "id")); + fields.add(AbstractDataImportHandlerTest.createMap("column", "desc")); + String q = "select * from x"; + Map entityAttrs = AbstractDataImportHandlerTest.createMap( + "query", q, EntityProcessorBase.CACHE_KEY,"id", EntityProcessorBase.CACHE_LOOKUP ,"x.id"); + MockDataSource ds = new MockDataSource(); + VariableResolverImpl vr = new VariableResolverImpl(); + Map xNamespace = AbstractDataImportHandlerTest.createMap("id", 0); + vr.addNamespace("x", xNamespace); + Context context = AbstractDataImportHandlerTest.getContext(null, vr, ds, Context.FULL_DUMP, fields, entityAttrs); + doWhereTest(q, 
context, ds, xNamespace); + } + + @Test + public void withKeyAndLookup() { + List fields = new ArrayList(); + fields.add(AbstractDataImportHandlerTest.createMap("column", "id")); + fields.add(AbstractDataImportHandlerTest.createMap("column", "desc")); + String q = "select * from x"; + Map entityAttrs = AbstractDataImportHandlerTest.createMap("query", q, "where", "id=x.id"); + MockDataSource ds = new MockDataSource(); + VariableResolverImpl vr = new VariableResolverImpl(); + Map xNamespace = AbstractDataImportHandlerTest.createMap("id", 0); + vr.addNamespace("x", xNamespace); + Context context = AbstractDataImportHandlerTest.getContext(null, vr, ds, Context.FULL_DUMP, fields, entityAttrs); + doWhereTest(q, context, ds, xNamespace); + } + + private void doWhereTest(String q, Context context, MockDataSource ds, Map xNamespace) { + List> rows = new ArrayList>(); + rows.add(AbstractDataImportHandlerTest.createMap("id", 1, "desc", "one")); + rows.add(AbstractDataImportHandlerTest.createMap("id", 2, "desc", "two")); + rows.add(AbstractDataImportHandlerTest.createMap("id", 2, "desc", + "another two")); + rows.add(AbstractDataImportHandlerTest.createMap("id", 3, "desc", "three")); + rows.add(AbstractDataImportHandlerTest.createMap("id", 3, "desc", "another three")); + rows.add(AbstractDataImportHandlerTest.createMap("id", 3, "desc", "another another three")); + MockDataSource.setIterator(q, rows.iterator()); + EntityProcessor csep = new EntityProcessorWrapper(new CachedSqlEntityProcessor(), null); + csep.init(context); + rows = new ArrayList>(); + while (true) { + Map r = csep.nextRow(); + if (r == null) + break; + rows.add(r); + } + Assert.assertEquals(0, rows.size()); + ds.close(); + + csep.init(context); + rows = new ArrayList>(); + xNamespace.put("id", 2); + while (true) { + Map r = csep.nextRow(); + if (r == null) + break; + rows.add(r); + } + Assert.assertEquals(2, rows.size()); + + csep.init(context); + rows = new ArrayList>(); + xNamespace.put("id", 3); + while 
(true) { + Map r = csep.nextRow(); + if (r == null) + break; + rows.add(r); + } + Assert.assertEquals(3, rows.size()); + } +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestClobTransformer.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestClobTransformer.java new file mode 100644 index 00000000000..a3820926646 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestClobTransformer.java @@ -0,0 +1,62 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.handler.dataimport; + +import junit.framework.Assert; +import org.junit.Test; + +import java.io.StringReader; +import java.lang.reflect.InvocationHandler; +import java.lang.reflect.Method; +import java.lang.reflect.Proxy; +import java.sql.Clob; +import java.util.*; + +/** + * Test for ClobTransformer + * + * @version $Id$ + * @see org.apache.solr.handler.dataimport.ClobTransformer + * @since solr 1.4 + */ +public class TestClobTransformer { + @Test + public void simple() throws Exception { + List> flds = new ArrayList>(); + Map f = new HashMap(); + // + f.put(DataImporter.COLUMN, "dsc"); + f.put(ClobTransformer.CLOB, "true"); + f.put(DataImporter.NAME, "description"); + flds.add(f); + Context ctx = AbstractDataImportHandlerTest.getContext(null, new VariableResolverImpl(), null, Context.FULL_DUMP, flds, Collections.EMPTY_MAP); + Transformer t = new ClobTransformer(); + Map row = new HashMap(); + Clob clob = (Clob) Proxy.newProxyInstance(this.getClass().getClassLoader(), new Class[]{Clob.class}, new InvocationHandler() { + public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { + if (method.getName().equals("getCharacterStream")) { + return new StringReader("hello!"); + } + return null; + } + }); + + row.put("dsc", clob); + t.transformRow(row, ctx); + Assert.assertEquals("hello!", row.get("dsc")); + } +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestContentStreamDataSource.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestContentStreamDataSource.java new file mode 100644 index 00000000000..32868bbb8f8 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestContentStreamDataSource.java @@ -0,0 +1,155 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import junit.framework.TestCase; +import org.apache.commons.io.FileUtils; +import org.apache.solr.client.solrj.embedded.JettySolrRunner; +import org.apache.solr.client.solrj.impl.CommonsHttpSolrServer; +import org.apache.solr.client.solrj.request.DirectXmlRequest; +import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.common.SolrDocument; +import org.apache.solr.common.SolrDocumentList; +import org.apache.solr.common.params.ModifiableSolrParams; +import org.apache.solr.util.AbstractSolrTestCase; + +import java.io.File; +import java.util.List; + +/** + * Test for ContentStreamDataSource + * + * @version $Id$ + * @since solr 1.4 + */ +public class TestContentStreamDataSource extends TestCase { + private static final String CONF_DIR = "." 
+ File.separator + "solr" + File.separator + "conf" + File.separator; + SolrInstance instance = null; + JettySolrRunner jetty; + + + public void setUp() throws Exception { + instance = new SolrInstance("inst", null); + instance.setUp(); + jetty = createJetty(instance); + + } + + public void testSimple() throws Exception { + DirectXmlRequest req = new DirectXmlRequest("/dataimport", xml); + ModifiableSolrParams params = new ModifiableSolrParams(); + params.set("command", "full-import"); + params.set("clean", "false"); + req.setParams(params); + String url = "http://localhost:" + jetty.getLocalPort() + "/solr"; + CommonsHttpSolrServer solrServer = new CommonsHttpSolrServer(url); + solrServer.request(req); + ModifiableSolrParams qparams = new ModifiableSolrParams(); + qparams.add("q", "*:*"); + QueryResponse qres = solrServer.query(qparams); + SolrDocumentList results = qres.getResults(); + assertEquals(2, results.getNumFound()); + SolrDocument doc = results.get(0); + assertEquals("1", doc.getFieldValue("id")); + assertEquals("Hello C1", ((List)doc.getFieldValue("desc")).get(0)); + } + + private class SolrInstance extends AbstractSolrTestCase { + String name; + Integer port; + File homeDir; + File confDir; + + /** + * if masterPort is null, this instance is a master -- otherwise this instance is a slave, and assumes the master is + * on localhost at the specified port. 
+ */ + public SolrInstance(String name, Integer port) { + this.name = name; + this.port = port; + } + + public String getHomeDir() { + return homeDir.toString(); + } + + @Override + public String getSchemaFile() { + return CONF_DIR + "dataimport-schema.xml"; + } + + public String getConfDir() { + return confDir.toString(); + } + + public String getDataDir() { + return dataDir.toString(); + } + + @Override + public String getSolrConfigFile() { + return CONF_DIR + "contentstream-solrconfig.xml"; + } + + public void setUp() throws Exception { + + String home = System.getProperty("java.io.tmpdir") + + File.separator + + getClass().getName() + "-" + System.currentTimeMillis(); + + + homeDir = new File(home + "inst"); + dataDir = new File(homeDir, "data"); + confDir = new File(homeDir, "conf"); + + homeDir.mkdirs(); + dataDir.mkdirs(); + confDir.mkdirs(); + + File f = new File(confDir, "solrconfig.xml"); + FileUtils.copyFile(new File(getSolrConfigFile()), f); + f = new File(confDir, "schema.xml"); + + FileUtils.copyFile(new File(getSchemaFile()), f); + f = new File(confDir, "data-config.xml"); + FileUtils.copyFile(new File(CONF_DIR + "dataconfig-contentstream.xml"), f); + } + + public void tearDown() throws Exception { + super.tearDown(); + AbstractSolrTestCase.recurseDelete(homeDir); + } + } + + private JettySolrRunner createJetty(SolrInstance instance) throws Exception { + System.setProperty("solr.solr.home", instance.getHomeDir()); + System.setProperty("solr.data.dir", instance.getDataDir()); + JettySolrRunner jetty = new JettySolrRunner("/solr", 0); + jetty.start(); + return jetty; + } + + static String xml = "\n" + + "\n" + + " 1\n" + + " Hello C1\n" + + "\n" + + "\n" + + " 2\n" + + " Hello C2\n" + + "\n" + ""; +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDataConfig.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDataConfig.java new file mode 100644 index 
00000000000..f69c8096297 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDataConfig.java @@ -0,0 +1,91 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import junit.framework.Assert; +import org.junit.Test; +import org.w3c.dom.Document; + +import javax.xml.parsers.DocumentBuilderFactory; +import java.io.ByteArrayInputStream; +import java.util.ArrayList; +import java.util.List; + +/** + *

    + * Test for DataConfig + *

    + * + * @version $Id$ + * @since solr 1.3 + */ +public class TestDataConfig extends AbstractDataImportHandlerTest { + + @Override + public void setUp() throws Exception { + super.setUp(); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + } + + @Override + public String getSchemaFile() { + return "dataimport-schema.xml"; + } + + @Override + public String getSolrConfigFile() { + return "dataimport-nodatasource-solrconfig.xml"; + } + + @Test + @SuppressWarnings("unchecked") + public void testDataConfigWithDataSource() throws Exception { + List rows = new ArrayList(); + rows.add(createMap("id", "1", "desc", "one")); + MockDataSource.setIterator("select * from x", rows.iterator()); + + super.runFullImport(loadDataConfig("data-config-with-datasource.xml")); + + assertQ(req("id:1"), "//*[@numFound='1']"); + } + + @Test + public void basic() throws Exception { + javax.xml.parsers.DocumentBuilder builder = DocumentBuilderFactory + .newInstance().newDocumentBuilder(); + Document doc = builder.parse(new ByteArrayInputStream(xml.getBytes())); + + DataConfig dc = new DataConfig(); + dc.readFromXml(doc.getDocumentElement()); + Assert.assertEquals("atrimlisting", dc.document.entities.get(0).name); + } + + private static final String xml = "\n" + + "\t\n" + + "\t\t '${indexer.last_index_time}'\">\n" + + + + "\t\t\n" + + + "\t\n" + ""; +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDateFormatTransformer.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDateFormatTransformer.java new file mode 100644 index 00000000000..7ebd2efd343 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDateFormatTransformer.java @@ -0,0 +1,92 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import org.junit.Assert; +import org.junit.Test; + +import java.text.SimpleDateFormat; +import java.util.*; + +/** + *

    + * Test for DateFormatTransformer + *

    + * + * @version $Id$ + * @since solr 1.3 + */ +public class TestDateFormatTransformer { + + @Test + @SuppressWarnings("unchecked") + public void testTransformRow_SingleRow() throws Exception { + List fields = new ArrayList(); + fields.add(AbstractDataImportHandlerTest.createMap(DataImporter.COLUMN, + "lastModified")); + fields.add(AbstractDataImportHandlerTest.createMap(DataImporter.COLUMN, + "dateAdded", RegexTransformer.SRC_COL_NAME, "lastModified", + DateFormatTransformer.DATE_TIME_FMT, "MM/dd/yyyy")); + + SimpleDateFormat format = new SimpleDateFormat("MM/dd/yyyy"); + Date now = format.parse(format.format(new Date())); + + Map row = AbstractDataImportHandlerTest.createMap("lastModified", format + .format(now)); + + VariableResolverImpl resolver = new VariableResolverImpl(); + resolver.addNamespace("e", row); + + Context context = AbstractDataImportHandlerTest.getContext(null, resolver, + null, Context.FULL_DUMP, fields, null); + new DateFormatTransformer().transformRow(row, context); + Assert.assertEquals(now, row.get("dateAdded")); + } + + @Test + @SuppressWarnings("unchecked") + public void testTransformRow_MultipleRows() throws Exception { + List fields = new ArrayList(); + fields.add(AbstractDataImportHandlerTest.createMap(DataImporter.COLUMN, + "lastModified")); + fields.add(AbstractDataImportHandlerTest.createMap(DataImporter.COLUMN, + "dateAdded", RegexTransformer.SRC_COL_NAME, "lastModified", + DateFormatTransformer.DATE_TIME_FMT, "MM/dd/yyyy hh:mm:ss.SSS")); + + SimpleDateFormat format = new SimpleDateFormat("MM/dd/yyyy hh:mm:ss.SSS"); + Date now1 = format.parse(format.format(new Date())); + Date now2 = format.parse(format.format(new Date())); + + Map row = new HashMap(); + List list = new ArrayList(); + list.add(format.format(now1)); + list.add(format.format(now2)); + row.put("lastModified", list); + + VariableResolverImpl resolver = new VariableResolverImpl(); + resolver.addNamespace("e", row); + + Context context = 
AbstractDataImportHandlerTest.getContext(null, resolver, + null, Context.FULL_DUMP, fields, null); + new DateFormatTransformer().transformRow(row, context); + List output = new ArrayList(); + output.add(now1); + output.add(now2); + Assert.assertEquals(output, row.get("dateAdded")); + } + +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDocBuilder.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDocBuilder.java new file mode 100644 index 00000000000..a4809fbcc7f --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDocBuilder.java @@ -0,0 +1,259 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import org.apache.solr.common.SolrInputDocument; +import static org.apache.solr.handler.dataimport.AbstractDataImportHandlerTest.createMap; +import org.junit.Assert; +import org.junit.Test; + +import java.util.*; + +/** + *

    + * Test for DocBuilder + *

    + * + * @version $Id$ + * @since solr 1.3 + */ +public class TestDocBuilder { + + @Test + public void loadClass() throws Exception { + Class clz = DocBuilder.loadClass("RegexTransformer", null); + Assert.assertNotNull(clz); + } + + @Test + public void singleEntityNoRows() { + try { + DataImporter di = new DataImporter(); + di.loadAndInit(dc_singleEntity); + DataConfig cfg = di.getConfig(); + DataConfig.Entity ent = cfg.document.entities.get(0); + MockDataSource.setIterator("select * from x", new ArrayList().iterator()); + ent.dataSrc = new MockDataSource(); + ent.isDocRoot = true; + DataImporter.RequestParams rp = new DataImporter.RequestParams(); + rp.command = "full-import"; + SolrWriterImpl swi = new SolrWriterImpl(); + di.runCmd(rp, swi); + Assert.assertEquals(Boolean.TRUE, swi.deleteAllCalled); + Assert.assertEquals(Boolean.TRUE, swi.commitCalled); + Assert.assertEquals(0, swi.docs.size()); + Assert.assertEquals(1, di.getDocBuilder().importStatistics.queryCount + .get()); + Assert + .assertEquals(0, di.getDocBuilder().importStatistics.docCount.get()); + Assert.assertEquals(0, di.getDocBuilder().importStatistics.rowsCount + .get()); + } finally { + MockDataSource.clearCache(); + } + } + + @Test + public void testDeltaImportNoRows_MustNotCommit() { + try { + DataImporter di = new DataImporter(); + di.loadAndInit(dc_deltaConfig); + DataConfig cfg = di.getConfig(); + DataConfig.Entity ent = cfg.document.entities.get(0); + MockDataSource.setIterator("select * from x", new ArrayList().iterator()); + MockDataSource.setIterator("select id from x", new ArrayList().iterator()); + ent.dataSrc = new MockDataSource(); + ent.isDocRoot = true; + DataImporter.RequestParams rp = new DataImporter.RequestParams(createMap("command", "delta-import")); + SolrWriterImpl swi = new SolrWriterImpl(); + di.runCmd(rp, swi); + Assert.assertEquals(Boolean.FALSE, swi.deleteAllCalled); + Assert.assertEquals(Boolean.FALSE, swi.commitCalled); + Assert.assertEquals(0, swi.docs.size()); + 
Assert.assertEquals(1, di.getDocBuilder().importStatistics.queryCount.get()); + Assert.assertEquals(0, di.getDocBuilder().importStatistics.docCount.get()); + Assert.assertEquals(0, di.getDocBuilder().importStatistics.rowsCount.get()); + } finally { + MockDataSource.clearCache(); + } + } + + @Test + public void singleEntityOneRow() { + try { + DataImporter di = new DataImporter(); + di.loadAndInit(dc_singleEntity); + DataConfig cfg = di.getConfig(); + DataConfig.Entity ent = cfg.document.entities.get(0); + List l = new ArrayList(); + l.add(createMap("id", 1, "desc", "one")); + MockDataSource.setIterator("select * from x", l.iterator()); + ent.dataSrc = new MockDataSource(); + ent.isDocRoot = true; + DataImporter.RequestParams rp = new DataImporter.RequestParams(); + rp.command = "full-import"; + SolrWriterImpl swi = new SolrWriterImpl(); + di.runCmd(rp, swi); + Assert.assertEquals(Boolean.TRUE, swi.deleteAllCalled); + Assert.assertEquals(Boolean.TRUE, swi.commitCalled); + Assert.assertEquals(1, swi.docs.size()); + Assert.assertEquals(1, di.getDocBuilder().importStatistics.queryCount + .get()); + Assert + .assertEquals(1, di.getDocBuilder().importStatistics.docCount.get()); + Assert.assertEquals(1, di.getDocBuilder().importStatistics.rowsCount + .get()); + + for (int i = 0; i < l.size(); i++) { + Map map = (Map) l.get(i); + SolrInputDocument doc = swi.docs.get(i); + for (Map.Entry entry : map.entrySet()) { + Assert.assertEquals(entry.getValue(), doc.getFieldValue(entry + .getKey())); + } + } + } finally { + MockDataSource.clearCache(); + } + } + + @Test + public void testImportCommand() { + try { + DataImporter di = new DataImporter(); + di.loadAndInit(dc_singleEntity); + DataConfig cfg = di.getConfig(); + DataConfig.Entity ent = cfg.document.entities.get(0); + List l = new ArrayList(); + l.add(createMap("id", 1, "desc", "one")); + MockDataSource.setIterator("select * from x", l.iterator()); + ent.dataSrc = new MockDataSource(); + ent.isDocRoot = true; + 
DataImporter.RequestParams rp = new DataImporter.RequestParams(createMap("command", "import")); + SolrWriterImpl swi = new SolrWriterImpl(); + di.runCmd(rp, swi); + Assert.assertEquals(Boolean.FALSE, swi.deleteAllCalled); + Assert.assertEquals(Boolean.TRUE, swi.commitCalled); + Assert.assertEquals(1, swi.docs.size()); + Assert.assertEquals(1, di.getDocBuilder().importStatistics.queryCount + .get()); + Assert + .assertEquals(1, di.getDocBuilder().importStatistics.docCount.get()); + Assert.assertEquals(1, di.getDocBuilder().importStatistics.rowsCount + .get()); + + for (int i = 0; i < l.size(); i++) { + Map map = (Map) l.get(i); + SolrInputDocument doc = swi.docs.get(i); + for (Map.Entry entry : map.entrySet()) { + Assert.assertEquals(entry.getValue(), doc.getFieldValue(entry + .getKey())); + } + } + } finally { + MockDataSource.clearCache(); + } + } + + @Test + public void singleEntityMultipleRows() { + try { + DataImporter di = new DataImporter(); + di.loadAndInit(dc_singleEntity); + DataConfig cfg = di.getConfig(); + DataConfig.Entity ent = cfg.document.entities.get(0); + ent.isDocRoot = true; + DataImporter.RequestParams rp = new DataImporter.RequestParams(); + rp.command = "full-import"; + List l = new ArrayList(); + l.add(createMap("id", 1, "desc", "one")); + l.add(createMap("id", 2, "desc", "two")); + l.add(createMap("id", 3, "desc", "three")); + + MockDataSource.setIterator("select * from x", l.iterator()); + ent.dataSrc = new MockDataSource(); + SolrWriterImpl swi = new SolrWriterImpl(); + di.runCmd(rp, swi); + Assert.assertEquals(Boolean.TRUE, swi.deleteAllCalled); + Assert.assertEquals(Boolean.TRUE, swi.commitCalled); + Assert.assertEquals(3, swi.docs.size()); + for (int i = 0; i < l.size(); i++) { + Map map = (Map) l.get(i); + SolrInputDocument doc = swi.docs.get(i); + for (Map.Entry entry : map.entrySet()) { + Assert.assertEquals(entry.getValue(), doc.getFieldValue(entry.getKey())); + } + Assert.assertEquals(map.get("desc"), doc.getFieldValue("desc_s")); 
+ } + Assert.assertEquals(1, di.getDocBuilder().importStatistics.queryCount + .get()); + Assert + .assertEquals(3, di.getDocBuilder().importStatistics.docCount.get()); + Assert.assertEquals(3, di.getDocBuilder().importStatistics.rowsCount + .get()); + } finally { + MockDataSource.clearCache(); + } + } + + static class SolrWriterImpl extends SolrWriter { + List docs = new ArrayList(); + + Boolean deleteAllCalled = Boolean.FALSE; + + Boolean commitCalled = Boolean.FALSE; + + public SolrWriterImpl() { + super(null, "."); + } + + public boolean upload(SolrInputDocument doc) { + return docs.add(doc); + } + + public void log(int event, String name, Object row) { + // Do nothing + } + + public void doDeleteAll() { + deleteAllCalled = Boolean.TRUE; + } + + public void commit(boolean b) { + commitCalled = Boolean.TRUE; + } + } + + public static final String dc_singleEntity = "\n" + + "\n" + + " \n" + + " \n" + + " \n" + + " \n" + + " " + " \n" + + " \n" + ""; + + public static final String dc_deltaConfig = "\n" + + "\n" + + " \n" + + " \n" + + " \n" + + " \n" + + " " + " \n" + + " \n" + ""; + +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDocBuilder2.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDocBuilder2.java new file mode 100644 index 00000000000..f769dfb0a42 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDocBuilder2.java @@ -0,0 +1,361 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.Assert; +import org.apache.solr.request.LocalSolrQueryRequest; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Date; +import java.io.File; + +/** + *

    + * Test for DocBuilder using the test harness + *

    + * + * @version $Id$ + * @since solr 1.3 + */ +public class TestDocBuilder2 extends AbstractDataImportHandlerTest { + + @Before + public void setUp() throws Exception { + super.setUp(); + } + + @After + public void tearDown() throws Exception { + super.tearDown(); + } + + @Override + public String getSchemaFile() { + return "dataimport-schema.xml"; + } + + @Override + public String getSolrConfigFile() { + return "dataimport-solrconfig.xml"; + } + + @Test + @SuppressWarnings("unchecked") + public void testSingleEntity() throws Exception { + List rows = new ArrayList(); + rows.add(createMap("id", "1", "desc", "one")); + MockDataSource.setIterator("select * from x", rows.iterator()); + + super.runFullImport(loadDataConfig("single-entity-data-config.xml")); + + assertQ(req("id:1"), "//*[@numFound='1']"); + } + + @Test + @SuppressWarnings("unchecked") + public void testSingleEntity_CaseInsensitive() throws Exception { + List rows = new ArrayList(); + rows.add(createMap("id", "1", "desC", "one")); + MockDataSource.setIterator("select * from x", rows.iterator()); + + super.runFullImport(dataConfigWithCaseInsensitiveFields); + + assertQ(req("id:1"), "//*[@numFound='1']"); + assertTrue("Start event listener was not called", StartEventListener.executed); + assertTrue("End event listener was not called", EndEventListener.executed); + } + + @Test + @SuppressWarnings("unchecked") + public void testDynamicFields() throws Exception { + List rows = new ArrayList(); + rows.add(createMap("id", "1", "desc", "one")); + MockDataSource.setIterator("select * from x", rows.iterator()); + + super.runFullImport(dataConfigWithDynamicTransformer); + + assertQ(req("id:1"), "//*[@numFound='1']"); + assertQ(req("dynamic_s:test"), "//*[@numFound='1']"); + } + + @Test + @SuppressWarnings("unchecked") + public void testRequestParamsAsVariable() throws Exception { + List rows = new ArrayList(); + rows.add(createMap("id", "101", "desc", "ApacheSolr")); + MockDataSource.setIterator("select * from 
books where category='search'", rows.iterator()); + + LocalSolrQueryRequest request = lrf.makeRequest("command", "full-import", + "debug", "on", "clean", "true", "commit", "true", + "category", "search", + "dataConfig", requestParamAsVariable); + h.query("/dataimport", request); + assertQ(req("desc:ApacheSolr"), "//*[@numFound='1']"); + } + + @Test + @SuppressWarnings("unchecked") + public void testRequestParamsAsFieldName() throws Exception { + List rows = new ArrayList(); + rows.add(createMap("mypk", "101", "text", "ApacheSolr")); + MockDataSource.setIterator("select * from x", rows.iterator()); + + LocalSolrQueryRequest request = lrf.makeRequest("command", "full-import", + "debug", "on", "clean", "true", "commit", "true", + "mypk", "id", "text", "desc", + "dataConfig", dataConfigWithTemplatizedFieldNames); + h.query("/dataimport", request); + assertQ(req("id:101"), "//*[@numFound='1']"); + } + + @Test + @SuppressWarnings("unchecked") + public void testContext() throws Exception { + List rows = new ArrayList(); + rows.add(createMap("id", "1", "desc", "one")); + MockDataSource.setIterator("select * from x", rows.iterator()); + + super.runFullImport(loadDataConfig("data-config-with-transformer.xml")); + } + + @Test + @SuppressWarnings("unchecked") + public void testSkipDoc() throws Exception { + List rows = new ArrayList(); + rows.add(createMap("id", "1", "desc", "one")); + rows.add(createMap("id", "2", "desc", "two", "$skipDoc", "true")); + MockDataSource.setIterator("select * from x", rows.iterator()); + + super.runFullImport(dataConfigWithDynamicTransformer); + + assertQ(req("id:1"), "//*[@numFound='1']"); + assertQ(req("id:2"), "//*[@numFound='0']"); + } + + @Test + @SuppressWarnings("unchecked") + public void testSkipRow() throws Exception { + List rows = new ArrayList(); + rows.add(createMap("id", "1", "desc", "one")); + rows.add(createMap("id", "2", "desc", "two", "$skipRow", "true")); + MockDataSource.setIterator("select * from x", rows.iterator()); + + 
super.runFullImport(dataConfigWithDynamicTransformer); + + assertQ(req("id:1"), "//*[@numFound='1']"); + assertQ(req("id:2"), "//*[@numFound='0']"); + + MockDataSource.clearCache(); + + rows = new ArrayList(); + rows.add(createMap("id", "3", "desc", "one")); + rows.add(createMap("id", "4", "desc", "two")); + MockDataSource.setIterator("select * from x", rows.iterator()); + + rows = new ArrayList(); + rows.add(createMap("name_s", "abcd")); + MockDataSource.setIterator("3", rows.iterator()); + + rows = new ArrayList(); + rows.add(createMap("name_s", "xyz", "$skipRow", "true")); + MockDataSource.setIterator("4", rows.iterator()); + + super.runFullImport(dataConfigWithTwoEntities); + assertQ(req("id:3"), "//*[@numFound='1']"); + assertQ(req("id:4"), "//*[@numFound='1']"); + assertQ(req("name_s:abcd"), "//*[@numFound='1']"); + assertQ(req("name_s:xyz"), "//*[@numFound='0']"); + } + + @Test + @SuppressWarnings("unchecked") + public void testStopTransform() throws Exception { + List rows = new ArrayList(); + rows.add(createMap("id", "1", "desc", "one")); + rows.add(createMap("id", "2", "desc", "two", "$stopTransform", "true")); + MockDataSource.setIterator("select * from x", rows.iterator()); + + super.runFullImport(dataConfigForSkipTransform); + + assertQ(req("id:1"), "//*[@numFound='1']"); + assertQ(req("id:2"), "//*[@numFound='1']"); + assertQ(req("name_s:xyz"), "//*[@numFound='1']"); + } + + @Test + @SuppressWarnings("unchecked") + public void testDeleteDocs() throws Exception { + List rows = new ArrayList(); + rows.add(createMap("id", "1", "desc", "one")); + rows.add(createMap("id", "2", "desc", "two")); + rows.add(createMap("id", "3", "desc", "two", "$deleteDocById", "2")); + MockDataSource.setIterator("select * from x", rows.iterator()); + + super.runFullImport(dataConfigForSkipTransform); + + assertQ(req("id:1"), "//*[@numFound='1']"); + assertQ(req("id:2"), "//*[@numFound='0']"); + assertQ(req("id:3"), "//*[@numFound='1']"); + + MockDataSource.clearCache(); + 
rows = new ArrayList(); + rows.add(createMap("id", "1", "desc", "one")); + rows.add(createMap("id", "2", "desc", "one")); + rows.add(createMap("id", "3", "desc", "two", "$deleteDocByQuery", "desc:one")); + MockDataSource.setIterator("select * from x", rows.iterator()); + + super.runFullImport(dataConfigForSkipTransform); + + assertQ(req("id:1"), "//*[@numFound='0']"); + assertQ(req("id:2"), "//*[@numFound='0']"); + assertQ(req("id:3"), "//*[@numFound='1']"); + } + + @Test + public void testFileListEntityProcessor_lastIndexTime() throws Exception { + long time = System.currentTimeMillis(); + File tmpdir = new File("." + time); + tmpdir.mkdir(); + tmpdir.deleteOnExit(); + + Map params = createMap("baseDir", tmpdir.getAbsolutePath()); + + TestFileListEntityProcessor.createFile(tmpdir, "a.xml", "a.xml".getBytes(), true); + TestFileListEntityProcessor.createFile(tmpdir, "b.xml", "b.xml".getBytes(), true); + TestFileListEntityProcessor.createFile(tmpdir, "c.props", "c.props".getBytes(), true); + super.runFullImport(dataConfigFileList, params); + assertQ(req("*:*"), "//*[@numFound='3']"); + + // Add a new file after a full index is done + TestFileListEntityProcessor.createFile(tmpdir, "t.xml", "t.xml".getBytes(), false); + super.runFullImport(dataConfigFileList, params); + // we should find only 1 because by default clean=true is passed + // and this particular import should find only one file t.xml + assertQ(req("*:*"), "//*[@numFound='1']"); + } + + public static class MockTransformer extends Transformer { + public Object transformRow(Map row, Context context) { + Assert.assertTrue("Context gave incorrect data source", context.getDataSource("mockDs") instanceof MockDataSource2); + return row; + } + } + + public static class AddDynamicFieldTransformer extends Transformer { + public Object transformRow(Map row, Context context) { + // Add a dynamic field + row.put("dynamic_s", "test"); + return row; + } + } + + public static class MockDataSource2 extends MockDataSource { 
+ + } + + public static class StartEventListener implements EventListener { + public static boolean executed = false; + + public void onEvent(Context ctx) { + executed = true; + } + } + + public static class EndEventListener implements EventListener { + public static boolean executed = false; + + public void onEvent(Context ctx) { + executed = true; + } + } + + private final String requestParamAsVariable = "\n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + ""; + + private final String dataConfigWithDynamicTransformer = " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + ""; + + private final String dataConfigForSkipTransform = " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + ""; + + private final String dataConfigWithTwoEntities = "\n" + + " \n" + + " " + + " \n" + + " \n" + + " " + + " " + + " " + + " \n" + + " \n" + + ""; + + private final String dataConfigWithCaseInsensitiveFields = " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + ""; + + private final String dataConfigWithTemplatizedFieldNames = "\n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + ""; + + private final String dataConfigFileList = "\n" + + "\t\n" + + "\t\t\n" + + "\t\t\t\n" + + "\t\t\n" + + "\t\n" + + ""; +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestEntityProcessorBase.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestEntityProcessorBase.java new file mode 100644 index 00000000000..811979d2eca --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestEntityProcessorBase.java @@ -0,0 +1,83 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import org.junit.Assert; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + *

    + * Test for EntityProcessorBase + *

    + * + * @version $Id$ + * @since solr 1.3 + */ +public class TestEntityProcessorBase { + + @Test + public void multiTransformer() { + List> fields = new ArrayList>(); + Map entity = new HashMap(); + entity.put("transformer", T1.class.getName() + "," + T2.class.getName() + + "," + T3.class.getName()); + fields.add(TestRegexTransformer.getField("A", null, null, null, null)); + fields.add(TestRegexTransformer.getField("B", null, null, null, null)); + + Context context = AbstractDataImportHandlerTest.getContext(null, null, new MockDataSource(), Context.FULL_DUMP, + fields, entity); + Map src = new HashMap(); + src.put("A", "NA"); + src.put("B", "NA"); + EntityProcessorWrapper sep = new EntityProcessorWrapper(new SqlEntityProcessor(), null); + sep.init(context); + Map res = sep.applyTransformer(src); + Assert.assertNotNull(res.get("T1")); + Assert.assertNotNull(res.get("T2")); + Assert.assertNotNull(res.get("T3")); + } + + static class T1 extends Transformer { + + public Object transformRow(Map aRow, Context context) { + aRow.put("T1", "T1 called"); + return aRow; + + } + } + + static class T2 extends Transformer { + + public Object transformRow(Map aRow, Context context) { + aRow.put("T2", "T2 called"); + return aRow; + } + } + + static class T3 { + + public Object transformRow(Map aRow) { + aRow.put("T3", "T3 called"); + return aRow; + } + } +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestErrorHandling.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestErrorHandling.java new file mode 100644 index 00000000000..111ae297920 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestErrorHandling.java @@ -0,0 +1,176 @@ +package org.apache.solr.handler.dataimport; +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import java.io.Reader; +import java.io.StringReader; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +/** + * Tests exception handling during imports in DataImportHandler + * + * @version $Id$ + * @since solr 1.4 + */ +public class TestErrorHandling extends AbstractDataImportHandlerTest { + + public void testMalformedStreamingXml() throws Exception { + StringDataSource.xml = malformedXml; + super.runFullImport(dataConfigWithStreaming); + assertQ(req("id:1"), "//*[@numFound='1']"); + assertQ(req("id:2"), "//*[@numFound='1']"); + } + + public void testMalformedNonStreamingXml() throws Exception { + StringDataSource.xml = malformedXml; + super.runFullImport(dataConfigWithoutStreaming); + assertQ(req("id:1"), "//*[@numFound='1']"); + assertQ(req("id:2"), "//*[@numFound='1']"); + } + + public void testAbortOnError() throws Exception { + StringDataSource.xml = malformedXml; + super.runFullImport(dataConfigAbortOnError); + assertQ(req("*:*"), "//*[@numFound='0']"); + } + + public void testTransformerErrorContinue() throws Exception { + StringDataSource.xml = wellformedXml; + List> rows = new ArrayList>(); + rows.add(createMap("id", "3", "desc", "exception-transformer")); + MockDataSource.setIterator("select * from foo", 
rows.iterator()); + super.runFullImport(dataConfigWithTransformer); + assertQ(req("*:*"), "//*[@numFound='3']"); + } + + @Override + public String getSchemaFile() { + return "dataimport-schema.xml"; + } + + @Override + public String getSolrConfigFile() { + return "dataimport-solrconfig.xml"; + } + + @Override + public void setUp() throws Exception { + super.setUp(); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + } + + public static class StringDataSource extends DataSource { + public static String xml = ""; + + public void init(Context context, Properties initProps) { + } + + public Reader getData(String query) { + return new StringReader(xml); + } + + public void close() { + + } + } + + public static class ExceptionTransformer extends Transformer { + public Object transformRow(Map row, Context context) { + throw new RuntimeException("Test exception"); + } + } + + private String dataConfigWithStreaming = "\n" + + " " + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + ""; + + private String dataConfigWithoutStreaming = "\n" + + " " + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + ""; + + private String dataConfigAbortOnError = "\n" + + " " + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + ""; + + private String dataConfigWithTransformer = "\n" + + " " + + "" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " " + + " \n" + + " \n" + + ""; + + private String malformedXml = "\n" + + " \n" + + " 1\n" + + " test1\n" + + " \n" + + " \n" + + " 2\n" + + " test2\n" + + " \n" + + " \n" + + " 3\n" + + " test3\n" + + " \n" + + ""; + + private String wellformedXml = "\n" + + " \n" + + " 1\n" + + " test1\n" + + " \n" + + " \n" + + " 2\n" + + " test2\n" + + " \n" + + " \n" + + " 3\n" + + " test3\n" + + " \n" + + ""; +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestEvaluatorBag.java 
b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestEvaluatorBag.java new file mode 100644 index 00000000000..98006c327b1 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestEvaluatorBag.java @@ -0,0 +1,158 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import static org.junit.Assert.assertEquals; +import org.junit.Before; +import org.junit.Ignore; +import org.junit.Test; + +import java.net.URLEncoder; +import java.text.SimpleDateFormat; +import java.util.*; + +import junit.framework.Assert; + +/** + *

    Test for EvaluatorBag

    + * + * @version $Id$ + * @since solr 1.3 + */ +public class TestEvaluatorBag { + private static final String ENCODING = "UTF-8"; + + VariableResolverImpl resolver; + + Map sqlTests; + + Map urlTests; + + @Before + public void setUp() throws Exception { + resolver = new VariableResolverImpl(); + + sqlTests = new HashMap(); + + sqlTests.put("foo\"", "foo\"\""); + sqlTests.put("foo\\", "foo\\\\"); + sqlTests.put("foo'", "foo''"); + sqlTests.put("foo''", "foo''''"); + sqlTests.put("'foo\"", "''foo\"\""); + sqlTests.put("\"Albert D'souza\"", "\"\"Albert D''souza\"\""); + + urlTests = new HashMap(); + + urlTests.put("*:*", URLEncoder.encode("*:*", ENCODING)); + urlTests.put("price:[* TO 200]", URLEncoder.encode("price:[* TO 200]", + ENCODING)); + urlTests.put("review:\"hybrid sedan\"", URLEncoder.encode( + "review:\"hybrid sedan\"", ENCODING)); + } + + /** + * Test method for {@link EvaluatorBag#getSqlEscapingEvaluator()}. + */ + @Test + public void testGetSqlEscapingEvaluator() { + Evaluator sqlEscaper = EvaluatorBag.getSqlEscapingEvaluator(); + runTests(sqlTests, sqlEscaper); + } + + /** + * Test method for {@link EvaluatorBag#getUrlEvaluator()}. 
+ */ + @Test + public void testGetUrlEvaluator() throws Exception { + Evaluator urlEvaluator = EvaluatorBag.getUrlEvaluator(); + runTests(urlTests, urlEvaluator); + } + + @Test + public void parseParams() { + Map m = new HashMap(); + m.put("b","B"); + VariableResolverImpl vr = new VariableResolverImpl(); + vr.addNamespace("a",m); + List l = EvaluatorBag.parseParams(" 1 , a.b, 'hello!', 'ds,o,u\'za',",vr); + Assert.assertEquals(new Double(1),l.get(0)); + Assert.assertEquals("B",((EvaluatorBag.VariableWrapper)l.get(1)).resolve()); + Assert.assertEquals("hello!",l.get(2)); + Assert.assertEquals("ds,o,u'za",l.get(3)); + } + + @Test + public void testEscapeSolrQueryFunction() { + final VariableResolverImpl resolver = new VariableResolverImpl(); + ContextImpl context = new ContextImpl(null, resolver, null, Context.FULL_DUMP, Collections.EMPTY_MAP, null, null); + Context.CURRENT_CONTEXT.set(context); + try { + Map m= new HashMap(); + m.put("query","c:t"); + resolver.addNamespace("dataimporter.functions", EvaluatorBag + .getFunctionsNamespace(Collections.EMPTY_LIST, null)); + resolver.addNamespace("e",m); + String s = resolver + .replaceTokens("${dataimporter.functions.escapeQueryChars(e.query)}"); + org.junit.Assert.assertEquals("c\\:t", s); + } finally { + Context.CURRENT_CONTEXT.remove(); + } + } + + /** + * Test method for {@link EvaluatorBag#getDateFormatEvaluator()}. 
+ */ + @Test + public void testGetDateFormatEvaluator() { + Evaluator dateFormatEval = EvaluatorBag.getDateFormatEvaluator(); + ContextImpl context = new ContextImpl(null, resolver, null, Context.FULL_DUMP, Collections.EMPTY_MAP, null, null); + Context.CURRENT_CONTEXT.set(context); + try { + long time = System.currentTimeMillis(); + assertEquals(new SimpleDateFormat("yyyy-MM-dd HH:mm").format(new Date(time - 2*86400*1000)), + dateFormatEval.evaluate("'NOW-2DAYS','yyyy-MM-dd HH:mm'", Context.CURRENT_CONTEXT.get())); + + Map map = new HashMap(); + map.put("key", new Date(time)); + resolver.addNamespace("A", map); + + assertEquals(new SimpleDateFormat("yyyy-MM-dd HH:mm").format(new Date(time)), + dateFormatEval.evaluate("A.key, 'yyyy-MM-dd HH:mm'", Context.CURRENT_CONTEXT.get())); + } finally { + Context.CURRENT_CONTEXT.remove(); + } + } + + private void runTests(Map tests, Evaluator evaluator) { + ContextImpl ctx = new ContextImpl(null, resolver, null, Context.FULL_DUMP, Collections.EMPTY_MAP, null, null); + Context.CURRENT_CONTEXT.set(ctx); + try { + for (Map.Entry entry : tests.entrySet()) { + Map values = new HashMap(); + values.put("key", entry.getKey()); + resolver.addNamespace("A", values); + + String expected = (String) entry.getValue(); + String actual = evaluator.evaluate("A.key", ctx); + assertEquals(expected, actual); + } + } finally { + Context.CURRENT_CONTEXT.remove(); + } + } +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestFieldReader.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestFieldReader.java new file mode 100644 index 00000000000..ac92190c435 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestFieldReader.java @@ -0,0 +1,66 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import junit.framework.Assert; +import static org.apache.solr.handler.dataimport.AbstractDataImportHandlerTest.createMap; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Test for FieldReaderDataSource + * + * @version $Id$ + * @see org.apache.solr.handler.dataimport.FieldReaderDataSource + * @since 1.4 + */ +public class TestFieldReader { + + @Test + public void simple() { + DataImporter di = new DataImporter(); + di.loadAndInit(config); + TestDocBuilder.SolrWriterImpl sw = new TestDocBuilder.SolrWriterImpl(); + DataImporter.RequestParams rp = new DataImporter.RequestParams(createMap("command", "full-import")); + List> l = new ArrayList>(); + l.add(createMap("xml", xml)); + MockDataSource.setIterator("select * from a", l.iterator()); + di.runCmd(rp, sw); + Assert.assertEquals(sw.docs.get(0).getFieldValue("y"), "Hello"); + MockDataSource.clearCache(); + } + + String config = "\n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + ""; + + String xml = "\n" + + " Hello\n" + + ""; +} diff --git 
a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestFileListEntityProcessor.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestFileListEntityProcessor.java new file mode 100644 index 00000000000..d2bcd607af0 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestFileListEntityProcessor.java @@ -0,0 +1,201 @@ +package org.apache.solr.handler.dataimport; +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.junit.Assert; +import org.junit.Test; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.util.*; + +/** + *

    + * Test for FileListEntityProcessor + *

    + * + * @version $Id$ + * @since solr 1.3 + */ +public class TestFileListEntityProcessor { + + @Test + @SuppressWarnings("unchecked") + public void testSimple() throws IOException { + long time = System.currentTimeMillis(); + File tmpdir = new File("." + time); + tmpdir.mkdir(); + tmpdir.deleteOnExit(); + createFile(tmpdir, "a.xml", "a.xml".getBytes(), false); + createFile(tmpdir, "b.xml", "b.xml".getBytes(), false); + createFile(tmpdir, "c.props", "c.props".getBytes(), false); + Map attrs = AbstractDataImportHandlerTest.createMap( + FileListEntityProcessor.FILE_NAME, "xml$", + FileListEntityProcessor.BASE_DIR, tmpdir.getAbsolutePath()); + Context c = AbstractDataImportHandlerTest.getContext(null, + new VariableResolverImpl(), null, Context.FULL_DUMP, Collections.EMPTY_LIST, attrs); + FileListEntityProcessor fileListEntityProcessor = new FileListEntityProcessor(); + fileListEntityProcessor.init(c); + List fList = new ArrayList(); + while (true) { + Map f = fileListEntityProcessor.nextRow(); + if (f == null) + break; + fList.add((String) f.get(FileListEntityProcessor.ABSOLUTE_FILE)); + } + Assert.assertEquals(2, fList.size()); + } + + @Test + public void testBiggerSmallerFiles() throws IOException { + long time = System.currentTimeMillis(); + File tmpdir = new File("." 
+ time); + tmpdir.mkdir(); + tmpdir.deleteOnExit(); + long minLength = Long.MAX_VALUE; + String smallestFile = ""; + byte[] content = "abcdefgij".getBytes("UTF-8"); + createFile(tmpdir, "a.xml", content, false); + if (minLength > content.length) { + minLength = content.length; + smallestFile = "a.xml"; + } + content = "abcdefgij".getBytes("UTF-8"); + createFile(tmpdir, "b.xml", content, false); + if (minLength > content.length) { + minLength = content.length; + smallestFile = "b.xml"; + } + content = "abc".getBytes("UTF-8"); + createFile(tmpdir, "c.props", content, false); + if (minLength > content.length) { + minLength = content.length; + smallestFile = "c.props"; + } + Map attrs = AbstractDataImportHandlerTest.createMap( + FileListEntityProcessor.FILE_NAME, ".*", + FileListEntityProcessor.BASE_DIR, tmpdir.getAbsolutePath(), + FileListEntityProcessor.BIGGER_THAN, String.valueOf(minLength)); + List fList = getFiles(null, attrs); + Assert.assertEquals(2, fList.size()); + Set l = new HashSet(); + l.add(new File(tmpdir, "a.xml").getAbsolutePath()); + l.add(new File(tmpdir, "b.xml").getAbsolutePath()); + Assert.assertEquals(l, new HashSet(fList)); + attrs = AbstractDataImportHandlerTest.createMap( + FileListEntityProcessor.FILE_NAME, ".*", + FileListEntityProcessor.BASE_DIR, tmpdir.getAbsolutePath(), + FileListEntityProcessor.SMALLER_THAN, String.valueOf(minLength+1)); + fList = getFiles(null, attrs); + l.clear(); + l.add(new File(tmpdir, smallestFile).getAbsolutePath()); + Assert.assertEquals(l, new HashSet(fList)); + attrs = AbstractDataImportHandlerTest.createMap( + FileListEntityProcessor.FILE_NAME, ".*", + FileListEntityProcessor.BASE_DIR, tmpdir.getAbsolutePath(), + FileListEntityProcessor.SMALLER_THAN, "${a.x}"); + VariableResolverImpl resolver = new VariableResolverImpl(); + resolver.addNamespace("a", AbstractDataImportHandlerTest.createMap("x", "4")); + fList = getFiles(resolver, attrs); + Assert.assertEquals(l, new HashSet(fList)); + } + + 
@SuppressWarnings("unchecked") + static List getFiles(VariableResolverImpl resolver, Map attrs) { + Context c = AbstractDataImportHandlerTest.getContext(null, + resolver, null, Context.FULL_DUMP, Collections.EMPTY_LIST, attrs); + FileListEntityProcessor fileListEntityProcessor = new FileListEntityProcessor(); + fileListEntityProcessor.init(c); + List fList = new ArrayList(); + while (true) { + Map f = fileListEntityProcessor.nextRow(); + if (f == null) + break; + fList.add((String) f.get(FileListEntityProcessor.ABSOLUTE_FILE)); + } + return fList; + } + + @Test + public void testNTOT() throws IOException { + long time = System.currentTimeMillis(); + File tmpdir = new File("." + time); + tmpdir.mkdir(); + tmpdir.deleteOnExit(); + createFile(tmpdir, "a.xml", "a.xml".getBytes(), true); + createFile(tmpdir, "b.xml", "b.xml".getBytes(), true); + createFile(tmpdir, "c.props", "c.props".getBytes(), true); + Map attrs = AbstractDataImportHandlerTest.createMap( + FileListEntityProcessor.FILE_NAME, "xml$", + FileListEntityProcessor.BASE_DIR, tmpdir.getAbsolutePath(), + FileListEntityProcessor.OLDER_THAN, "'NOW'"); + List fList = getFiles(null, attrs); + Assert.assertEquals(2, fList.size()); + attrs = AbstractDataImportHandlerTest.createMap( + FileListEntityProcessor.FILE_NAME, ".xml$", + FileListEntityProcessor.BASE_DIR, tmpdir.getAbsolutePath(), + FileListEntityProcessor.NEWER_THAN, "'NOW-2HOURS'"); + fList = getFiles(null, attrs); + Assert.assertEquals(2, fList.size()); + + // Use a variable for newerThan + attrs = AbstractDataImportHandlerTest.createMap( + FileListEntityProcessor.FILE_NAME, ".xml$", + FileListEntityProcessor.BASE_DIR, tmpdir.getAbsolutePath(), + FileListEntityProcessor.NEWER_THAN, "${a.x}"); + VariableResolverImpl resolver = new VariableResolverImpl(); + String lastMod = DataImporter.DATE_TIME_FORMAT.get().format(new Date(System.currentTimeMillis() - 50000)); + resolver.addNamespace("a", AbstractDataImportHandlerTest.createMap("x", lastMod)); + 
createFile(tmpdir, "t.xml", "t.xml".getBytes(), false); + fList = getFiles(resolver, attrs); + Assert.assertEquals(1, fList.size()); + Assert.assertEquals("File name must be t.xml", new File(tmpdir, "t.xml").getAbsolutePath(), fList.get(0)); + } + + @Test + public void testRECURSION() throws IOException { + long time = System.currentTimeMillis(); + File tmpdir = new File("." + time); + tmpdir.mkdir(); + tmpdir.deleteOnExit(); + File childdir = new File(tmpdir + "/child" ); + childdir.mkdirs(); + childdir.deleteOnExit(); + createFile(childdir, "a.xml", "a.xml".getBytes(), true); + createFile(childdir, "b.xml", "b.xml".getBytes(), true); + createFile(childdir, "c.props", "c.props".getBytes(), true); + Map attrs = AbstractDataImportHandlerTest.createMap( + FileListEntityProcessor.FILE_NAME, "^.*\\.xml$", + FileListEntityProcessor.BASE_DIR, childdir.getAbsolutePath(), + FileListEntityProcessor.RECURSIVE, "true"); + List fList = getFiles(null, attrs); + Assert.assertEquals(2, fList.size()); + } + + public static File createFile(File tmpdir, String name, byte[] content, + boolean changeModifiedTime) throws IOException { + File file = new File(tmpdir.getAbsolutePath() + File.separator + name); + file.deleteOnExit(); + FileOutputStream f = new FileOutputStream(file); + f.write(content); + f.close(); + if (changeModifiedTime) + file.setLastModified(System.currentTimeMillis() - 3600000); + return file; + } +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestJdbcDataSource.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestJdbcDataSource.java new file mode 100644 index 00000000000..08ac625b8d7 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestJdbcDataSource.java @@ -0,0 +1,182 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import java.sql.Connection; +import java.sql.Driver; +import java.sql.DriverManager; +import java.util.*; + +import javax.sql.DataSource; + +import org.easymock.EasyMock; +import org.easymock.IMocksControl; +import org.junit.*; + +/** + *

    + * Test for JdbcDataSource + *

    + *

    + *

    + * Note: The tests are ignored for the lack of DB support for testing + *

    + * + * @version $Id$ + * @since solr 1.3 + */ +public class TestJdbcDataSource { + Driver driver; + DataSource dataSource; + Connection connection; + IMocksControl mockControl; + JdbcDataSource jdbcDataSource = new JdbcDataSource(); + List> fields = new ArrayList>(); + + Context context = AbstractDataImportHandlerTest.getContext(null, null, + jdbcDataSource, Context.FULL_DUMP, fields, null); + + Properties props = new Properties(); + + String sysProp = System.getProperty("java.naming.factory.initial"); + + @Before + public void SetUp() throws ClassNotFoundException { + System.setProperty("java.naming.factory.initial", + MockInitialContextFactory.class.getName()); + + mockControl = EasyMock.createStrictControl(); + driver = mockControl.createMock(Driver.class); + dataSource = mockControl.createMock(DataSource.class); + connection = mockControl.createMock(Connection.class); + } + + @After + public void tearDown() { + if (sysProp == null) { + System.getProperties().remove("java.naming.factory.initial"); + } else { + System.setProperty("java.naming.factory.initial", sysProp); + } + } + + @Test + public void retrieveFromJndi() throws Exception { + MockInitialContextFactory.bind("java:comp/env/jdbc/JndiDB", dataSource); + + props.put(JdbcDataSource.JNDI_NAME, "java:comp/env/jdbc/JndiDB"); + + EasyMock.expect(dataSource.getConnection()).andReturn(connection); + connection.setAutoCommit(false); +// connection.setHoldability(1); + + mockControl.replay(); + + Connection conn = jdbcDataSource.createConnectionFactory(context, props) + .call(); + + mockControl.verify(); + + Assert.assertSame("connection", conn, connection); + } + + @Test + public void retrieveFromJndiWithCredentials() throws Exception { + MockInitialContextFactory.bind("java:comp/env/jdbc/JndiDB", dataSource); + + props.put(JdbcDataSource.JNDI_NAME, "java:comp/env/jdbc/JndiDB"); + props.put("user", "Fred"); + props.put("password", "4r3d"); + props.put("holdability", "HOLD_CURSORS_OVER_COMMIT"); + + 
EasyMock.expect(dataSource.getConnection("Fred", "4r3d")).andReturn( + connection); + connection.setAutoCommit(false); + connection.setHoldability(1); + + mockControl.replay(); + + Connection conn = jdbcDataSource.createConnectionFactory(context, props) + .call(); + + mockControl.verify(); + + Assert.assertSame("connection", conn, connection); + } + + @Test + public void retrieveFromDriverManager() throws Exception { + DriverManager.registerDriver(driver); + + EasyMock.expect( + driver.connect((String) EasyMock.notNull(), (Properties) EasyMock + .notNull())).andReturn(connection); + connection.setAutoCommit(false); + connection.setHoldability(1); + + props.put(JdbcDataSource.DRIVER, driver.getClass().getName()); + props.put(JdbcDataSource.URL, "jdbc:fakedb"); + props.put("holdability", "HOLD_CURSORS_OVER_COMMIT"); + mockControl.replay(); + + Connection conn = jdbcDataSource.createConnectionFactory(context, props) + .call(); + + mockControl.verify(); + + Assert.assertSame("connection", conn, connection); + } + + @Test + @Ignore + public void basic() throws Exception { + JdbcDataSource dataSource = new JdbcDataSource(); + Properties p = new Properties(); + p.put("driver", "com.mysql.jdbc.Driver"); + p.put("url", "jdbc:mysql://localhost/autos"); + p.put("user", "root"); + p.put("password", ""); + + List> flds = new ArrayList>(); + Map f = new HashMap(); + f.put("column", "trim_id"); + f.put("type", "long"); + flds.add(f); + f = new HashMap(); + f.put("column", "msrp"); + f.put("type", "float"); + flds.add(f); + + Context c = AbstractDataImportHandlerTest.getContext(null, null, + dataSource, Context.FULL_DUMP, flds, null); + dataSource.init(c, p); + Iterator> i = dataSource + .getData("select make,model,year,msrp,trim_id from atrimlisting where make='Acura'"); + int count = 0; + Object msrp = null; + Object trim_id = null; + while (i.hasNext()) { + Map map = i.next(); + msrp = map.get("msrp"); + trim_id = map.get("trim_id"); + count++; + } + Assert.assertEquals(5, 
count); + Assert.assertEquals(Float.class, msrp.getClass()); + Assert.assertEquals(Long.class, trim_id.getClass()); + } +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestLineEntityProcessor.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestLineEntityProcessor.java new file mode 100644 index 00000000000..1a8d96ccbb3 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestLineEntityProcessor.java @@ -0,0 +1,250 @@ +package org.apache.solr.handler.dataimport; +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.junit.Assert; +import org.junit.Test; + +import java.io.IOException; +import java.io.Reader; +import java.io.StringReader; +import java.util.*; + + +/** + *

    Test for TestLineEntityProcessor

    + * + * @version $Id$ + * @since solr 1.4 + */ +public class TestLineEntityProcessor { + + @Test + /************************************************************************/ + public void simple() throws IOException { + + /* we want to create the equiv of :- + * + */ + + Map attrs = AbstractDataImportHandlerTest.createMap( + LineEntityProcessor.URL, "dummy.lis", + LineEntityProcessor.ACCEPT_LINE_REGEX, null, + LineEntityProcessor.SKIP_LINE_REGEX, null + ); + + Context c = AbstractDataImportHandlerTest.getContext( + null, //parentEntity + new VariableResolverImpl(), //resolver + getDataSource(filecontents), //parentDataSource + Context.FULL_DUMP, //currProcess + Collections.EMPTY_LIST, //entityFields + attrs //entityAttrs + ); + LineEntityProcessor ep = new LineEntityProcessor(); + ep.init(c); + + /// call the entity processor to the list of lines + System.out.print("\n"); + List fList = new ArrayList(); + while (true) { + Map f = ep.nextRow(); + if (f == null) break; + fList.add((String) f.get("rawLine")); + System.out.print(" rawLine='" + f.get("rawLine") + "'\n"); + } + Assert.assertEquals(24, fList.size()); + } + + @Test + /************************************************************************/ + public void only_xml_files() throws IOException { + + /* we want to create the equiv of :- + * + */ + Map attrs = AbstractDataImportHandlerTest.createMap( + LineEntityProcessor.URL, "dummy.lis", + LineEntityProcessor.ACCEPT_LINE_REGEX, "xml", + LineEntityProcessor.SKIP_LINE_REGEX, null + ); + + Context c = AbstractDataImportHandlerTest.getContext( + null, //parentEntity + new VariableResolverImpl(), //resolver + getDataSource(filecontents), //parentDataSource + Context.FULL_DUMP, //currProcess + Collections.EMPTY_LIST, //entityFields + attrs //entityAttrs + ); + LineEntityProcessor ep = new LineEntityProcessor(); + ep.init(c); + + /// call the entity processor to the list of lines + List fList = new ArrayList(); + while (true) { + Map f = ep.nextRow(); + if (f == 
null) break; + fList.add((String) f.get("rawLine")); + } + Assert.assertEquals(5, fList.size()); + } + + @Test + /************************************************************************/ + public void only_xml_files_no_xsd() throws IOException { + /* we want to create the equiv of :- + * + */ + Map attrs = AbstractDataImportHandlerTest.createMap( + LineEntityProcessor.URL, "dummy.lis", + LineEntityProcessor.ACCEPT_LINE_REGEX, "\\.xml", + LineEntityProcessor.SKIP_LINE_REGEX, "\\.xsd" + ); + + Context c = AbstractDataImportHandlerTest.getContext( + null, //parentEntity + new VariableResolverImpl(), //resolver + getDataSource(filecontents), //parentDataSource + Context.FULL_DUMP, //currProcess + Collections.EMPTY_LIST, //entityFields + attrs //entityAttrs + ); + LineEntityProcessor ep = new LineEntityProcessor(); + ep.init(c); + + /// call the entity processor to walk the directory + List fList = new ArrayList(); + while (true) { + Map f = ep.nextRow(); + if (f == null) break; + fList.add((String) f.get("rawLine")); + } + Assert.assertEquals(4, fList.size()); + } + + @Test + /************************************************************************/ + public void no_xsd_files() throws IOException { + /* we want to create the equiv of :- + * + */ + Map attrs = AbstractDataImportHandlerTest.createMap( + LineEntityProcessor.URL, "dummy.lis", + LineEntityProcessor.SKIP_LINE_REGEX, "\\.xsd" + ); + + Context c = AbstractDataImportHandlerTest.getContext( + null, //parentEntity + new VariableResolverImpl(), //resolver + getDataSource(filecontents), //parentDataSource + Context.FULL_DUMP, //currProcess + Collections.EMPTY_LIST, //entityFields + attrs //entityAttrs + ); + LineEntityProcessor ep = new LineEntityProcessor(); + ep.init(c); + + /// call the entity processor to walk the directory + List fList = new ArrayList(); + while (true) { + Map f = ep.nextRow(); + if (f == null) break; + fList.add((String) f.get("rawLine")); + } + Assert.assertEquals(18, fList.size()); + } + + 
/** + * ******************************************************************** + */ + public static Map createField( + String col, // DIH column name + String type, // field type from schema.xml + String srcCol, // DIH transformer attribute 'sourceColName' + String re, // DIH regex attribute 'regex' + String rw, // DIH regex attribute 'replaceWith' + String gn // DIH regex attribute 'groupNames' + ) { + HashMap vals = new HashMap(); + vals.put("column", col); + vals.put("type", type); + vals.put("sourceColName", srcCol); + vals.put("regex", re); + vals.put("replaceWith", rw); + vals.put("groupNames", gn); + return vals; + } + + private DataSource getDataSource(final String xml) { + return new DataSource() { + public void init(Context context, Properties initProps) { + } + + public void close() { + } + + public Reader getData(String query) { + return new StringReader(xml); + } + }; + } + + private static final String filecontents = + "\n" + + "# this is what the output from 'find . -ls; looks like, athough the format\n" + + "# of the time stamp varies depending on the age of the file and your LANG \n" + + "# env setting\n" + + "412577 0 drwxr-xr-x 6 user group 204 1 Apr 10:53 /Volumes/spare/ts\n" + + "412582 0 drwxr-xr-x 13 user group 442 1 Apr 10:18 /Volumes/spare/ts/config\n" + + "412583 24 -rwxr-xr-x 1 user group 8318 1 Apr 11:10 /Volumes/spare/ts/config/dc.xsd\n" + + "412584 32 -rwxr-xr-x 1 user group 12847 1 Apr 11:10 /Volumes/spare/ts/config/dcterms.xsd\n" + + "412585 8 -rwxr-xr-x 1 user group 3156 1 Apr 11:10 /Volumes/spare/ts/config/s-deliver.css\n" + + "412586 192 -rwxr-xr-x 1 user group 97764 1 Apr 11:10 /Volumes/spare/ts/config/s-deliver.xsl\n" + + "412587 224 -rwxr-xr-x 1 user group 112700 1 Apr 11:10 /Volumes/spare/ts/config/sml-delivery-2.1.xsd\n" + + "412588 208 -rwxr-xr-x 1 user group 103419 1 Apr 11:10 /Volumes/spare/ts/config/sml-delivery-norm-2.0.dtd\n" + + "412589 248 -rwxr-xr-x 1 user group 125296 1 Apr 11:10 
/Volumes/spare/ts/config/sml-delivery-norm-2.1.dtd\n" + + "412590 72 -rwxr-xr-x 1 user group 36256 1 Apr 11:10 /Volumes/spare/ts/config/jm.xsd\n" + + "412591 8 -rwxr-xr-x 1 user group 990 1 Apr 11:10 /Volumes/spare/ts/config/video.gif\n" + + "412592 8 -rwxr-xr-x 1 user group 1498 1 Apr 11:10 /Volumes/spare/ts/config/xlink.xsd\n" + + "412593 8 -rwxr-xr-x 1 user group 1155 1 Apr 11:10 /Volumes/spare/ts/config/xml.xsd\n" + + "412594 0 drwxr-xr-x 4 user group 136 1 Apr 10:18 /Volumes/spare/ts/acm19\n" + + "412621 0 drwxr-xr-x 57 user group 1938 1 Apr 10:18 /Volumes/spare/ts/acm19/data\n" + + "412622 24 -rwxr-xr-x 1 user group 8894 1 Apr 11:09 /Volumes/spare/ts/acm19/data/00000510.xml\n" + + "412623 32 -rwxr-xr-x 1 user group 14124 1 Apr 11:09 /Volumes/spare/ts/acm19/data/00000603.xml\n" + + "412624 24 -rwxr-xr-x 1 user group 11976 1 Apr 11:09 /Volumes/spare/ts/acm19/data/00001292.xml\n" + + "# tacked on an extra line to cause a file to be deleted.\n" + + "DELETE /Volumes/spare/ts/acm19/data/00001292old.xml\n" + + ""; + +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestNumberFormatTransformer.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestNumberFormatTransformer.java new file mode 100644 index 00000000000..09d877d466d --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestNumberFormatTransformer.java @@ -0,0 +1,166 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import org.junit.Assert; +import org.junit.Test; + +import java.text.DecimalFormatSymbols; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +/** + *

    + * Test for NumberFormatTransformer + *

    + * + * @version $Id$ + * @since solr 1.3 + */ +public class TestNumberFormatTransformer { + private char GROUPING_SEP = new DecimalFormatSymbols().getGroupingSeparator(); + private char DECIMAL_SEP = new DecimalFormatSymbols().getDecimalSeparator(); + + @Test + @SuppressWarnings("unchecked") + public void testTransformRow_SingleNumber() { + char GERMAN_GROUPING_SEP = new DecimalFormatSymbols(Locale.GERMANY).getGroupingSeparator(); + List l = new ArrayList(); + l.add(AbstractDataImportHandlerTest.createMap("column", "num", + NumberFormatTransformer.FORMAT_STYLE, NumberFormatTransformer.NUMBER)); + l.add(AbstractDataImportHandlerTest.createMap("column", "localizedNum", + NumberFormatTransformer.FORMAT_STYLE, NumberFormatTransformer.NUMBER, NumberFormatTransformer.LOCALE, "de-DE")); + Context c = AbstractDataImportHandlerTest.getContext(null, null, null, Context.FULL_DUMP, l, null); + Map m = AbstractDataImportHandlerTest.createMap("num", "123" + GROUPING_SEP + "567", "localizedNum", "123" + GERMAN_GROUPING_SEP + "567"); + new NumberFormatTransformer().transformRow(m, c); + Assert.assertEquals(new Long(123567), m.get("num")); + Assert.assertEquals(new Long(123567), m.get("localizedNum")); + } + + @Test + @SuppressWarnings("unchecked") + public void testTransformRow_MultipleNumbers() throws Exception { + List fields = new ArrayList(); + fields.add(AbstractDataImportHandlerTest.createMap(DataImporter.COLUMN, + "inputs")); + fields.add(AbstractDataImportHandlerTest.createMap(DataImporter.COLUMN, + "outputs", RegexTransformer.SRC_COL_NAME, "inputs", + NumberFormatTransformer.FORMAT_STYLE, NumberFormatTransformer.NUMBER)); + + List inputs = new ArrayList(); + inputs.add("123" + GROUPING_SEP + "567"); + inputs.add("245" + GROUPING_SEP + "678"); + Map row = AbstractDataImportHandlerTest.createMap("inputs", inputs); + + VariableResolverImpl resolver = new VariableResolverImpl(); + resolver.addNamespace("e", row); + + Context context = 
AbstractDataImportHandlerTest.getContext(null, resolver, null, Context.FULL_DUMP, fields, null); + new NumberFormatTransformer().transformRow(row, context); + + List output = new ArrayList(); + output.add(new Long(123567)); + output.add(new Long(245678)); + Map outputRow = AbstractDataImportHandlerTest.createMap("inputs", inputs, + "outputs", output); + + Assert.assertEquals(outputRow, row); + } + + @Test(expected = DataImportHandlerException.class) + @SuppressWarnings("unchecked") + public void testTransformRow_InvalidInput1_Number() { + List l = new ArrayList(); + l.add(AbstractDataImportHandlerTest.createMap("column", "num", + NumberFormatTransformer.FORMAT_STYLE, NumberFormatTransformer.NUMBER)); + Context c = AbstractDataImportHandlerTest.getContext(null, null, null, Context.FULL_DUMP, l, null); + Map m = AbstractDataImportHandlerTest.createMap("num", "123" + GROUPING_SEP + "5a67"); + new NumberFormatTransformer().transformRow(m, c); + } + + @Test(expected = DataImportHandlerException.class) + @SuppressWarnings("unchecked") + public void testTransformRow_InvalidInput2_Number() { + List l = new ArrayList(); + l.add(AbstractDataImportHandlerTest.createMap("column", "num", + NumberFormatTransformer.FORMAT_STYLE, NumberFormatTransformer.NUMBER)); + Context c = AbstractDataImportHandlerTest.getContext(null, null, null, Context.FULL_DUMP, l, null); + Map m = AbstractDataImportHandlerTest.createMap("num", "123" + GROUPING_SEP + "567b"); + new NumberFormatTransformer().transformRow(m, c); + } + + @Test(expected = DataImportHandlerException.class) + @SuppressWarnings("unchecked") + public void testTransformRow_InvalidInput2_Currency() { + List l = new ArrayList(); + l.add(AbstractDataImportHandlerTest.createMap("column", "num", + NumberFormatTransformer.FORMAT_STYLE, NumberFormatTransformer.CURRENCY)); + Context c = AbstractDataImportHandlerTest.getContext(null, null, null, Context.FULL_DUMP, l, null); + Map m = AbstractDataImportHandlerTest.createMap("num", "123" + 
GROUPING_SEP + "567b"); + new NumberFormatTransformer().transformRow(m, c); + } + + @Test(expected = DataImportHandlerException.class) + @SuppressWarnings("unchecked") + public void testTransformRow_InvalidInput1_Percent() { + List l = new ArrayList(); + l.add(AbstractDataImportHandlerTest.createMap("column", "num", + NumberFormatTransformer.FORMAT_STYLE, NumberFormatTransformer.PERCENT)); + Context c = AbstractDataImportHandlerTest.getContext(null, null, null, Context.FULL_DUMP, l, null); + Map m = AbstractDataImportHandlerTest.createMap("num", "123" + GROUPING_SEP + "5a67"); + new NumberFormatTransformer().transformRow(m, c); + } + + @Test(expected = DataImportHandlerException.class) + @SuppressWarnings("unchecked") + public void testTransformRow_InvalidInput3_Currency() { + List l = new ArrayList(); + l.add(AbstractDataImportHandlerTest.createMap("column", "num", + NumberFormatTransformer.FORMAT_STYLE, NumberFormatTransformer.CURRENCY)); + Context c = AbstractDataImportHandlerTest.getContext(null, null, null, Context.FULL_DUMP, l, null); + Map m = AbstractDataImportHandlerTest.createMap( + "num", "123" + DECIMAL_SEP + "456" + DECIMAL_SEP + "789"); + new NumberFormatTransformer().transformRow(m, c); + } + + @Test(expected = DataImportHandlerException.class) + @SuppressWarnings("unchecked") + public void testTransformRow_InvalidInput3_Number() { + List l = new ArrayList(); + l.add(AbstractDataImportHandlerTest.createMap("column", "num", + NumberFormatTransformer.FORMAT_STYLE, NumberFormatTransformer.NUMBER)); + Context c = AbstractDataImportHandlerTest.getContext(null, null, null, Context.FULL_DUMP, l, null); + Map m = AbstractDataImportHandlerTest.createMap( + "num", "123" + DECIMAL_SEP + "456" + DECIMAL_SEP + "789"); + new NumberFormatTransformer().transformRow(m, c); + } + + @Test + @SuppressWarnings("unchecked") + public void testTransformRow_MalformedInput_Number() { + List l = new ArrayList(); + l.add(AbstractDataImportHandlerTest.createMap("column", "num", 
+ NumberFormatTransformer.FORMAT_STYLE, NumberFormatTransformer.NUMBER)); + Context c = AbstractDataImportHandlerTest.getContext(null, null, null, Context.FULL_DUMP, l, null); + Map m = AbstractDataImportHandlerTest.createMap( + "num", "123" + GROUPING_SEP + GROUPING_SEP + "789"); + new NumberFormatTransformer().transformRow(m, c); + Assert.assertEquals(new Long(123789), m.get("num")); + } +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestPlainTextEntityProcessor.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestPlainTextEntityProcessor.java new file mode 100644 index 00000000000..c641404d3d5 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestPlainTextEntityProcessor.java @@ -0,0 +1,70 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.handler.dataimport; + +import junit.framework.Assert; +import static org.apache.solr.handler.dataimport.AbstractDataImportHandlerTest.createMap; +import org.junit.Test; + +import java.io.StringReader; +import java.util.Properties; + +/** + * Test for PlainTextEntityProcessor + * + * @version $Id$ + * @see org.apache.solr.handler.dataimport.PlainTextEntityProcessor + * @since solr 1.4 + */ +public class TestPlainTextEntityProcessor { + @Test + public void simple() { + DataImporter di = new DataImporter(); + di.loadAndInit(DATA_CONFIG); + TestDocBuilder.SolrWriterImpl sw = new TestDocBuilder.SolrWriterImpl(); + DataImporter.RequestParams rp = new DataImporter.RequestParams(createMap("command", "full-import")); + di.runCmd(rp, sw); + Assert.assertEquals(DS.s, sw.docs.get(0).getFieldValue("x")); + + } + + public static class DS extends DataSource { + static String s = "hello world"; + + public void init(Context context, Properties initProps) { + + } + + public Object getData(String query) { + + return new StringReader(s); + } + + public void close() { + + } + } + + static String DATA_CONFIG = "\n" + + "\t\n" + + "\t\n" + + "\t\t\n" + + "\t\t\t\n" + + "\t\t\n" + + "\t\n" + + ""; +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestRegexTransformer.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestRegexTransformer.java new file mode 100644 index 00000000000..96df44e2a2e --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestRegexTransformer.java @@ -0,0 +1,209 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import static org.apache.solr.handler.dataimport.RegexTransformer.REGEX; +import static org.apache.solr.handler.dataimport.RegexTransformer.GROUP_NAMES; +import static org.apache.solr.handler.dataimport.RegexTransformer.REPLACE_WITH; +import static org.apache.solr.handler.dataimport.DataImporter.COLUMN; +import static org.apache.solr.handler.dataimport.AbstractDataImportHandlerTest.createMap; +import static org.apache.solr.handler.dataimport.AbstractDataImportHandlerTest.getContext; +import org.junit.Assert; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + *

    Test for RegexTransformer

    + * + * @version $Id$ + * @since solr 1.3 + */ +public class TestRegexTransformer { + + @Test + public void commaSeparated() { + List> fields = new ArrayList>(); + // + fields.add(getField("col1", "string", null, "a", ",")); + Context context = AbstractDataImportHandlerTest.getContext(null, null, null, Context.FULL_DUMP, fields, null); + + Map src = new HashMap(); + src.put("a", "a,bb,cc,d"); + + Map result = new RegexTransformer().transformRow(src, context); + Assert.assertEquals(2, result.size()); + Assert.assertEquals(4, ((List) result.get("col1")).size()); + } + + + @Test + public void groupNames() { + List> fields = new ArrayList>(); + // + Map m = new HashMap(); + m.put(COLUMN,"fullName"); + m.put(GROUP_NAMES,",firstName,lastName"); + m.put(REGEX,"(\\w*) (\\w*) (\\w*)"); + fields.add(m); + Context context = AbstractDataImportHandlerTest.getContext(null, null, null, Context.FULL_DUMP, fields, null); + Map src = new HashMap(); + src.put("fullName", "Mr Noble Paul"); + + Map result = new RegexTransformer().transformRow(src, context); + Assert.assertEquals("Noble", result.get("firstName")); + Assert.assertEquals("Paul", result.get("lastName")); + src= new HashMap(); + List l= new ArrayList(); + l.add("Mr Noble Paul") ; + l.add("Mr Shalin Mangar") ; + src.put("fullName", l); + result = new RegexTransformer().transformRow(src, context); + List l1 = (List) result.get("firstName"); + List l2 = (List) result.get("lastName"); + Assert.assertEquals("Noble", l1.get(0)); + Assert.assertEquals("Shalin", l1.get(1)); + Assert.assertEquals("Paul", l2.get(0)); + Assert.assertEquals("Mangar", l2.get(1)); + } + + @Test + public void replaceWith() { + List> fields = new ArrayList>(); + // + Map fld = getField("name", "string", "'", null, null); + fld.put(REPLACE_WITH, "''"); + fields.add(fld); + Context context = AbstractDataImportHandlerTest.getContext(null, null, + null, Context.FULL_DUMP, fields, null); + + Map src = new HashMap(); + String s = "D'souza"; + src.put("name", 
s); + + Map result = new RegexTransformer().transformRow(src, + context); + Assert.assertEquals("D''souza", result.get("name")); + } + + @Test + public void mileage() { + // init a whole pile of fields + List> fields = getFields(); + + // add another regex which reuses result from previous regex again! + // + Map fld = getField("hltCityMPG", "string", + ".*(${e.city_mileage})", "rowdata", null); + fld.put(REPLACE_WITH, "*** $1 ***"); + fields.add(fld); + + // **ATTEMPTS** a match WITHOUT a replaceWith + // + fld = getField("t1", "string","duff", "rowdata", null); + fields.add(fld); + + // **ATTEMPTS** a match WITH a replaceWith + // + fld = getField("t2", "string","duff", "rowdata", null); + fld.put(REPLACE_WITH, "60"); + fields.add(fld); + + // regex WITH both replaceWith and groupName (groupName ignored!) + // + fld = getField("t3", "string","(Range)", "rowdata", null); + fld.put(REPLACE_WITH, "range"); + fld.put(GROUP_NAMES,"t4,t5"); + fields.add(fld); + + Map row = new HashMap(); + String s = "Fuel Economy Range: 26 mpg Hwy, 19 mpg City"; + row.put("rowdata", s); + + VariableResolverImpl resolver = new VariableResolverImpl(); + resolver.addNamespace("e", row); + Map eAttrs = AbstractDataImportHandlerTest.createMap("name", "e"); + Context context = AbstractDataImportHandlerTest.getContext(null, resolver, null, Context.FULL_DUMP, fields, eAttrs); + + Map result = new RegexTransformer().transformRow(row, context); + Assert.assertEquals(5, result.size()); + Assert.assertEquals(s, result.get("rowdata")); + Assert.assertEquals("26", result.get("highway_mileage")); + Assert.assertEquals("19", result.get("city_mileage")); + Assert.assertEquals("*** 19 *** mpg City", result.get("hltCityMPG")); + Assert.assertEquals("Fuel Economy range: 26 mpg Hwy, 19 mpg City", result.get("t3")); + } + + @Test + public void testMultiValuedRegex(){ + List> fields = new ArrayList>(); +// + Map fld = getField("participant", null, "(.*)", "person", null); + fields.add(fld); + Context 
context = getContext(null, null, + null, Context.FULL_DUMP, fields, null); + + ArrayList strings = new ArrayList(); + strings.add("hello"); + strings.add("world"); + Map result = new RegexTransformer().transformRow(createMap("person", strings), context); + Assert.assertEquals(strings,result.get("participant")); + + + } + + public static List> getFields() { + List> fields = new ArrayList>(); + + // + fields.add(getField("warranty", "string", "Warranty:(.*)", "rowdata", null)); + + // + fields.add(getField("rowdata", "string", null, "rowdata", null)); + return fields; + } + + public static Map getField(String col, String type, + String re, String srcCol, String splitBy) { + HashMap vals = new HashMap(); + vals.put("column", col); + vals.put("type", type); + vals.put("regex", re); + vals.put("sourceColName", srcCol); + vals.put("splitBy", splitBy); + return vals; + } +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestScriptTransformer.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestScriptTransformer.java new file mode 100644 index 00000000000..b8dea989534 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestScriptTransformer.java @@ -0,0 +1,143 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import org.junit.Assert; +import org.junit.Ignore; +import org.junit.Test; +import org.w3c.dom.Document; +import org.w3c.dom.Element; +import org.xml.sax.InputSource; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import java.io.StringReader; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + *

    + * Test for ScriptTransformer + *

    + *

    + * All tests in this have been ignored because script support is only available + * in Java 1.6+ + * + * @version $Id$ + * @since solr 1.3 + */ +public class TestScriptTransformer { + + @Test + @Ignore + public void basic() { + String script = "function f1(row,context){" + + "row.put('name','Hello ' + row.get('name'));" + "return row;\n" + "}"; + Context context = getContext("f1", script); + Map map = new HashMap(); + map.put("name", "Scott"); + EntityProcessorWrapper sep = new EntityProcessorWrapper(new SqlEntityProcessor(), null); + sep.init(context); + sep.applyTransformer(map); + Assert.assertEquals(map.get("name"), "Hello Scott"); + + } + + private Context getContext(String funcName, String script) { + List> fields = new ArrayList>(); + Map entity = new HashMap(); + entity.put("name", "hello"); + entity.put("transformer", "script:" + funcName); + + AbstractDataImportHandlerTest.TestContext context = AbstractDataImportHandlerTest.getContext(null, null, null, + Context.FULL_DUMP, fields, entity); + context.script = script; + context.scriptlang = "JavaScript"; + return context; + } + + @Test + @Ignore + public void oneparam() { + + String script = "function f1(row){" + + "row.put('name','Hello ' + row.get('name'));" + "return row;\n" + "}"; + + Context context = getContext("f1", script); + Map map = new HashMap(); + map.put("name", "Scott"); + EntityProcessorWrapper sep = new EntityProcessorWrapper(new SqlEntityProcessor(), null); + sep.init(context); + sep.applyTransformer(map); + Assert.assertEquals(map.get("name"), "Hello Scott"); + + } + + @Test + @Ignore + public void readScriptTag() throws Exception { + DocumentBuilder builder = DocumentBuilderFactory.newInstance() + .newDocumentBuilder(); + Document document = builder.parse(new InputSource(new StringReader(xml))); + DataConfig config = new DataConfig(); + config.readFromXml((Element) document.getElementsByTagName("dataConfig") + .item(0)); + 
Assert.assertTrue(config.script.text.indexOf("checkNextToken") > -1); + } + + @Test + @Ignore + public void checkScript() throws Exception { + DocumentBuilder builder = DocumentBuilderFactory.newInstance() + .newDocumentBuilder(); + Document document = builder.parse(new InputSource(new StringReader(xml))); + DataConfig config = new DataConfig(); + config.readFromXml((Element) document.getElementsByTagName("dataConfig") + .item(0)); + + Context c = getContext("checkNextToken", config.script.text); + + Map map = new HashMap(); + map.put("nextToken", "hello"); + EntityProcessorWrapper sep = new EntityProcessorWrapper(new SqlEntityProcessor(), null); + sep.init(c); + sep.applyTransformer(map); + Assert.assertEquals("true", map.get("$hasMore")); + map = new HashMap(); + map.put("nextToken", ""); + sep.applyTransformer(map); + Assert.assertNull(map.get("$hasMore")); + + } + + static String xml = "\n" + + "\t\n" + + "\t\t\n" + + "\n" + "\t\t\t\n" + + "\n" + "\t\t\n" + "\t\n" + ""; +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessor.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessor.java new file mode 100644 index 00000000000..15c790bdb36 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessor.java @@ -0,0 +1,179 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import org.junit.Assert; +import org.junit.Test; + +import java.util.*; + +/** + *

    + * Test for SqlEntityProcessor + *

    + * + * @version $Id$ + * @since solr 1.3 + */ +public class TestSqlEntityProcessor { + private static ThreadLocal local = new ThreadLocal(); + + @Test + public void singleBatch() { + SqlEntityProcessor sep = new SqlEntityProcessor(); + List> rows = getRows(3); + VariableResolverImpl vr = new VariableResolverImpl(); + HashMap ea = new HashMap(); + ea.put("query", "SELECT * FROM A"); + Context c = AbstractDataImportHandlerTest.getContext(null, vr, getDs(rows), + Context.FULL_DUMP, null, ea); + sep.init(c); + int count = 0; + while (true) { + Map r = sep.nextRow(); + if (r == null) + break; + count++; + } + + Assert.assertEquals(3, count); + } + + @Test + public void tranformer() { + EntityProcessor sep = new EntityProcessorWrapper( new SqlEntityProcessor(), null); + List> rows = getRows(2); + VariableResolverImpl vr = new VariableResolverImpl(); + HashMap ea = new HashMap(); + ea.put("query", "SELECT * FROM A"); + ea.put("transformer", T.class.getName()); + + sep.init(AbstractDataImportHandlerTest.getContext(null, vr, getDs(rows), + Context.FULL_DUMP, null, ea)); + List> rs = new ArrayList>(); + Map r = null; + while (true) { + r = sep.nextRow(); + if (r == null) + break; + rs.add(r); + + } + Assert.assertEquals(2, rs.size()); + Assert.assertNotNull(rs.get(0).get("T")); + } + + @Test + public void tranformerWithReflection() { + EntityProcessor sep = new EntityProcessorWrapper(new SqlEntityProcessor(), null); + List> rows = getRows(2); + VariableResolverImpl vr = new VariableResolverImpl(); + HashMap ea = new HashMap(); + ea.put("query", "SELECT * FROM A"); + ea.put("transformer", T3.class.getName()); + + sep.init(AbstractDataImportHandlerTest.getContext(null, vr, getDs(rows), + Context.FULL_DUMP, null, ea)); + List> rs = new ArrayList>(); + Map r = null; + while (true) { + r = sep.nextRow(); + if (r == null) + break; + rs.add(r); + + } + Assert.assertEquals(2, rs.size()); + Assert.assertNotNull(rs.get(0).get("T3")); + } + + @Test + public void tranformerList() { 
+ EntityProcessor sep = new EntityProcessorWrapper(new SqlEntityProcessor(),null); + List> rows = getRows(2); + VariableResolverImpl vr = new VariableResolverImpl(); + + HashMap ea = new HashMap(); + ea.put("query", "SELECT * FROM A"); + ea.put("transformer", T2.class.getName()); + sep.init(AbstractDataImportHandlerTest.getContext(null, vr, getDs(rows), + Context.FULL_DUMP, null, ea)); + + local.set(0); + Map r = null; + int count = 0; + while (true) { + r = sep.nextRow(); + if (r == null) + break; + count++; + } + Assert.assertEquals(2, local.get()); + Assert.assertEquals(4, count); + } + + private List> getRows(int count) { + List> rows = new ArrayList>(); + for (int i = 0; i < count; i++) { + Map row = new HashMap(); + row.put("id", i); + row.put("value", "The value is " + i); + rows.add(row); + } + return rows; + } + + private static DataSource>> getDs( + final List> rows) { + return new DataSource>>() { + public Iterator> getData(String query) { + return rows.iterator(); + } + + public void init(Context context, Properties initProps) { + } + + public void close() { + } + }; + } + + public static class T extends Transformer { + public Object transformRow(Map aRow, Context context) { + aRow.put("T", "Class T"); + return aRow; + } + } + + public static class T3 { + public Object transformRow(Map aRow) { + aRow.put("T3", "T3 class"); + return aRow; + } + } + + public static class T2 extends Transformer { + public Object transformRow(Map aRow, Context context) { + Integer count = local.get(); + local.set(count + 1); + List> l = new ArrayList>(); + l.add(aRow); + l.add(aRow); + return l; + } + } +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessor2.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessor2.java new file mode 100644 index 00000000000..01509a13263 --- /dev/null +++ 
b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessor2.java @@ -0,0 +1,274 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import org.junit.Test; + +import java.util.ArrayList; +import java.util.List; +import java.text.SimpleDateFormat; +import java.text.ParseException; + +/** + *

    + * Test for SqlEntityProcessor which checks full and delta imports using the + * test harness + *

    + * + * @version $Id$ + * @since solr 1.3 + */ +public class TestSqlEntityProcessor2 extends AbstractDataImportHandlerTest { + @Override + public String getSchemaFile() { + return "dataimport-schema.xml"; + } + + @Override + public String getSolrConfigFile() { + return "dataimport-solrconfig.xml"; + } + + @Override + public void setUp() throws Exception { + super.setUp(); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + } + + @Test + @SuppressWarnings("unchecked") + public void testCompositePk_FullImport() throws Exception { + List parentRow = new ArrayList(); + parentRow.add(createMap("id", "1")); + MockDataSource.setIterator("select * from x", parentRow.iterator()); + + List childRow = new ArrayList(); + childRow.add(createMap("desc", "hello")); + + MockDataSource.setIterator("select * from y where y.A=1", childRow + .iterator()); + + super.runFullImport(dataConfig); + + assertQ(req("id:1"), "//*[@numFound='1']"); + assertQ(req("desc:hello"), "//*[@numFound='1']"); + } + @Test + @SuppressWarnings("unchecked") + public void testCompositePk_FullImport_MT() throws Exception { + List parentRow = new ArrayList(); + parentRow.add(createMap("id", "1")); + parentRow.add(createMap("id", "2")); + MockDataSource.setIterator("select * from x", parentRow.iterator()); + + List childRow = new ArrayList(); + childRow.add(createMap("desc", "hello")); + + MockDataSource.setIterator("select * from y where y.A=1", childRow.iterator()); + MockDataSource.setIterator("select * from y where y.A=2", childRow.iterator()); + + super.runFullImport(dataConfig_2threads); + + assertQ(req("id:1"), "//*[@numFound='1']"); + assertQ(req("id:2"), "//*[@numFound='1']"); + assertQ(req("desc:hello"), "//*[@numFound='2']"); + } + + @Test + @SuppressWarnings("unchecked") + public void testCompositePk_FullImportNoCommit() throws Exception { + List parentRow = new ArrayList(); + parentRow.add(createMap("id", "10")); + MockDataSource.setIterator("select * from x", 
parentRow.iterator()); + + List childRow = new ArrayList(); + childRow.add(createMap("desc", "hello")); + + MockDataSource.setIterator("select * from y where y.A=10", childRow + .iterator()); + + + super.runFullImport(dataConfig,createMap("commit","false")); + assertQ(req("id:10"), "//*[@numFound='0']"); + } + + @Test + @SuppressWarnings("unchecked") + public void testCompositePk_DeltaImport() throws Exception { + List deltaRow = new ArrayList(); + deltaRow.add(createMap("id", "5")); + MockDataSource.setIterator("select id from x where last_modified > NOW", + deltaRow.iterator()); + + List parentRow = new ArrayList(); + parentRow.add(createMap("id", "5")); + MockDataSource.setIterator("select * from x where id = '5'", parentRow + .iterator()); + + List childRow = new ArrayList(); + childRow.add(createMap("desc", "hello")); + MockDataSource.setIterator("select * from y where y.A=5", childRow + .iterator()); + + super.runDeltaImport(dataConfig); + + assertQ(req("id:5"), "//*[@numFound='1']"); + assertQ(req("desc:hello"), "//*[@numFound='1']"); + } + + @Test + @SuppressWarnings("unchecked") + public void testCompositePk_DeltaImport_DeletedPkQuery() throws Exception { + List parentRow = new ArrayList(); + parentRow.add(createMap("id", "11")); + MockDataSource.setIterator("select * from x", parentRow.iterator()); + + List childRow = new ArrayList(); + childRow.add(createMap("desc", "hello")); + + MockDataSource.setIterator("select * from y where y.A=11", childRow + .iterator()); + + super.runFullImport(dataConfig); + + assertQ(req("id:11"), "//*[@numFound='1']"); + + + + List deltaRow = new ArrayList(); + deltaRow.add(createMap("id", "15")); + deltaRow.add(createMap("id", "17")); + MockDataSource.setIterator("select id from x where last_modified > NOW", + deltaRow.iterator()); + + List deltaDeleteRow = new ArrayList(); + deltaDeleteRow.add(createMap("id", "11")); + deltaDeleteRow.add(createMap("id", "17")); + MockDataSource.setIterator("select id from x where 
last_modified > NOW AND deleted='true'", + deltaDeleteRow.iterator()); + + parentRow = new ArrayList(); + parentRow.add(createMap("id", "15")); + MockDataSource.setIterator("select * from x where id = '15'", parentRow + .iterator()); + + parentRow = new ArrayList(); + parentRow.add(createMap("id", "17")); + MockDataSource.setIterator("select * from x where id = '17'", parentRow + .iterator()); + + super.runDeltaImport(dataConfig); + + assertQ(req("id:15"), "//*[@numFound='1']"); + assertQ(req("id:11"), "//*[@numFound='0']"); + assertQ(req("id:17"), "//*[@numFound='0']"); + + + } + + @Test + @SuppressWarnings("unchecked") + public void testCompositePk_DeltaImport_DeltaImportQuery() throws Exception { + List deltaRow = new ArrayList(); + deltaRow.add(createMap("id", "5")); + MockDataSource.setIterator("select id from x where last_modified > NOW", + deltaRow.iterator()); + + List parentRow = new ArrayList(); + parentRow.add(createMap("id", "5")); + MockDataSource.setIterator("select * from x where id=5", parentRow + .iterator()); + + List childRow = new ArrayList(); + childRow.add(createMap("desc", "hello")); + MockDataSource.setIterator("select * from y where y.A=5", childRow + .iterator()); + + super.runDeltaImport(dataConfig_deltaimportquery); + + assertQ(req("id:5"), "//*[@numFound='1']"); + assertQ(req("desc:hello"), "//*[@numFound='1']"); + } + + @Test + @SuppressWarnings("unchecked") + public void testLastIndexTime() throws Exception { + List row = new ArrayList(); + row.add(createMap("id", 5)); + MockDataSource.setIterator("select * from x where last_modified > OK", row.iterator()); + super.runFullImport(dataConfig_LastIndexTime); + assertQ(req("id:5"), "//*[@numFound='1']"); + } + + static class DateFormatValidatingEvaluator extends Evaluator { + public String evaluate(String expression, Context context) { + List l = EvaluatorBag.parseParams(expression, context.getVariableResolver()); + Object o = l.get(0); + String dateStr = null; + if (o instanceof 
EvaluatorBag.VariableWrapper) { + EvaluatorBag.VariableWrapper wrapper = (EvaluatorBag.VariableWrapper) o; + o = wrapper.resolve(); + dateStr = o.toString(); + } + SimpleDateFormat formatter = DataImporter.DATE_TIME_FORMAT.get(); + try { + formatter.parse(dateStr); + } catch (ParseException e) { + DataImportHandlerException.wrapAndThrow(DataImportHandlerException.SEVERE, e); + } + return "OK"; + } + } + + private static String dataConfig_LastIndexTime = "\n" + + "\t\n" + + "\t\n" + + "\t\t ${dih.functions.checkDateFormat(dih.last_index_time)}\" />\n" + + "\t\n" + + ""; + + private static String dataConfig = "\n" + + " \n" + + " NOW AND deleted='true'\" deltaQuery=\"select id from x where last_modified > NOW\">\n" + + " \n" + + " \n" + + " \n" + + " \n" + " \n" + + " \n" + "\n"; + + private static String dataConfig_2threads = "\n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + " \n" + + " \n" + "\n"; + + private static String dataConfig_deltaimportquery = "\n" + + " \n" + + " NOW\">\n" + + " \n" + + " \n" + + " \n" + + " \n" + " \n" + + " \n" + "\n"; +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta.java new file mode 100644 index 00000000000..9925991645c --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta.java @@ -0,0 +1,295 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import org.junit.Test; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; + +/** + *

    + * Test for SqlEntityProcessor which checks variations in primary key names and deleted ids + *

    + * + * + * @version $Id: TestSqlEntityProcessor2.java 723824 2008-12-05 19:14:11Z shalin $ + * @since solr 1.3 + */ +public class TestSqlEntityProcessorDelta extends AbstractDataImportHandlerTest { + private static final String FULLIMPORT_QUERY = "select * from x"; + + private static final String DELTA_QUERY = "select id from x where last_modified > NOW"; + + private static final String DELETED_PK_QUERY = "select id from x where last_modified > NOW AND deleted='true'"; + + @Override + public String getSchemaFile() { + return "dataimport-schema.xml"; + } + + @Override + public String getSolrConfigFile() { + return "dataimport-solrconfig.xml"; + } + + @Override + public void setUp() throws Exception { + super.setUp(); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + } + + + @SuppressWarnings("unchecked") + private void add1document() throws Exception { + List parentRow = new ArrayList(); + parentRow.add(createMap("id", "1")); + MockDataSource.setIterator(FULLIMPORT_QUERY, parentRow.iterator()); + + List childRow = new ArrayList(); + childRow.add(createMap("desc", "hello")); + MockDataSource.setIterator("select * from y where y.A='1'", childRow + .iterator()); + + super.runFullImport(dataConfig_delta); + + assertQ(req("*:* OR add1document"), "//*[@numFound='1']"); + assertQ(req("id:1"), "//*[@numFound='1']"); + assertQ(req("desc:hello"), "//*[@numFound='1']"); + } + + @Test + @SuppressWarnings("unchecked") + public void testCompositePk_FullImport() throws Exception { + add1document(); + } + + // WORKS + + @Test + @SuppressWarnings("unchecked") + public void testCompositePk_DeltaImport_delete() throws Exception { + add1document(); + List deletedRow = new ArrayList(); + deletedRow.add(createMap("id", "1")); + MockDataSource.setIterator(DELETED_PK_QUERY, deletedRow.iterator()); + + MockDataSource.setIterator(DELTA_QUERY, Collections + .EMPTY_LIST.iterator()); + + List childRow = new ArrayList(); + childRow.add(createMap("desc", 
"hello")); + MockDataSource.setIterator("select * from y where y.A='1'", childRow + .iterator()); + + super.runDeltaImport(dataConfig_delta); + assertQ(req("*:* OR testCompositePk_DeltaImport_delete"), "//*[@numFound='0']"); + } + + @Test + @SuppressWarnings("unchecked") + public void testCompositePk_DeltaImport_empty() throws Exception { + List deltaRow = new ArrayList(); + deltaRow.add(createMap("id", "1")); + MockDataSource.setIterator(DELTA_QUERY, deltaRow.iterator()); + + MockDataSource.setIterator(DELETED_PK_QUERY, Collections + .EMPTY_LIST.iterator()); + + List parentRow = new ArrayList(); + parentRow.add(createMap("id", "1")); + MockDataSource.setIterator("select * from x where id='1'", parentRow + .iterator()); + + List childRow = new ArrayList(); + childRow.add(createMap("desc", "hello")); + MockDataSource.setIterator("select * from y where y.A='1'", childRow + .iterator()); + + super.runDeltaImport(dataConfig_delta); + + assertQ(req("*:* OR testCompositePk_DeltaImport_empty"), "//*[@numFound='1']"); + assertQ(req("id:1"), "//*[@numFound='1']"); + assertQ(req("desc:hello"), "//*[@numFound='1']"); + } + + // WORKS + + @Test + @SuppressWarnings("unchecked") + public void XtestCompositePk_DeltaImport_replace_delete() throws Exception { + add1document(); + MockDataSource.clearCache(); + + List deltaRow = new ArrayList(); + deltaRow.add(createMap("id", "1")); + MockDataSource.setIterator(DELTA_QUERY, + deltaRow.iterator()); + + List deletedRow = new ArrayList(); + deletedRow.add(createMap("id", "1")); + MockDataSource.setIterator(DELETED_PK_QUERY, + deletedRow.iterator()); + + List parentRow = new ArrayList(); + parentRow.add(createMap("id", "1")); + MockDataSource.setIterator("select * from x where id='1'", parentRow + .iterator()); + + List childRow = new ArrayList(); + childRow.add(createMap("desc", "goodbye")); + MockDataSource.setIterator("select * from y where y.A='1'", childRow + .iterator()); + + super.runDeltaImport(dataConfig_delta); + + 
assertQ(req("*:* OR testCompositePk_DeltaImport_replace_delete"), "//*[@numFound='0']"); + } + + + @Test + @SuppressWarnings("unchecked") + public void testCompositePk_DeltaImport_replace_nodelete() throws Exception { + add1document(); + MockDataSource.clearCache(); + + List deltaRow = new ArrayList(); + deltaRow.add(createMap("id", "1")); + MockDataSource.setIterator(DELTA_QUERY, + deltaRow.iterator()); + + MockDataSource.setIterator(DELETED_PK_QUERY, Collections + .EMPTY_LIST.iterator()); + + List parentRow = new ArrayList(); + parentRow.add(createMap("id", "1")); + MockDataSource.setIterator("select * from x where id='1'", parentRow + .iterator()); + + List childRow = new ArrayList(); + childRow.add(createMap("desc", "goodbye")); + MockDataSource.setIterator("select * from y where y.A='1'", childRow + .iterator()); + + super.runDeltaImport(dataConfig_delta); + + assertQ(req("*:* OR XtestCompositePk_DeltaImport_replace_nodelete"), "//*[@numFound='1']"); + assertQ(req("id:1"), "//*[@numFound='1']"); + assertQ(req("desc:hello OR XtestCompositePk_DeltaImport_replace_nodelete"), "//*[@numFound='0']"); + assertQ(req("desc:goodbye"), "//*[@numFound='1']"); + } + + @Test + @SuppressWarnings("unchecked") + public void testCompositePk_DeltaImport_add() throws Exception { + add1document(); + MockDataSource.clearCache(); + + List deltaRow = new ArrayList(); + deltaRow.add(createMap("id", "2")); + MockDataSource.setIterator(DELTA_QUERY, + deltaRow.iterator()); + + List parentRow = new ArrayList(); + parentRow.add(createMap("id", "2")); + MockDataSource.setIterator("select * from x where id='2'", parentRow + .iterator()); + + List childRow = new ArrayList(); + childRow.add(createMap("desc", "goodbye")); + MockDataSource.setIterator("select * from y where y.A='2'", childRow + .iterator()); + + super.runDeltaImport(dataConfig_delta); + + assertQ(req("*:* OR testCompositePk_DeltaImport_add"), "//*[@numFound='2']"); + assertQ(req("id:1"), "//*[@numFound='1']"); + 
assertQ(req("id:2"), "//*[@numFound='1']"); + assertQ(req("desc:hello"), "//*[@numFound='1']"); + assertQ(req("desc:goodbye"), "//*[@numFound='1']"); + } + + @Test + @SuppressWarnings("unchecked") + public void testCompositePk_DeltaImport_nodelta() throws Exception { + add1document(); + MockDataSource.clearCache(); + + MockDataSource.setIterator(DELTA_QUERY, + Collections.EMPTY_LIST.iterator()); + + super.runDeltaImport(dataConfig_delta); + + assertQ(req("*:* OR testCompositePk_DeltaImport_nodelta"), "//*[@numFound='1']"); + assertQ(req("id:1 OR testCompositePk_DeltaImport_nodelta"), "//*[@numFound='1']"); + assertQ(req("desc:hello OR testCompositePk_DeltaImport_nodelta"), "//*[@numFound='1']"); + } + + @Test + @SuppressWarnings("unchecked") + public void testCompositePk_DeltaImport_add_delete() throws Exception { + add1document(); + MockDataSource.clearCache(); + + List deltaRow = new ArrayList(); + deltaRow.add(createMap("id", "2")); + MockDataSource.setIterator(DELTA_QUERY, + deltaRow.iterator()); + + List deletedRow = new ArrayList(); + deletedRow.add(createMap("id", "1")); + MockDataSource.setIterator(DELETED_PK_QUERY, + deletedRow.iterator()); + + List parentRow = new ArrayList(); + parentRow.add(createMap("id", "2")); + MockDataSource.setIterator("select * from x where id='2'", parentRow + .iterator()); + + List childRow = new ArrayList(); + childRow.add(createMap("desc", "goodbye")); + MockDataSource.setIterator("select * from y where y.A='2'", childRow + .iterator()); + + super.runDeltaImport(dataConfig_delta); + + assertQ(req("*:* OR XtestCompositePk_DeltaImport_add_delete"), "//*[@numFound='1']"); + assertQ(req("id:2"), "//*[@numFound='1']"); + assertQ(req("desc:hello"), "//*[@numFound='0']"); + assertQ(req("desc:goodbye"), "//*[@numFound='1']"); + } + + private static String dataConfig_delta = "\n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + " \n" + + " \n" + "\n"; + +} diff --git 
a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta2.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta2.java new file mode 100644 index 00000000000..def30826735 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta2.java @@ -0,0 +1,291 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import org.junit.Test; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; + +/** + *

    + * Test for SqlEntityProcessor which checks variations in primary key names and deleted ids + *

    + * + * + * @version $Id: TestSqlEntityProcessor2.java 723824 2008-12-05 19:14:11Z shalin $ + * @since solr 1.3 + */ +public class TestSqlEntityProcessorDelta2 extends AbstractDataImportHandlerTest { + private static final String FULLIMPORT_QUERY = "select * from x"; + + private static final String DELTA_QUERY = "select id from x where last_modified > NOW"; + + private static final String DELETED_PK_QUERY = "select id from x where last_modified > NOW AND deleted='true'"; + + @Override + public String getSchemaFile() { + return "dataimport-solr_id-schema.xml"; + } + + @Override + public String getSolrConfigFile() { + return "dataimport-solrconfig.xml"; + } + + @Override + public void setUp() throws Exception { + super.setUp(); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + } + + + @SuppressWarnings("unchecked") + private void add1document() throws Exception { + List parentRow = new ArrayList(); + parentRow.add(createMap("id", "1")); + MockDataSource.setIterator(FULLIMPORT_QUERY, parentRow.iterator()); + + List childRow = new ArrayList(); + childRow.add(createMap("desc", "hello")); + MockDataSource.setIterator("select * from y where y.A='1'", childRow + .iterator()); + + super.runFullImport(dataConfig_delta2); + + assertQ(req("*:* OR add1document"), "//*[@numFound='1']"); + assertQ(req("solr_id:prefix-1"), "//*[@numFound='1']"); + assertQ(req("desc:hello"), "//*[@numFound='1']"); + } + + @Test + @SuppressWarnings("unchecked") + public void testCompositePk_FullImport() throws Exception { + add1document(); + } + + @Test + @SuppressWarnings("unchecked") + public void testCompositePk_DeltaImport_delete() throws Exception { + add1document(); + List deletedRow = new ArrayList(); + deletedRow.add(createMap("id", "1")); + MockDataSource.setIterator(DELETED_PK_QUERY, deletedRow.iterator()); + + MockDataSource.setIterator(DELTA_QUERY, Collections + .EMPTY_LIST.iterator()); + + List childRow = new ArrayList(); + 
childRow.add(createMap("desc", "hello")); + MockDataSource.setIterator("select * from y where y.A='1'", childRow + .iterator()); + + super.runDeltaImport(dataConfig_delta2); + assertQ(req("*:* OR testCompositePk_DeltaImport_delete"), "//*[@numFound='0']"); + } + + @Test + @SuppressWarnings("unchecked") + public void testCompositePk_DeltaImport_empty() throws Exception { + List deltaRow = new ArrayList(); + deltaRow.add(createMap("id", "1")); + MockDataSource.setIterator(DELTA_QUERY, deltaRow.iterator()); + + MockDataSource.setIterator(DELETED_PK_QUERY, Collections + .EMPTY_LIST.iterator()); + + List parentRow = new ArrayList(); + parentRow.add(createMap("id", "1")); + MockDataSource.setIterator("select * from x where id='1'", parentRow + .iterator()); + + List childRow = new ArrayList(); + childRow.add(createMap("desc", "hello")); + MockDataSource.setIterator("select * from y where y.A='1'", childRow + .iterator()); + + super.runDeltaImport(dataConfig_delta2); + + assertQ(req("*:* OR testCompositePk_DeltaImport_empty"), "//*[@numFound='1']"); + assertQ(req("solr_id:prefix-1"), "//*[@numFound='1']"); + assertQ(req("desc:hello"), "//*[@numFound='1']"); + } + + @Test + @SuppressWarnings("unchecked") + public void XtestCompositePk_DeltaImport_replace_delete() throws Exception { + add1document(); + MockDataSource.clearCache(); + + List deltaRow = new ArrayList(); + deltaRow.add(createMap("id", "1")); + MockDataSource.setIterator(DELTA_QUERY, + deltaRow.iterator()); + + List deletedRow = new ArrayList(); + deletedRow.add(createMap("id", "1")); + MockDataSource.setIterator(DELETED_PK_QUERY, + deletedRow.iterator()); + + List parentRow = new ArrayList(); + parentRow.add(createMap("id", "1")); + MockDataSource.setIterator("select * from x where id='1'", parentRow + .iterator()); + + List childRow = new ArrayList(); + childRow.add(createMap("desc", "goodbye")); + MockDataSource.setIterator("select * from y where y.A='1'", childRow + .iterator()); + + 
super.runDeltaImport(dataConfig_delta2); + + assertQ(req("*:* OR testCompositePk_DeltaImport_replace_delete"), "//*[@numFound='0']"); + } + + + @Test + @SuppressWarnings("unchecked") + public void testCompositePk_DeltaImport_replace_nodelete() throws Exception { + add1document(); + MockDataSource.clearCache(); + + List deltaRow = new ArrayList(); + deltaRow.add(createMap("id", "1")); + MockDataSource.setIterator(DELTA_QUERY, + deltaRow.iterator()); + + MockDataSource.setIterator(DELETED_PK_QUERY, Collections + .EMPTY_LIST.iterator()); + + List parentRow = new ArrayList(); + parentRow.add(createMap("id", "1")); + MockDataSource.setIterator("select * from x where id='1'", parentRow + .iterator()); + + List childRow = new ArrayList(); + childRow.add(createMap("desc", "goodbye")); + MockDataSource.setIterator("select * from y where y.A='1'", childRow + .iterator()); + + super.runDeltaImport(dataConfig_delta2); + + assertQ(req("*:* OR XtestCompositePk_DeltaImport_replace_nodelete"), "//*[@numFound='1']"); + assertQ(req("solr_id:prefix-1"), "//*[@numFound='1']"); + assertQ(req("desc:hello OR XtestCompositePk_DeltaImport_replace_nodelete"), "//*[@numFound='0']"); + assertQ(req("desc:goodbye"), "//*[@numFound='1']"); + } + + @Test + @SuppressWarnings("unchecked") + public void testCompositePk_DeltaImport_add() throws Exception { + add1document(); + MockDataSource.clearCache(); + + List deltaRow = new ArrayList(); + deltaRow.add(createMap("id", "2")); + MockDataSource.setIterator(DELTA_QUERY, + deltaRow.iterator()); + + List parentRow = new ArrayList(); + parentRow.add(createMap("id", "2")); + MockDataSource.setIterator("select * from x where id='2'", parentRow + .iterator()); + + List childRow = new ArrayList(); + childRow.add(createMap("desc", "goodbye")); + MockDataSource.setIterator("select * from y where y.A='2'", childRow + .iterator()); + + super.runDeltaImport(dataConfig_delta2); + + assertQ(req("*:* OR testCompositePk_DeltaImport_add"), "//*[@numFound='2']"); + 
assertQ(req("solr_id:prefix-1"), "//*[@numFound='1']"); + assertQ(req("solr_id:prefix-2"), "//*[@numFound='1']"); + assertQ(req("desc:hello"), "//*[@numFound='1']"); + assertQ(req("desc:goodbye"), "//*[@numFound='1']"); + } + + @Test + @SuppressWarnings("unchecked") + public void testCompositePk_DeltaImport_nodelta() throws Exception { + add1document(); + MockDataSource.clearCache(); + + MockDataSource.setIterator(DELTA_QUERY, + Collections.EMPTY_LIST.iterator()); + + super.runDeltaImport(dataConfig_delta2); + + assertQ(req("*:* OR testCompositePk_DeltaImport_nodelta"), "//*[@numFound='1']"); + assertQ(req("solr_id:prefix-1 OR testCompositePk_DeltaImport_nodelta"), "//*[@numFound='1']"); + assertQ(req("desc:hello OR testCompositePk_DeltaImport_nodelta"), "//*[@numFound='1']"); + } + + @Test + @SuppressWarnings("unchecked") + public void testCompositePk_DeltaImport_add_delete() throws Exception { + add1document(); + MockDataSource.clearCache(); + + List deltaRow = new ArrayList(); + deltaRow.add(createMap("id", "2")); + MockDataSource.setIterator(DELTA_QUERY, + deltaRow.iterator()); + + List deletedRow = new ArrayList(); + deletedRow.add(createMap("id", "1")); + MockDataSource.setIterator(DELETED_PK_QUERY, + deletedRow.iterator()); + + List parentRow = new ArrayList(); + parentRow.add(createMap("id", "2")); + MockDataSource.setIterator("select * from x where id='2'", parentRow + .iterator()); + + List childRow = new ArrayList(); + childRow.add(createMap("desc", "goodbye")); + MockDataSource.setIterator("select * from y where y.A='2'", childRow + .iterator()); + + super.runDeltaImport(dataConfig_delta2); + + assertQ(req("*:* OR XtestCompositePk_DeltaImport_add_delete"), "//*[@numFound='1']"); + assertQ(req("solr_id:prefix-2"), "//*[@numFound='1']"); + assertQ(req("desc:hello"), "//*[@numFound='0']"); + assertQ(req("desc:goodbye"), "//*[@numFound='1']"); + } + + private static String dataConfig_delta2 = "\n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" 
+ " \n" + + " \n" + "\n"; + +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestTemplateString.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestTemplateString.java new file mode 100644 index 00000000000..706ac0755f9 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestTemplateString.java @@ -0,0 +1,55 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import org.junit.Assert; +import org.junit.Test; + +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; +import java.util.regex.Pattern; + +/** + *

    + * Test for TemplateString + *

    + * + * @version $Id$ + * @since solr 1.3 + */ +public class TestTemplateString { + @Test + public void testSimple() { + VariableResolverImpl vri = new VariableResolverImpl(); + Map ns = new HashMap(); + ns.put("last_index_time", Long.valueOf(1199429363730l)); + vri.addNamespace("indexer", ns); + Assert + .assertEquals( + "select id from subject where last_modified > 1199429363730", + new TemplateString() + .replaceTokens( + "select id from subject where last_modified > ${indexer.last_index_time}", + vri)); + } + + private static Properties EMPTY_PROPS = new Properties(); + + private static Pattern SELECT_WHERE_PATTERN = Pattern.compile( + "^\\s*(select\\b.*?\\b)(where).*", Pattern.CASE_INSENSITIVE); +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestTemplateTransformer.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestTemplateTransformer.java new file mode 100644 index 00000000000..7fbf9ac9e23 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestTemplateTransformer.java @@ -0,0 +1,75 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.handler.dataimport; + +import org.junit.Assert; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Arrays; + +/** + *

    + * Test for TemplateTransformer + *

    + * + * @version $Id$ + * @since solr 1.3 + */ +public class TestTemplateTransformer { + + @Test + @SuppressWarnings("unchecked") + public void testTransformRow() { + List fields = new ArrayList(); + fields.add(AbstractDataImportHandlerTest.createMap("column", "firstName")); + fields.add(AbstractDataImportHandlerTest.createMap("column", "lastName")); + fields.add(AbstractDataImportHandlerTest.createMap("column", "middleName")); + fields.add(AbstractDataImportHandlerTest.createMap("column", "name", + TemplateTransformer.TEMPLATE, + "${e.lastName}, ${e.firstName} ${e.middleName}")); + fields.add(AbstractDataImportHandlerTest.createMap("column", "emails", + TemplateTransformer.TEMPLATE, + "${e.mail}")); + + // test reuse of template output in another template + fields.add(AbstractDataImportHandlerTest.createMap("column", "mrname", + TemplateTransformer.TEMPLATE,"Mr ${e.name}")); + + List mails = Arrays.asList(new String[]{"a@b.com", "c@d.com"}); + Map row = AbstractDataImportHandlerTest.createMap( + "firstName", "Shalin", + "middleName", "Shekhar", + "lastName", "Mangar", + "mail", mails); + + VariableResolverImpl resolver = new VariableResolverImpl(); + resolver.addNamespace("e", row); + Map entityAttrs = AbstractDataImportHandlerTest.createMap( + "name", "e"); + + Context context = AbstractDataImportHandlerTest.getContext(null, resolver, + null, Context.FULL_DUMP, fields, entityAttrs); + new TemplateTransformer().transformRow(row, context); + Assert.assertEquals("Mangar, Shalin Shekhar", row.get("name")); + Assert.assertEquals("Mr Mangar, Shalin Shekhar", row.get("mrname")); + Assert.assertEquals(mails,row.get("emails")); + } + +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestThreaded.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestThreaded.java new file mode 100644 index 00000000000..daa6f6e80fc --- /dev/null +++ 
b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestThreaded.java @@ -0,0 +1,72 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import org.junit.Test; + +import java.util.List; +import java.util.ArrayList; +import java.util.Map; + + +public class TestThreaded extends AbstractDataImportHandlerTest { + @Test + @SuppressWarnings("unchecked") + public void testCompositePk_FullImport() throws Exception { + List parentRow = new ArrayList(); +// parentRow.add(createMap("id", "1")); + parentRow.add(createMap("id", "2")); + parentRow.add(createMap("id", "3")); + parentRow.add(createMap("id", "4")); + parentRow.add(createMap("id", "1")); + MockDataSource.setIterator("select * from x", parentRow.iterator()); + + List childRow = new ArrayList(); + Map map = createMap("desc", "hello"); + childRow.add(map); + + MockDataSource.setIterator("select * from y where y.A=1", childRow.iterator()); + MockDataSource.setIterator("select * from y where y.A=2", childRow.iterator()); + MockDataSource.setIterator("select * from y where y.A=3", childRow.iterator()); + MockDataSource.setIterator("select * from y where y.A=4", childRow.iterator()); + + 
super.runFullImport(dataConfig); + + assertQ(req("id:1"), "//*[@numFound='1']"); + assertQ(req("*:*"), "//*[@numFound='4']"); + assertQ(req("desc:hello"), "//*[@numFound='4']"); + } + + @Override + public String getSchemaFile() { + return "dataimport-schema.xml"; + } + + @Override + public String getSolrConfigFile() { + return "dataimport-solrconfig.xml"; + } + private static String dataConfig = "\n" + +"\n" + + " \n" + + " NOW AND deleted='true'\" deltaQuery=\"select id from x where last_modified > NOW\">\n" + + " \n" + + " \n" + + " \n" + + " \n" + " \n" + + " \n" + ""; +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestURLDataSource.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestURLDataSource.java new file mode 100644 index 00000000000..e76648969b8 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestURLDataSource.java @@ -0,0 +1,47 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.handler.dataimport; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +import junit.framework.Assert; + +import org.junit.Test; + +public class TestURLDataSource { + private List> fields = new ArrayList>(); + private URLDataSource dataSource = new URLDataSource(); + private VariableResolverImpl variableResolver = new VariableResolverImpl(); + private Context context = AbstractDataImportHandlerTest.getContext(null, variableResolver, + dataSource, Context.FULL_DUMP, fields, null); + private Properties initProps = new Properties(); + + @Test + public void substitutionsOnBaseUrl() throws Exception { + String url = "http://example.com/"; + + variableResolver.addNamespace("dataimporter.request", Collections.singletonMap("baseurl", url)); + + initProps.setProperty(URLDataSource.BASE_URL, "${dataimporter.request.baseurl}"); + dataSource.init(context, initProps); + Assert.assertEquals(url, dataSource.getBaseUrl()); + } +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestVariableResolver.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestVariableResolver.java new file mode 100644 index 00000000000..125fd961c47 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestVariableResolver.java @@ -0,0 +1,175 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import org.junit.Assert; +import org.junit.Test; +import org.apache.solr.util.DateMathParser; + +import java.text.SimpleDateFormat; +import java.util.*; + +/** + *

    + * Test for VariableResolver + *

    + * + * @version $Id$ + * @since solr 1.3 + */ +public class TestVariableResolver { + + @Test + public void testSimpleNamespace() { + VariableResolverImpl vri = new VariableResolverImpl(); + Map ns = new HashMap(); + ns.put("world", "WORLD"); + vri.addNamespace("hello", ns); + Assert.assertEquals("WORLD", vri.resolve("hello.world")); + } + + @Test + public void testDefaults(){ + System.out.println(System.setProperty(TestVariableResolver.class.getName(),"hello")); + System.out.println("s.gP()"+ System.getProperty(TestVariableResolver.class.getName())); + + HashMap m = new HashMap(); + m.put("hello","world"); + VariableResolverImpl vri = new VariableResolverImpl(m); + Object val = vri.resolve(TestVariableResolver.class.getName()); + System.out.println("val = " + val); + Assert.assertEquals("hello", val); + Assert.assertEquals("world",vri.resolve("hello")); + + } + + @Test + public void testNestedNamespace() { + VariableResolverImpl vri = new VariableResolverImpl(); + Map ns = new HashMap(); + ns.put("world", "WORLD"); + vri.addNamespace("hello", ns); + ns = new HashMap(); + ns.put("world1", "WORLD1"); + vri.addNamespace("hello.my", ns); + Assert.assertEquals("WORLD1", vri.resolve("hello.my.world1")); + } + + @Test + public void test3LevelNestedNamespace() { + VariableResolverImpl vri = new VariableResolverImpl(); + Map ns = new HashMap(); + ns.put("world", "WORLD"); + vri.addNamespace("hello", ns); + ns = new HashMap(); + ns.put("world1", "WORLD1"); + vri.addNamespace("hello.my.new", ns); + Assert.assertEquals("WORLD1", vri.resolve("hello.my.new.world1")); + } + + @Test + public void dateNamespaceWithValue() { + VariableResolverImpl vri = new VariableResolverImpl(); + ContextImpl context = new ContextImpl(null, vri, null, Context.FULL_DUMP, Collections.EMPTY_MAP, null, null); + Context.CURRENT_CONTEXT.set(context); + try { + vri.addNamespace("dataimporter.functions", EvaluatorBag + .getFunctionsNamespace(Collections.EMPTY_LIST, null)); + Map ns = new HashMap(); + 
Date d = new Date(); + ns.put("dt", d); + vri.addNamespace("A", ns); + Assert.assertEquals(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(d), + vri.replaceTokens("${dataimporter.functions.formatDate(A.dt,'yyyy-MM-dd HH:mm:ss')}")); + } finally { + Context.CURRENT_CONTEXT.remove(); + } + } + + @Test + public void dateNamespaceWithExpr() throws Exception { + VariableResolverImpl vri = new VariableResolverImpl(); + ContextImpl context = new ContextImpl(null, vri, null, Context.FULL_DUMP, Collections.EMPTY_MAP, null, null); + Context.CURRENT_CONTEXT.set(context); + try { + vri.addNamespace("dataimporter.functions", EvaluatorBag + .getFunctionsNamespace(Collections.EMPTY_LIST,null)); + + SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"); + format.setTimeZone(TimeZone.getTimeZone("UTC")); + DateMathParser dmp = new DateMathParser(TimeZone.getDefault(), Locale.getDefault()); + + String s = vri.replaceTokens("${dataimporter.functions.formatDate('NOW/DAY','yyyy-MM-dd HH:mm')}"); + Assert.assertEquals(new SimpleDateFormat("yyyy-MM-dd HH:mm").format(dmp.parseMath("/DAY")), s); + } finally { + Context.CURRENT_CONTEXT.remove(); + } + } + + @Test + public void testDefaultNamespace() { + VariableResolverImpl vri = new VariableResolverImpl(); + Map ns = new HashMap(); + ns.put("world", "WORLD"); + vri.addNamespace(null, ns); + Assert.assertEquals("WORLD", vri.resolve("world")); + } + + @Test + public void testDefaultNamespace1() { + VariableResolverImpl vri = new VariableResolverImpl(); + Map ns = new HashMap(); + ns.put("world", "WORLD"); + vri.addNamespace(null, ns); + Assert.assertEquals("WORLD", vri.resolve("world")); + } + + @Test + public void testFunctionNamespace1() throws Exception { + VariableResolverImpl resolver = new VariableResolverImpl(); + ContextImpl context = new ContextImpl(null, resolver, null, Context.FULL_DUMP, Collections.EMPTY_MAP, null, null); + Context.CURRENT_CONTEXT.set(context); + try { + final List> l = new ArrayList>(); + 
Map m = new HashMap(); + m.put("name","test"); + m.put("class",E.class.getName()); + l.add(m); + + SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"); + format.setTimeZone(TimeZone.getTimeZone("UTC")); + DateMathParser dmp = new DateMathParser(TimeZone.getDefault(), Locale.getDefault()); + + resolver.addNamespace("dataimporter.functions", EvaluatorBag + .getFunctionsNamespace(l,null)); + String s = resolver + .replaceTokens("${dataimporter.functions.formatDate('NOW/DAY','yyyy-MM-dd HH:mm')}"); + Assert.assertEquals(new SimpleDateFormat("yyyy-MM-dd HH:mm") + .format(dmp.parseMath("/DAY")), s); + Assert.assertEquals("Hello World", resolver + .replaceTokens("${dataimporter.functions.test('TEST')}")); + } finally { + Context.CURRENT_CONTEXT.remove(); + } + } + + public static class E extends Evaluator{ + public String evaluate(String expression, Context context) { + return "Hello World"; + } + } +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java new file mode 100644 index 00000000000..cf4b1619a1f --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java @@ -0,0 +1,304 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import static org.apache.solr.handler.dataimport.AbstractDataImportHandlerTest.createMap; +import org.junit.Assert; +import org.junit.Test; + +import java.io.File; +import java.io.Reader; +import java.io.StringReader; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.concurrent.TimeUnit; + +/** + *

    + * Test for XPathEntityProcessor + *

    + * + * @version $Id$ + * @since solr 1.3 + */ +public class TestXPathEntityProcessor { + boolean simulateSlowReader; + boolean simulateSlowResultProcessor; + int rowsToRead = -1; + + @Test + public void withFieldsAndXpath() throws Exception { + long time = System.currentTimeMillis(); + File tmpdir = new File("." + time); + tmpdir.mkdir(); + tmpdir.deleteOnExit(); + TestFileListEntityProcessor.createFile(tmpdir, "x.xsl", xsl.getBytes(), + false); + Map entityAttrs = createMap("name", "e", "url", "cd.xml", + XPathEntityProcessor.FOR_EACH, "/catalog/cd"); + List fields = new ArrayList(); + fields.add(createMap("column", "title", "xpath", "/catalog/cd/title")); + fields.add(createMap("column", "artist", "xpath", "/catalog/cd/artist")); + fields.add(createMap("column", "year", "xpath", "/catalog/cd/year")); + Context c = AbstractDataImportHandlerTest.getContext(null, + new VariableResolverImpl(), getDataSource(cdData), Context.FULL_DUMP, fields, entityAttrs); + XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor(); + xPathEntityProcessor.init(c); + List> result = new ArrayList>(); + while (true) { + Map row = xPathEntityProcessor.nextRow(); + if (row == null) + break; + result.add(row); + } + Assert.assertEquals(3, result.size()); + Assert.assertEquals("Empire Burlesque", result.get(0).get("title")); + Assert.assertEquals("Bonnie Tyler", result.get(1).get("artist")); + Assert.assertEquals("1982", result.get(2).get("year")); + } + + @Test + public void testMultiValued() throws Exception { + Map entityAttrs = createMap("name", "e", "url", "testdata.xml", + XPathEntityProcessor.FOR_EACH, "/root"); + List fields = new ArrayList(); + fields.add(createMap("column", "a", "xpath", "/root/a", DataImporter.MULTI_VALUED, "true")); + Context c = AbstractDataImportHandlerTest.getContext(null, + new VariableResolverImpl(), getDataSource(testXml), Context.FULL_DUMP, fields, entityAttrs); + XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor(); + 
xPathEntityProcessor.init(c); + List> result = new ArrayList>(); + while (true) { + Map row = xPathEntityProcessor.nextRow(); + if (row == null) + break; + result.add(row); + } + Assert.assertEquals(2, ((List)result.get(0).get("a")).size()); + } + + @Test + public void testMultiValuedFlatten() throws Exception { + Map entityAttrs = createMap("name", "e", "url", "testdata.xml", + XPathEntityProcessor.FOR_EACH, "/root"); + List fields = new ArrayList(); + fields.add(createMap("column", "a", "xpath", "/root/a" ,"flatten","true")); + Context c = AbstractDataImportHandlerTest.getContext(null, + new VariableResolverImpl(), getDataSource(testXmlFlatten), Context.FULL_DUMP, fields, entityAttrs); + XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor(); + xPathEntityProcessor.init(c); + Map result = null; + while (true) { + Map row = xPathEntityProcessor.nextRow(); + if (row == null) + break; + result = row; + } + Assert.assertEquals("1B2", result.get("a")); + } + + @Test + public void withFieldsAndXpathStream() throws Exception { + final Object monitor = new Object(); + final boolean[] done = new boolean[1]; + + Map entityAttrs = createMap("name", "e", "url", "cd.xml", + XPathEntityProcessor.FOR_EACH, "/catalog/cd", "stream", "true", "batchSize","1"); + List fields = new ArrayList(); + fields.add(createMap("column", "title", "xpath", "/catalog/cd/title")); + fields.add(createMap("column", "artist", "xpath", "/catalog/cd/artist")); + fields.add(createMap("column", "year", "xpath", "/catalog/cd/year")); + Context c = AbstractDataImportHandlerTest.getContext(null, + new VariableResolverImpl(), getDataSource(cdData), Context.FULL_DUMP, fields, entityAttrs); + XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor() { + private int count; + + @Override + protected Map readRow(Map record, + String xpath) { + synchronized (monitor) { + if (simulateSlowReader && !done[0]) { + try { + monitor.wait(100); + } catch (InterruptedException e) { + throw new 
RuntimeException(e); + } + } + } + + return super.readRow(record, xpath); + } + }; + + if (simulateSlowResultProcessor) { + xPathEntityProcessor.blockingQueueSize = 1; + } + xPathEntityProcessor.blockingQueueTimeOut = 1; + xPathEntityProcessor.blockingQueueTimeOutUnits = TimeUnit.MICROSECONDS; + + xPathEntityProcessor.init(c); + List> result = new ArrayList>(); + while (true) { + if (rowsToRead >= 0 && result.size() >= rowsToRead) { + Thread.currentThread().interrupt(); + } + Map row = xPathEntityProcessor.nextRow(); + if (row == null) + break; + result.add(row); + if (simulateSlowResultProcessor) { + synchronized (xPathEntityProcessor.publisherThread) { + if (xPathEntityProcessor.publisherThread.isAlive()) { + xPathEntityProcessor.publisherThread.wait(1000); + } + } + } + } + + synchronized (monitor) { + done[0] = true; + monitor.notify(); + } + + // confirm that publisher thread stops. + xPathEntityProcessor.publisherThread.join(1000); + Assert.assertEquals("Expected thread to stop", false, xPathEntityProcessor.publisherThread.isAlive()); + + Assert.assertEquals(rowsToRead < 0 ? 3 : rowsToRead, result.size()); + + if (rowsToRead < 0) { + Assert.assertEquals("Empire Burlesque", result.get(0).get("title")); + Assert.assertEquals("Bonnie Tyler", result.get(1).get("artist")); + Assert.assertEquals("1982", result.get(2).get("year")); + } + } + + @Test + public void withFieldsAndXpathStreamContinuesOnTimeout() throws Exception { + simulateSlowReader = true; + withFieldsAndXpathStream(); + } + + @Test + public void streamWritesMessageAfterBlockedAttempt() throws Exception { + simulateSlowResultProcessor = true; + withFieldsAndXpathStream(); + } + + @Test + public void streamStopsAfterInterrupt() throws Exception { + simulateSlowResultProcessor = true; + rowsToRead = 1; + withFieldsAndXpathStream(); + } + + @Test + public void withDefaultSolrAndXsl() throws Exception { + long time = System.currentTimeMillis(); + File tmpdir = new File("." 
+ time); + tmpdir.mkdir(); + tmpdir.deleteOnExit(); + TestFileListEntityProcessor.createFile(tmpdir, "x.xsl", xsl.getBytes(), + false); + Map entityAttrs = createMap("name", "e", + XPathEntityProcessor.USE_SOLR_ADD_SCHEMA, "true", "xsl", "" + + new File(tmpdir, "x.xsl").getAbsolutePath(), "url", "cd.xml"); + Context c = AbstractDataImportHandlerTest.getContext(null, + new VariableResolverImpl(), getDataSource(cdData), Context.FULL_DUMP, null, entityAttrs); + XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor(); + xPathEntityProcessor.init(c); + List> result = new ArrayList>(); + while (true) { + Map row = xPathEntityProcessor.nextRow(); + if (row == null) + break; + result.add(row); + } + Assert.assertEquals(3, result.size()); + Assert.assertEquals("Empire Burlesque", result.get(0).get("title")); + Assert.assertEquals("Bonnie Tyler", result.get(1).get("artist")); + Assert.assertEquals("1982", result.get(2).get("year")); + } + + private DataSource getDataSource(final String xml) { + return new DataSource() { + + public void init(Context context, Properties initProps) { + } + + public void close() { + } + + public Reader getData(String query) { + return new StringReader(xml); + } + }; + } + + private static final String xsl = "\n" + + "\n" + + "\n" + + "\n" + + "\n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + "\n" + ""; + + private static final String cdData = "\n" + + "\n" + + "\n" + + "\t\n" + + "\t\tEmpire Burlesque\n" + + "\t\tBob Dylan\n" + + "\t\tUSA\n" + + "\t\tColumbia\n" + + "\t\t10.90\n" + + "\t\t1985\n" + + "\t\n" + + "\t\n" + + "\t\tHide your heart\n" + + "\t\tBonnie Tyler\n" + + "\t\tUK\n" + + "\t\tCBS Records\n" + + "\t\t9.90\n" + + "\t\t1988\n" + + "\t\n" + + "\t\n" + + "\t\tGreatest Hits\n" + + "\t\tDolly Parton\n" + + "\t\tUSA\n" + + "\t\tRCA\n" + + "\t\t9.90\n" + + "\t\t1982\n" + "\t\n" + "\t"; + + private static final String testXml = "12"; + + private 
static final String testXmlFlatten = "1B2"; +} diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestXPathRecordReader.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestXPathRecordReader.java new file mode 100644 index 00000000000..8365ecff767 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestXPathRecordReader.java @@ -0,0 +1,569 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed onT an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.dataimport; + +import org.junit.Assert; +import org.junit.Test; + +import java.io.StringReader; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** + *

    Test for XPathRecordReader

    + * + * @version $Id$ + * @since solr 1.3 + */ +public class TestXPathRecordReader { + @Test + public void basic() { + String xml="\n" + + " Hello C1\n" + + " Hello C1\n" + + " \n" + + " Hello C2\n" + + " \n" + + ""; + XPathRecordReader rr = new XPathRecordReader("/root/b"); + rr.addField("c", "/root/b/c", true); + List> l = rr.getAllRecords(new StringReader(xml)); + Assert.assertEquals(2, l.size()); + Assert.assertEquals(2, ((List) l.get(0).get("c")).size()); + Assert.assertEquals(1, ((List) l.get(1).get("c")).size()); + } + + @Test + public void attributes() { + String xml="\n" + + " \n" + + " \n" + + " \n" + + ""; + XPathRecordReader rr = new XPathRecordReader("/root/b"); + rr.addField("a", "/root/b/@a", false); + rr.addField("b", "/root/b/@b", false); + List> l = rr.getAllRecords(new StringReader(xml)); + Assert.assertEquals(3, l.size()); + Assert.assertEquals("x0", l.get(0).get("a")); + Assert.assertEquals("x1", l.get(1).get("a")); + Assert.assertEquals("x2", l.get(2).get("a")); + Assert.assertEquals("y0", l.get(0).get("b")); + Assert.assertEquals("y1", l.get(1).get("b")); + Assert.assertEquals("y2", l.get(2).get("b")); + } + + @Test + public void attrInRoot(){ + String xml="\n" + + "\n" + + " \n" + + " \n" + + " 301.46\n" + + " \n" + + "\n" + + " \n" + + " \n" + + " 302.46\n" + + " \n" + + "\n" + + ""; + XPathRecordReader rr = new XPathRecordReader("/r/merchantProduct"); + rr.addField("id", "/r/merchantProduct/@id", false); + rr.addField("mid", "/r/merchantProduct/@mid", false); + rr.addField("price", "/r/merchantProduct/price", false); + rr.addField("conditionType", "/r/merchantProduct/condition/@type", false); + List> l = rr.getAllRecords(new StringReader(xml)); + Map m = l.get(0); + Assert.assertEquals("814636051", m.get("id")); + Assert.assertEquals("189973", m.get("mid")); + Assert.assertEquals("301.46", m.get("price")); + Assert.assertEquals("cond-0", m.get("conditionType")); + + m = l.get(1); + Assert.assertEquals("814636052", m.get("id")); + 
Assert.assertEquals("189974", m.get("mid")); + Assert.assertEquals("302.46", m.get("price")); + Assert.assertEquals("cond-1", m.get("conditionType")); + } + + @Test + public void attributes2Level() { + String xml="\n" + + "\n \n" + + " \n" + + " \n" + + " " + + ""; + XPathRecordReader rr = new XPathRecordReader("/root/a/b"); + rr.addField("a", "/root/a/b/@a", false); + rr.addField("b", "/root/a/b/@b", false); + List> l = rr.getAllRecords(new StringReader(xml)); + Assert.assertEquals(3, l.size()); + Assert.assertEquals("x0", l.get(0).get("a")); + Assert.assertEquals("y1", l.get(1).get("b")); + } + + @Test + public void attributes2LevelHetero() { + String xml="\n" + + "\n \n" + + " \n" + + " \n" + + " " + + "\n \n" + + " \n" + + " \n" + + " " + + ""; + XPathRecordReader rr = new XPathRecordReader("/root/a | /root/x"); + rr.addField("a", "/root/a/b/@a", false); + rr.addField("b", "/root/a/b/@b", false); + rr.addField("a", "/root/x/b/@a", false); + rr.addField("b", "/root/x/b/@b", false); + + final List> a = new ArrayList>(); + final List> x = new ArrayList>(); + rr.streamRecords(new StringReader(xml), new XPathRecordReader.Handler() { + public void handle(Map record, String xpath) { + if (record == null) return; + if (xpath.equals("/root/a")) a.add(record); + if (xpath.equals("/root/x")) x.add(record); + } + }); + + Assert.assertEquals(1, a.size()); + Assert.assertEquals(1, x.size()); + } + + @Test + public void attributes2LevelMissingAttrVal() { + String xml="\n" + + "\n \n" + + " \n" + + " " + + "\n \n" + + " \n" + + " " + + ""; + XPathRecordReader rr = new XPathRecordReader("/root/a"); + rr.addField("a", "/root/a/b/@a", true); + rr.addField("b", "/root/a/b/@b", true); + List> l = rr.getAllRecords(new StringReader(xml)); + Assert.assertEquals(2, l.size()); + Assert.assertNull(((List) l.get(1).get("a")).get(1)); + Assert.assertNull(((List) l.get(1).get("b")).get(0)); + } + + @Test + public void elems2LevelMissing() { + String xml="\n" + + "\t\n" + + "\t \n\t x0\n" + 
+ "\t y0\n" + + "\t \n" + + "\t \n\t x1\n" + + "\t y1\n" + + "\t \n" + + "\t \n" + + "\t\n" + + "\t \n\t x3\n\t \n" + + "\t \n\t y4\n\t \n" + + "\t \n" + + ""; + XPathRecordReader rr = new XPathRecordReader("/root/a"); + rr.addField("a", "/root/a/b/x", true); + rr.addField("b", "/root/a/b/y", true); + List> l = rr.getAllRecords(new StringReader(xml)); + Assert.assertEquals(2, l.size()); + Assert.assertNull(((List) l.get(1).get("a")).get(1)); + Assert.assertNull(((List) l.get(1).get("b")).get(0)); + } + + @Test + public void mixedContent() { + String xml = "This text is \n" + + " bold and this text is \n" + + " underlined!\n" + + ""; + XPathRecordReader rr = new XPathRecordReader("/p"); + rr.addField("p", "/p", true); + rr.addField("b", "/p/b", true); + rr.addField("u", "/p/u", true); + List> l = rr.getAllRecords(new StringReader(xml)); + Map row = l.get(0); + + Assert.assertEquals("bold", ((List) row.get("b")).get(0)); + Assert.assertEquals("underlined", ((List) row.get("u")).get(0)); + String p = (String) ((List) row.get("p")).get(0); + Assert.assertTrue(p.contains("This text is")); + Assert.assertTrue(p.contains("and this text is")); + Assert.assertTrue(p.contains("!")); + // Should not contain content from child elements + Assert.assertFalse(p.contains("bold")); + } + + @Test + public void mixedContentFlattened() { + String xml = "This text is \n" + + " bold and this text is \n" + + " underlined!\n" + + ""; + XPathRecordReader rr = new XPathRecordReader("/p"); + rr.addField("p", "/p", false, XPathRecordReader.FLATTEN); + List> l = rr.getAllRecords(new StringReader(xml)); + Map row = l.get(0); + Assert.assertEquals("This text is \n" + + " bold and this text is \n" + + " underlined!", ((String)row.get("p")).trim() ); + } + + @Test + public void elems2LevelWithAttrib() { + String xml = "\n\t\n\t \n" + + "\t x0\n" + + "\t y0\n" + + "\t \n" + + "\t \n" + + "\t x1\n" + + "\t y1\n" + + "\t \n" + + "\t \n" + + "\t \n\t \n" + + "\t x3\n" + + "\t \n" + + "\t \n" + + "\t 
y4\n" + + "\t \n" + + "\t \n" + + ""; + XPathRecordReader rr = new XPathRecordReader("/root/a"); + rr.addField("x", "/root/a/b[@k]/x", true); + rr.addField("y", "/root/a/b[@k]/y", true); + List> l = rr.getAllRecords(new StringReader(xml)); + Assert.assertEquals(2, l.size()); + Assert.assertEquals(2, ((List) l.get(0).get("x")).size()); + Assert.assertEquals(2, ((List) l.get(0).get("y")).size()); + Assert.assertEquals(0, l.get(1).size()); + } + + @Test + public void elems2LevelWithAttribMultiple() { + String xml="\n" + + "\t\n\t \n" + + "\t x0\n" + + "\t y0\n" + + "\t \n" + + "\t \n" + + "\t x1\n" + + "\t y1\n" + + "\t \n" + + "\t \n" + + "\t\n\t \n" + + "\t x3\n" + + "\t \n" + + "\t \n" + + "\t y4\n" + + "\t \n" + + "\t \n" + + ""; + XPathRecordReader rr = new XPathRecordReader("/root/a"); + rr.addField("x", "/root/a/b[@k][@m='n']/x", true); + rr.addField("y", "/root/a/b[@k][@m='n']/y", true); + List> l = rr.getAllRecords(new StringReader(xml)); + Assert.assertEquals(2, l.size()); + Assert.assertEquals(1, ((List) l.get(0).get("x")).size()); + Assert.assertEquals(1, ((List) l.get(0).get("y")).size()); + Assert.assertEquals(0, l.get(1).size()); + } + + @Test + public void elems2LevelWithAttribVal() { + String xml="\n\t\n \n" + + "\t x0\n" + + "\t y0\n" + + "\t \n" + + "\t \n" + + "\t x1\n" + + "\t y1\n" + + "\t \n" + + "\t \n" + + "\t \n x3\n" + + "\t y4\n" + + "\t\n" + ""; + XPathRecordReader rr = new XPathRecordReader("/root/a"); + rr.addField("x", "/root/a/b[@k='x']/x", true); + rr.addField("y", "/root/a/b[@k='x']/y", true); + List> l = rr.getAllRecords(new StringReader(xml)); + Assert.assertEquals(2, l.size()); + Assert.assertEquals(1, ((List) l.get(0).get("x")).size()); + Assert.assertEquals(1, ((List) l.get(0).get("y")).size()); + Assert.assertEquals(0, l.get(1).size()); + } + + @Test + public void attribValWithSlash() { + String xml = "\n" + + " \n" + + ""; + XPathRecordReader rr = new XPathRecordReader("/root/b"); + rr.addField("x", "/root/b/a[@x='a/b']/@h", 
false); + List> l = rr.getAllRecords(new StringReader(xml)); + Assert.assertEquals(1, l.size()); + Map m = l.get(0); + Assert.assertEquals("hello-A", m.get("x")); + } + + @Test + public void unsupported_Xpaths() { + String xml = " "; + XPathRecordReader rr=null; + try { + rr = new XPathRecordReader("//b"); + Assert.fail("A RuntimeException was expected: //b forEach cannot begin with '//'."); + } + catch (RuntimeException ex) { } + try { + rr.addField("bold" ,"b", false); + Assert.fail("A RuntimeException was expected: 'b' xpaths must begin with '/'."); + } + catch (RuntimeException ex) { } + + } + + @Test + public void any_decendent_from_root() { + XPathRecordReader rr = new XPathRecordReader("/anyd/contenido"); + rr.addField("descdend", "//boo", true); + rr.addField("inr_descd","//boo/i", false); + rr.addField("cont", "/anyd/contenido", false); + rr.addField("id", "/anyd/contenido/@id", false); + rr.addField("status", "/anyd/status", false); + rr.addField("title", "/anyd/contenido/titulo", false,XPathRecordReader.FLATTEN); + rr.addField("resume", "/anyd/contenido/resumen",false); + rr.addField("text", "/anyd/contenido/texto", false); + + String xml="\n" + + " this top level is ignored because it is external to the forEach\n" + + " as is this element\n" + + " \n" + + " This one is not ignored as its inside a forEach\n" + + " big antler\n" + + " My flattened title \n" + + " My summary skip this! \n" + + " Within the body ofMy text\n" + + "

    Access inner sub clauses as well

    \n" + + "
    \n" + + "
    "; + + List> l = rr.getAllRecords(new StringReader(xml)); + Assert.assertEquals(1, l.size()); + Map m = l.get(0); + Assert.assertEquals("This one is inside a forEach", m.get("cont").toString().trim()); + Assert.assertEquals("10097" ,m.get("id")); + Assert.assertEquals("My flattened title",m.get("title").toString().trim()); + Assert.assertEquals("My summary" ,m.get("resume").toString().trim()); + Assert.assertEquals("My text" ,m.get("text").toString().trim()); + Assert.assertEquals("not ignored as its",(String) ((List) m.get("descdend")).get(0) ); + Assert.assertEquals("antler" ,(String) ((List) m.get("descdend")).get(1) ); + Assert.assertEquals("Within the body of",(String) ((List) m.get("descdend")).get(2) ); + Assert.assertEquals("inner as well" ,(String) ((List) m.get("descdend")).get(3) ); + Assert.assertEquals("sub clauses" ,m.get("inr_descd").toString().trim()); + } + + @Test + public void any_decendent_of_a_child1() { + XPathRecordReader rr = new XPathRecordReader("/anycd"); + rr.addField("descdend", "/anycd//boo", true); + + // same test string as above but checking to see if *all* //boo's are collected + String xml="\n" + + " this top level is ignored because it is external to the forEach\n" + + " as is this element\n" + + " \n" + + " This one is not ignored as its inside a forEach\n" + + " big antler\n" + + " My flattened title \n" + + " My summary skip this! \n" + + " Within the body ofMy text\n" + + "

    Access inner sub clauses as well

    \n" + + "
    \n" + + "
    "; + + List> l = rr.getAllRecords(new StringReader(xml)); + Assert.assertEquals(1, l.size()); + Map m = l.get(0); + Assert.assertEquals("top level" ,(String) ((List) m.get("descdend")).get(0) ); + Assert.assertEquals("this element" ,(String) ((List) m.get("descdend")).get(1) ); + Assert.assertEquals("not ignored as its",(String) ((List) m.get("descdend")).get(2) ); + Assert.assertEquals("antler" ,(String) ((List) m.get("descdend")).get(3) ); + Assert.assertEquals("title" ,(String) ((List) m.get("descdend")).get(4) ); + Assert.assertEquals("Within the body of",(String) ((List) m.get("descdend")).get(5) ); + Assert.assertEquals("inner as well" ,(String) ((List) m.get("descdend")).get(6) ); + } + + @Test + public void any_decendent_of_a_child2() { + XPathRecordReader rr = new XPathRecordReader("/anycd"); + rr.addField("descdend", "/anycd/contenido//boo", true); + + // same test string as above but checking to see if *some* //boo's are collected + String xml="\n" + + " this top level is ignored because it is external to the forEach\n" + + " as is this element\n" + + " \n" + + " This one is not ignored as its inside a forEach\n" + + " big antler\n" + + " My flattened title \n" + + " My summary skip this! \n" + + " Within the body ofMy text\n" + + "

    Access inner sub clauses as well

    \n" + + "
    \n" + + "
    "; + + List> l = rr.getAllRecords(new StringReader(xml)); + Assert.assertEquals(1, l.size()); + Map m = l.get(0); + Assert.assertEquals("not ignored as its",((List) m.get("descdend")).get(0) ); + Assert.assertEquals("antler" ,((List) m.get("descdend")).get(1) ); + Assert.assertEquals("title" ,((List) m.get("descdend")).get(2) ); + Assert.assertEquals("Within the body of",((List) m.get("descdend")).get(3) ); + Assert.assertEquals("inner as well" ,((List) m.get("descdend")).get(4) ); + } + + @Test + public void another() { + String xml="\n" + + " \n" + + " \n" + + " This is my title \n" + + " This is my summary \n" + + " This is the body of my text \n" + + " \n" + + ""; + XPathRecordReader rr = new XPathRecordReader("/root/contenido"); + rr.addField("id", "/root/contenido/@id", false); + rr.addField("title", "/root/contenido/titulo", false); + rr.addField("resume","/root/contenido/resumen",false); + rr.addField("text", "/root/contenido/texto", false); + + List> l = rr.getAllRecords(new StringReader(xml)); + Assert.assertEquals(1, l.size()); + Map m = l.get(0); + Assert.assertEquals("10097", m.get("id")); + Assert.assertEquals("This is my title", m.get("title").toString().trim()); + Assert.assertEquals("This is my summary", m.get("resume").toString().trim()); + Assert.assertEquals("This is the body of my text", m.get("text").toString() + .trim()); + } + + @Test + public void sameForEachAndXpath(){ + String xml="\n" + + " \n" + + " hello\n" + + " \n" + + " \n" + + ""; + XPathRecordReader rr = new XPathRecordReader("/root/cat/name"); + rr.addField("catName", "/root/cat/name",false); + List> l = rr.getAllRecords(new StringReader(xml)); + Assert.assertEquals("hello",l.get(0).get("catName")); + } + + @Test + public void putNullTest(){ + String xml = "\n" + + " \n" + + " \n" + + "
    A.1.1\n" + + " B.1.1\n" + + " \n" + + " \n" + + " B.1.2\n" + + " C.1.2\n" + + " \n" + + " \n" + + " \n" + + " \n" + + " A.2.1\n" + + " C.2.1\n" + + " \n" + + " \n" + + " B.2.2\n" + + " C.2.2\n" + + " \n" + + " \n" + + ""; + XPathRecordReader rr = new XPathRecordReader("/root/i"); + rr.addField("a", "/root/i/x/a", true); + rr.addField("b", "/root/i/x/b", true); + rr.addField("c", "/root/i/x/c", true); + List> l = rr.getAllRecords(new StringReader(xml)); + Map map = l.get(0); + List a = (List) map.get("a"); + List b = (List) map.get("b"); + List c = (List) map.get("c"); + + Assert.assertEquals("A.1.1",a.get(0)); + Assert.assertEquals("B.1.1",b.get(0)); + Assert.assertNull(c.get(0)); + + Assert.assertNull(a.get(1)); + Assert.assertEquals("B.1.2",b.get(1)); + Assert.assertEquals("C.1.2",c.get(1)); + + map = l.get(1); + a = (List) map.get("a"); + b = (List) map.get("b"); + c = (List) map.get("c"); + Assert.assertEquals("A.2.1",a.get(0)); + Assert.assertNull(b.get(0)); + Assert.assertEquals("C.2.1",c.get(0)); + + Assert.assertNull(a.get(1)); + Assert.assertEquals("B.2.2",b.get(1)); + Assert.assertEquals("C.2.2",c.get(1)); + } + + + @Test + public void testError(){ + String malformedXml = "\n" + + " \n" + + " 1\n" + + " test1\n" + + " \n" + + " \n" + + " 2\n" + + " test2\n" + + " \n" + + " \n" + + " 3\n" + // invalid XML + " test3\n" + + " \n" + + ""; + XPathRecordReader rr = new XPathRecordReader("/root/node"); + rr.addField("id", "/root/node/id", true); + rr.addField("desc", "/root/node/desc", true); + try { + rr.getAllRecords(new StringReader(malformedXml)); + Assert.fail("A RuntimeException was expected: the input XML is invalid."); + } catch (Exception e) { } + } +} diff --git a/solr/contrib/dataimporthandler/src/test/resources/solr/conf/contentstream-solrconfig.xml b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/contentstream-solrconfig.xml new file mode 100644 index 00000000000..acfed3c9224 --- /dev/null +++ 
b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/contentstream-solrconfig.xml @@ -0,0 +1,408 @@ + + + + + + ${solr.abortOnConfigurationError:true} + + + ${solr.data.dir:./solr/data} + + + + + false + + 10 + + + + 32 + 2147483647 + 10000 + 1000 + 10000 + + + + + + + + + + + single + + + + + false + 32 + 10 + + + 2147483647 + 10000 + + + false + + + + + + + + + 100000 + + + + + + + 1024 + + + + + + + + + + + + + true + + + + + + + + 50 + + + 200 + + + + + + + + + solr 0 10 + rocks 0 10 + static newSearcher warming query from solrconfig.xml + + + + + + + + + + + false + + + 4 + + + + + + + + + + + + + + + + + + + + + + + explicit + + + + + + + data-config.xml + + + + + + + + + explicit + + + + + + + + + + + + *:* + + + + + + diff --git a/solr/contrib/dataimporthandler/src/test/resources/solr/conf/data-config-with-datasource.xml b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/data-config-with-datasource.xml new file mode 100644 index 00000000000..9566a54850b --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/data-config-with-datasource.xml @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/solr/contrib/dataimporthandler/src/test/resources/solr/conf/data-config-with-transformer.xml b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/data-config-with-transformer.xml new file mode 100644 index 00000000000..c58b21d5a68 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/data-config-with-transformer.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/solr/contrib/dataimporthandler/src/test/resources/solr/conf/dataconfig-contentstream.xml b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/dataconfig-contentstream.xml new file mode 100644 index 00000000000..7520e74742b --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/dataconfig-contentstream.xml @@ -0,0 +1,10 @@ + + + + + + + + + diff --git 
a/solr/contrib/dataimporthandler/src/test/resources/solr/conf/dataimport-nodatasource-solrconfig.xml b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/dataimport-nodatasource-solrconfig.xml new file mode 100644 index 00000000000..4b5a06ede92 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/dataimport-nodatasource-solrconfig.xml @@ -0,0 +1,404 @@ + + + + + + ${solr.abortOnConfigurationError:true} + + + ${solr.data.dir:./solr/data} + + + + + false + + 10 + + + + 32 + 2147483647 + 10000 + 1000 + 10000 + + + + + + + + + + + single + + + + + false + 32 + 10 + + + 2147483647 + 10000 + + + false + + + + + + + + + 100000 + + + + + + + 1024 + + + + + + + + + + + + + true + + + + + + + + 50 + + + 200 + + + + + + + + + solr 0 10 + rocks 0 10 + static newSearcher warming query from solrconfig.xml + + + + + + + + + + + false + + + 4 + + + + + + + + + + + + + + + + + + + + + + + explicit + + + + + + + + + + + + explicit + + + + + + + + + + + + *:* + + + + + + diff --git a/solr/contrib/dataimporthandler/src/test/resources/solr/conf/dataimport-schema.xml b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/dataimport-schema.xml new file mode 100644 index 00000000000..75416dff34e --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/dataimport-schema.xml @@ -0,0 +1,304 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id + + + desc + + + + + diff --git a/solr/contrib/dataimporthandler/src/test/resources/solr/conf/dataimport-solr_id-schema.xml b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/dataimport-solr_id-schema.xml new file mode 100644 index 00000000000..f7b8dfd6d1f --- /dev/null +++ 
b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/dataimport-solr_id-schema.xml @@ -0,0 +1,304 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + solr_id + + + desc + + + + + diff --git a/solr/contrib/dataimporthandler/src/test/resources/solr/conf/dataimport-solrconfig.xml b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/dataimport-solrconfig.xml new file mode 100644 index 00000000000..4b5a06ede92 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/dataimport-solrconfig.xml @@ -0,0 +1,404 @@ + + + + + + ${solr.abortOnConfigurationError:true} + + + ${solr.data.dir:./solr/data} + + + + + false + + 10 + + + + 32 + 2147483647 + 10000 + 1000 + 10000 + + + + + + + + + + + single + + + + + false + 32 + 10 + + + 2147483647 + 10000 + + + false + + + + + + + + + 100000 + + + + + + + 1024 + + + + + + + + + + + + + true + + + + + + + + 50 + + + 200 + + + + + + + + + solr 0 10 + rocks 0 10 + static newSearcher warming query from solrconfig.xml + + + + + + + + + + + false + + + 4 + + + + + + + + + + + + + + + + + + + + + + + explicit + + + + + + + + + + + + explicit + + + + + + + + + + + + *:* + + + + + + diff --git a/solr/contrib/dataimporthandler/src/test/resources/solr/conf/protwords.txt b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/protwords.txt new file mode 100644 index 00000000000..7878147ba58 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/protwords.txt @@ -0,0 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +#use a protected word file to avoid stemming two +#unrelated words to the same base word. +#to test, we will use words that would normally obviously be stemmed. +cats +ridding diff --git a/solr/contrib/dataimporthandler/src/test/resources/solr/conf/single-entity-data-config.xml b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/single-entity-data-config.xml new file mode 100644 index 00000000000..f9d35238f04 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/single-entity-data-config.xml @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/solr/contrib/dataimporthandler/src/test/resources/solr/conf/stopwords.txt b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/stopwords.txt new file mode 100644 index 00000000000..688e3075431 --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/stopwords.txt @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +stopworda +stopwordb diff --git a/solr/contrib/dataimporthandler/src/test/resources/solr/conf/synonyms.txt b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/synonyms.txt new file mode 100644 index 00000000000..a7624f0597d --- /dev/null +++ b/solr/contrib/dataimporthandler/src/test/resources/solr/conf/synonyms.txt @@ -0,0 +1,22 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+a => aa +b => b1 b2 +c => c1,c2 +a\=>a => b\=>b +a\,a => b\,b +foo,bar,baz + +Television,TV,Televisions diff --git a/solr/contrib/extraction/CHANGES.txt b/solr/contrib/extraction/CHANGES.txt new file mode 100644 index 00000000000..ae57d3bdc0c --- /dev/null +++ b/solr/contrib/extraction/CHANGES.txt @@ -0,0 +1,49 @@ +Apache Solr Content Extraction Library (Solr Cell) + Release Notes + +This file describes changes to the Solr Cell (contrib/extraction) module. See SOLR-284 for details. + +Introduction +------------ + +Apache Solr Extraction provides a means for extracting and indexing content contained in "rich" documents, such +as Microsoft Word, Adobe PDF, etc. (Each name is a trademark of their respective owners) This contrib module +uses Apache Tika to extract content and metadata from the files, which can then be indexed. For more information, +see http://wiki.apache.org/solr/ExtractingRequestHandler + +Getting Started +--------------- +You will need Solr up and running. Then, simply add the extraction JAR file, plus the Tika dependencies (in the ./lib folder) +to your Solr Home lib directory. See http://wiki.apache.org/solr/ExtractingRequestHandler for more details on hooking it in + and configuring. + +$Id:$ + +================== Release 1.5-dev ================== + + +* SOLR-1567: Upgrade to Tika 0.5, which upgrades many of the underlying libraries (PDFBox, for example) too (gsingers) + +* SOLR-1756: The date.format setting causes ClassCastException when enabled and the config code that + parses this setting does not properly use the same iterator instance. (Christoph Brill, Mark Miller) + +* SOLR-1738: Upgrade to Tika 0.6 (gsingers) + +* SOLR-18913: Add ICU4j to libs and add tests for Arabic extraction (Robert Muir via gsingers) + +================== Release 1.4.0 ================== + +1. SOLR-284: Added in support for extraction. (Eric Pugh, Chris Harris, gsingers) + +2. SOLR-284: Removed "silent success" key generation (gsingers) + +3. 
SOLR-1075: Upgrade to Tika 0.3. See http://www.apache.org/dist/lucene/tika/CHANGES-0.3.txt (gsingers) + +4. SOLR-1128: Added metadata output to "extract only" option. (gsingers) + +5. SOLR-1310: Upgrade to Tika 0.4. Note there are some differences in detecting Languages now. + See http://www.lucidimagination.com/search/document/d6f1899a85b2a45c/vote_apache_tika_0_4_release_candidate_2#d6f1899a85b2a45c + for discussion on language detection. + See http://www.apache.org/dist/lucene/tika/CHANGES-0.4.txt. (gsingers) + +6. SOLR-1274: Added text serialization output for extractOnly (Peter Wolanin, gsingers) diff --git a/solr/contrib/extraction/build.xml b/solr/contrib/extraction/build.xml new file mode 100644 index 00000000000..471389a1e86 --- /dev/null +++ b/solr/contrib/extraction/build.xml @@ -0,0 +1,139 @@ + + + + + + + + + + + + + + Solr Integration with Tika for extracting content from binary file formats such as Microsoft Word and Adobe PDF. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Tests failed! + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/solr/contrib/extraction/lib/asm-3.1.jar b/solr/contrib/extraction/lib/asm-3.1.jar new file mode 100644 index 00000000000..01536921c0d --- /dev/null +++ b/solr/contrib/extraction/lib/asm-3.1.jar @@ -0,0 +1,2 @@ +AnyObjectId[8217cae0a1bc977b241e0c8517cc2e3e7cede276] was removed in git history. +Apache SVN contains full history. \ No newline at end of file diff --git a/solr/contrib/extraction/lib/commons-compress-1.0.jar b/solr/contrib/extraction/lib/commons-compress-1.0.jar new file mode 100644 index 00000000000..473e2bfa654 --- /dev/null +++ b/solr/contrib/extraction/lib/commons-compress-1.0.jar @@ -0,0 +1,2 @@ +AnyObjectId[78d832c11c42023d4bc12077a1d9b7b5025217bc] was removed in git history. +Apache SVN contains full history. 
\ No newline at end of file diff --git a/solr/contrib/extraction/lib/commons-logging-1.1.1.jar b/solr/contrib/extraction/lib/commons-logging-1.1.1.jar new file mode 100644 index 00000000000..e537a05e956 --- /dev/null +++ b/solr/contrib/extraction/lib/commons-logging-1.1.1.jar @@ -0,0 +1,2 @@ +AnyObjectId[1deef144cb17ed2c11c6cdcdcb2d9530fa8d0b47] was removed in git history. +Apache SVN contains full history. \ No newline at end of file diff --git a/solr/contrib/extraction/lib/dom4j-1.6.1.jar b/solr/contrib/extraction/lib/dom4j-1.6.1.jar new file mode 100644 index 00000000000..cf7601ea8ff --- /dev/null +++ b/solr/contrib/extraction/lib/dom4j-1.6.1.jar @@ -0,0 +1,2 @@ +AnyObjectId[c8c4dbb92d6c23a7fbb2813eb721eb4cce91750c] was removed in git history. +Apache SVN contains full history. \ No newline at end of file diff --git a/solr/contrib/extraction/lib/fontbox-0.8.0-incubator.jar b/solr/contrib/extraction/lib/fontbox-0.8.0-incubator.jar new file mode 100644 index 00000000000..e0b3470ce1a --- /dev/null +++ b/solr/contrib/extraction/lib/fontbox-0.8.0-incubator.jar @@ -0,0 +1,2 @@ +AnyObjectId[91a496ac1164c08522c1e622fc39b8e991dd2d0b] was removed in git history. +Apache SVN contains full history. \ No newline at end of file diff --git a/solr/contrib/extraction/lib/geronimo-stax-api_1.0_spec-1.0.1.jar b/solr/contrib/extraction/lib/geronimo-stax-api_1.0_spec-1.0.1.jar new file mode 100644 index 00000000000..77da03b5bd6 --- /dev/null +++ b/solr/contrib/extraction/lib/geronimo-stax-api_1.0_spec-1.0.1.jar @@ -0,0 +1,2 @@ +AnyObjectId[ab1ee3ba605df11b3075677c808d092845dad123] was removed in git history. +Apache SVN contains full history. \ No newline at end of file diff --git a/solr/contrib/extraction/lib/icu4j-4_2_1.jar b/solr/contrib/extraction/lib/icu4j-4_2_1.jar new file mode 100644 index 00000000000..8053ea3e88c --- /dev/null +++ b/solr/contrib/extraction/lib/icu4j-4_2_1.jar @@ -0,0 +1,2 @@ +AnyObjectId[bf0d532cb19e6ce3972f370a13a1940d1a8d1db8] was removed in git history. 
+Apache SVN contains full history. \ No newline at end of file diff --git a/solr/contrib/extraction/lib/jempbox-0.8.0-incubator.jar b/solr/contrib/extraction/lib/jempbox-0.8.0-incubator.jar new file mode 100644 index 00000000000..f01f9529a03 --- /dev/null +++ b/solr/contrib/extraction/lib/jempbox-0.8.0-incubator.jar @@ -0,0 +1,2 @@ +AnyObjectId[adcead7737700efc2c77f7f16a83ce0c0547381a] was removed in git history. +Apache SVN contains full history. \ No newline at end of file diff --git a/solr/contrib/extraction/lib/log4j-1.2.14.jar b/solr/contrib/extraction/lib/log4j-1.2.14.jar new file mode 100644 index 00000000000..2812b3b95fb --- /dev/null +++ b/solr/contrib/extraction/lib/log4j-1.2.14.jar @@ -0,0 +1,2 @@ +AnyObjectId[625130719013f195869881a36dcb8d2b14d64d1e] was removed in git history. +Apache SVN contains full history. \ No newline at end of file diff --git a/solr/contrib/extraction/lib/metadata-extractor-2.4.0-beta-1.jar b/solr/contrib/extraction/lib/metadata-extractor-2.4.0-beta-1.jar new file mode 100644 index 00000000000..a396b86ca0b --- /dev/null +++ b/solr/contrib/extraction/lib/metadata-extractor-2.4.0-beta-1.jar @@ -0,0 +1,2 @@ +AnyObjectId[3720d649dd56d96f9351435dea7c2c921a0be050] was removed in git history. +Apache SVN contains full history. \ No newline at end of file diff --git a/solr/contrib/extraction/lib/pdfbox-0.8.0-incubating.jar b/solr/contrib/extraction/lib/pdfbox-0.8.0-incubating.jar new file mode 100644 index 00000000000..f0bffd53e8c --- /dev/null +++ b/solr/contrib/extraction/lib/pdfbox-0.8.0-incubating.jar @@ -0,0 +1,2 @@ +AnyObjectId[637324e4666d5fbf2fe29cb8151a790b1ccabcec] was removed in git history. +Apache SVN contains full history. 
\ No newline at end of file diff --git a/solr/contrib/extraction/lib/poi-3.6.jar b/solr/contrib/extraction/lib/poi-3.6.jar new file mode 100644 index 00000000000..804f9d9737d --- /dev/null +++ b/solr/contrib/extraction/lib/poi-3.6.jar @@ -0,0 +1,2 @@ +AnyObjectId[9972d973277def35e3749d39cf39dfa37d61f75c] was removed in git history. +Apache SVN contains full history. \ No newline at end of file diff --git a/solr/contrib/extraction/lib/poi-ooxml-3.6.jar b/solr/contrib/extraction/lib/poi-ooxml-3.6.jar new file mode 100644 index 00000000000..343f2c569f2 --- /dev/null +++ b/solr/contrib/extraction/lib/poi-ooxml-3.6.jar @@ -0,0 +1,2 @@ +AnyObjectId[c986646e69bef4e3cd9086eabfc67f6a200fa3d9] was removed in git history. +Apache SVN contains full history. \ No newline at end of file diff --git a/solr/contrib/extraction/lib/poi-ooxml-schemas-3.6.jar b/solr/contrib/extraction/lib/poi-ooxml-schemas-3.6.jar new file mode 100644 index 00000000000..a4a66f3976d --- /dev/null +++ b/solr/contrib/extraction/lib/poi-ooxml-schemas-3.6.jar @@ -0,0 +1,2 @@ +AnyObjectId[5b79f0246f6b9b599767586fc426b26cf28c960a] was removed in git history. +Apache SVN contains full history. \ No newline at end of file diff --git a/solr/contrib/extraction/lib/poi-scratchpad-3.6.jar b/solr/contrib/extraction/lib/poi-scratchpad-3.6.jar new file mode 100644 index 00000000000..f261b6ad666 --- /dev/null +++ b/solr/contrib/extraction/lib/poi-scratchpad-3.6.jar @@ -0,0 +1,2 @@ +AnyObjectId[1a01b2b895b560d94dd12b3fd5e46a39724e16d1] was removed in git history. +Apache SVN contains full history. \ No newline at end of file diff --git a/solr/contrib/extraction/lib/tagsoup-1.2.jar b/solr/contrib/extraction/lib/tagsoup-1.2.jar new file mode 100644 index 00000000000..95267dcb1d0 --- /dev/null +++ b/solr/contrib/extraction/lib/tagsoup-1.2.jar @@ -0,0 +1,2 @@ +AnyObjectId[af27803ec117e6ec643b8522e266481253b35fe3] was removed in git history. +Apache SVN contains full history. 
\ No newline at end of file diff --git a/solr/contrib/extraction/lib/tika-core-0.6.jar b/solr/contrib/extraction/lib/tika-core-0.6.jar new file mode 100644 index 00000000000..20dc925222c --- /dev/null +++ b/solr/contrib/extraction/lib/tika-core-0.6.jar @@ -0,0 +1,2 @@ +AnyObjectId[9278f8599ceaba0feb0d8ecb3e6da4e5a1881f12] was removed in git history. +Apache SVN contains full history. \ No newline at end of file diff --git a/solr/contrib/extraction/lib/tika-parsers-0.6.jar b/solr/contrib/extraction/lib/tika-parsers-0.6.jar new file mode 100644 index 00000000000..3d36c6c3503 --- /dev/null +++ b/solr/contrib/extraction/lib/tika-parsers-0.6.jar @@ -0,0 +1,2 @@ +AnyObjectId[450ecc650514ab9680849cd53630528385c33dcb] was removed in git history. +Apache SVN contains full history. \ No newline at end of file diff --git a/solr/contrib/extraction/lib/xercesImpl-2.8.1.jar b/solr/contrib/extraction/lib/xercesImpl-2.8.1.jar new file mode 100644 index 00000000000..d29c6a23588 --- /dev/null +++ b/solr/contrib/extraction/lib/xercesImpl-2.8.1.jar @@ -0,0 +1,2 @@ +AnyObjectId[3b351f6e2b566f73b742510738a52b866b4ffd0d] was removed in git history. +Apache SVN contains full history. \ No newline at end of file diff --git a/solr/contrib/extraction/lib/xml-apis-1.0.b2.jar b/solr/contrib/extraction/lib/xml-apis-1.0.b2.jar new file mode 100644 index 00000000000..65531da804a --- /dev/null +++ b/solr/contrib/extraction/lib/xml-apis-1.0.b2.jar @@ -0,0 +1,2 @@ +AnyObjectId[ad33a5afa6ddae02f3ed0b42b1c7fcbf22a7d2ab] was removed in git history. +Apache SVN contains full history. \ No newline at end of file diff --git a/solr/contrib/extraction/lib/xmlbeans-2.3.0.jar b/solr/contrib/extraction/lib/xmlbeans-2.3.0.jar new file mode 100644 index 00000000000..36e6d816b99 --- /dev/null +++ b/solr/contrib/extraction/lib/xmlbeans-2.3.0.jar @@ -0,0 +1,2 @@ +AnyObjectId[ccd8163421ba8d0361315fb947f2432f1e6d7a83] was removed in git history. +Apache SVN contains full history. 
\ No newline at end of file diff --git a/solr/contrib/extraction/solr-cell-pom.xml.template b/solr/contrib/extraction/solr-cell-pom.xml.template new file mode 100644 index 00000000000..44f5d3fc490 --- /dev/null +++ b/solr/contrib/extraction/solr-cell-pom.xml.template @@ -0,0 +1,51 @@ + + + + + 4.0.0 + + + org.apache.solr + solr-parent + @maven_version@ + + + org.apache.solr + solr-cell + Apache Solr Content Extraction Library + @maven_version@ + Apache Solr Content Extraction Library integrates Apache Tika content extraction framework into Solr + jar + + + + org.apache.tika + tika-core + 0.4 + + + org.apache.tika + tika-parsers + 0.4 + + + diff --git a/solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java b/solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java new file mode 100644 index 00000000000..a4427d7c938 --- /dev/null +++ b/solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java @@ -0,0 +1,221 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.handler.extraction; + +import org.apache.commons.io.IOUtils; +import org.apache.solr.common.SolrException; +import org.apache.solr.common.params.SolrParams; +import org.apache.solr.common.params.UpdateParams; +import org.apache.solr.common.util.ContentStream; +import org.apache.solr.common.util.NamedList; +import org.apache.solr.request.SolrQueryRequest; +import org.apache.solr.response.SolrQueryResponse; +import org.apache.solr.schema.IndexSchema; +import org.apache.solr.update.AddUpdateCommand; +import org.apache.solr.update.processor.UpdateRequestProcessor; +import org.apache.solr.handler.ContentStreamLoader; +import org.apache.tika.config.TikaConfig; +import org.apache.tika.metadata.Metadata; +import org.apache.tika.parser.AutoDetectParser; +import org.apache.tika.parser.Parser; +import org.apache.tika.sax.XHTMLContentHandler; +import org.apache.tika.sax.xpath.Matcher; +import org.apache.tika.sax.xpath.MatchingContentHandler; +import org.apache.tika.sax.xpath.XPathParser; +import org.apache.tika.exception.TikaException; +import org.apache.xml.serialize.OutputFormat; +import org.apache.xml.serialize.BaseMarkupSerializer; +import org.apache.xml.serialize.XMLSerializer; +import org.apache.xml.serialize.TextSerializer; +import org.xml.sax.ContentHandler; +import org.xml.sax.SAXException; + +import java.io.IOException; +import java.io.InputStream; +import java.io.StringWriter; + + +/** + * The class responsible for loading extracted content into Solr. + * + **/ +public class ExtractingDocumentLoader extends ContentStreamLoader { + /** + * Extract Only supported format + */ + public static final String TEXT_FORMAT = "text"; + /** + * Extract Only supported format. Default + */ + public static final String XML_FORMAT = "xml"; + /** + * XHTML XPath parser. 
+ */ + private static final XPathParser PARSER = + new XPathParser("xhtml", XHTMLContentHandler.XHTML); + + final IndexSchema schema; + final SolrParams params; + final UpdateRequestProcessor processor; + protected AutoDetectParser autoDetectParser; + + private final AddUpdateCommand templateAdd; + + protected TikaConfig config; + protected SolrContentHandlerFactory factory; + //protected Collection dateFormats = DateUtil.DEFAULT_DATE_FORMATS; + + public ExtractingDocumentLoader(SolrQueryRequest req, UpdateRequestProcessor processor, + TikaConfig config, SolrContentHandlerFactory factory) { + this.params = req.getParams(); + schema = req.getSchema(); + this.config = config; + this.processor = processor; + + templateAdd = new AddUpdateCommand(); + templateAdd.allowDups = false; + templateAdd.overwriteCommitted = true; + templateAdd.overwritePending = true; + + if (params.getBool(UpdateParams.OVERWRITE, true)) { + templateAdd.allowDups = false; + templateAdd.overwriteCommitted = true; + templateAdd.overwritePending = true; + } else { + templateAdd.allowDups = true; + templateAdd.overwriteCommitted = false; + templateAdd.overwritePending = false; + } + //this is lightweight + autoDetectParser = new AutoDetectParser(config); + this.factory = factory; + } + + + /** + * this must be MT safe... may be called concurrently from multiple threads. 
+ * + * @param + * @param + */ + void doAdd(SolrContentHandler handler, AddUpdateCommand template) + throws IOException { + template.solrDoc = handler.newDocument(); + processor.processAdd(template); + } + + void addDoc(SolrContentHandler handler) throws IOException { + templateAdd.indexedId = null; + doAdd(handler, templateAdd); + } + + /** + * @param req + * @param stream + * @throws java.io.IOException + */ + public void load(SolrQueryRequest req, SolrQueryResponse rsp, ContentStream stream) throws IOException { + errHeader = "ExtractingDocumentLoader: " + stream.getSourceInfo(); + Parser parser = null; + String streamType = req.getParams().get(ExtractingParams.STREAM_TYPE, null); + if (streamType != null) { + //Cache? Parsers are lightweight to construct and thread-safe, so I'm told + parser = config.getParser(streamType.trim().toLowerCase()); + } else { + parser = autoDetectParser; + } + if (parser != null) { + Metadata metadata = new Metadata(); + metadata.add(ExtractingMetadataConstants.STREAM_NAME, stream.getName()); + metadata.add(ExtractingMetadataConstants.STREAM_SOURCE_INFO, stream.getSourceInfo()); + metadata.add(ExtractingMetadataConstants.STREAM_SIZE, String.valueOf(stream.getSize())); + metadata.add(ExtractingMetadataConstants.STREAM_CONTENT_TYPE, stream.getContentType()); + + // If you specify the resource name (the filename, roughly) with this parameter, + // then Tika can make use of it in guessing the appropriate MIME type: + String resourceName = req.getParams().get(ExtractingParams.RESOURCE_NAME, null); + if (resourceName != null) { + metadata.add(Metadata.RESOURCE_NAME_KEY, resourceName); + } + + SolrContentHandler handler = factory.createSolrContentHandler(metadata, params, schema); + InputStream inputStream = null; + try { + inputStream = stream.getStream(); + String xpathExpr = params.get(ExtractingParams.XPATH_EXPRESSION); + boolean extractOnly = params.getBool(ExtractingParams.EXTRACT_ONLY, false); + ContentHandler parsingHandler = 
handler; + + StringWriter writer = null; + BaseMarkupSerializer serializer = null; + if (extractOnly == true) { + String extractFormat = params.get(ExtractingParams.EXTRACT_FORMAT, "xml"); + writer = new StringWriter(); + if (extractFormat.equals(TEXT_FORMAT)) { + serializer = new TextSerializer(); + serializer.setOutputCharStream(writer); + serializer.setOutputFormat(new OutputFormat("Text", "UTF-8", true)); + } else { + serializer = new XMLSerializer(writer, new OutputFormat("XML", "UTF-8", true)); + } + if (xpathExpr != null) { + Matcher matcher = + PARSER.parse(xpathExpr); + serializer.startDocument();//The MatchingContentHandler does not invoke startDocument. See http://tika.markmail.org/message/kknu3hw7argwiqin + parsingHandler = new MatchingContentHandler(serializer, matcher); + } else { + parsingHandler = serializer; + } + } else if (xpathExpr != null) { + Matcher matcher = + PARSER.parse(xpathExpr); + parsingHandler = new MatchingContentHandler(handler, matcher); + } //else leave it as is + + //potentially use a wrapper handler for parsing, but we still need the SolrContentHandler for getting the document. 
+ parser.parse(inputStream, parsingHandler, metadata); + if (extractOnly == false) { + addDoc(handler); + } else { + //serializer is not null, so we need to call endDoc on it if using xpath + if (xpathExpr != null){ + serializer.endDocument(); + } + rsp.add(stream.getName(), writer.toString()); + writer.close(); + String[] names = metadata.names(); + NamedList metadataNL = new NamedList(); + for (int i = 0; i < names.length; i++) { + String[] vals = metadata.getValues(names[i]); + metadataNL.add(names[i], vals); + } + rsp.add(stream.getName() + "_metadata", metadataNL); + } + } catch (SAXException e) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e); + } catch (TikaException e) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e); + } finally { + IOUtils.closeQuietly(inputStream); + } + } else { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Stream type of " + streamType + " didn't match any known parsers. Please supply the " + ExtractingParams.STREAM_TYPE + " parameter."); + } + } + + +} diff --git a/solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/ExtractingMetadataConstants.java b/solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/ExtractingMetadataConstants.java new file mode 100644 index 00000000000..474e16cfa5f --- /dev/null +++ b/solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/ExtractingMetadataConstants.java @@ -0,0 +1,29 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.extraction; + + +/** + * Constants used internally by the {@link ExtractingRequestHandler}. + * + **/ +public interface ExtractingMetadataConstants { + String STREAM_NAME = "stream_name"; + String STREAM_SOURCE_INFO = "stream_source_info"; + String STREAM_SIZE = "stream_size"; + String STREAM_CONTENT_TYPE = "stream_content_type"; +} diff --git a/solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/ExtractingParams.java b/solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/ExtractingParams.java new file mode 100644 index 00000000000..67a73b73b6d --- /dev/null +++ b/solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/ExtractingParams.java @@ -0,0 +1,142 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.handler.extraction; + + +/** + * The various Solr Parameters names to use when extracting content. + * + **/ +public interface ExtractingParams { + + /** + * Map all generated attribute names to field names with lowercase and underscores. + */ + public static final String LOWERNAMES = "lowernames"; + + + /** + * The param prefix for mapping Tika metadata to Solr fields. + *

    + * To map a field, add a name like: + *

    fmap.title=solr.title
    + * + * In this example, the tika "title" metadata value will be added to a Solr field named "solr.title" + * + * + */ + public static final String MAP_PREFIX = "fmap."; + + /** + * The boost value for the name of the field. The boost can be specified by a name mapping. + *

    + * For example + *

    +   * map.title=solr.title
    +   * boost.solr.title=2.5
    +   * 
    + * will boost the solr.title field for this document by 2.5 + * + */ + public static final String BOOST_PREFIX = "boost."; + + /** + * Pass in literal values to be added to the document, as in + *
    +   *  literal.myField=Foo 
    +   * 
    + * + */ + public static final String LITERALS_PREFIX = "literal."; + + + /** + * Restrict the extracted parts of a document to be indexed + * by passing in an XPath expression. All content that satisfies the XPath expr. + * will be passed to the {@link SolrContentHandler}. + *

    + * See Tika's docs for what the extracted document looks like. + *

    + * @see #CAPTURE_ELEMENTS + */ + public static final String XPATH_EXPRESSION = "xpath"; + + + /** + * Only extract and return the content, do not index it. + */ + public static final String EXTRACT_ONLY = "extractOnly"; + + /** + * Content output format if extractOnly is true. Default is "xml", alternative is "text". + */ + public static final String EXTRACT_FORMAT = "extractFormat"; + + /** + * Capture attributes separately according to the name of the element, instead of just adding them to the string buffer + */ + public static final String CAPTURE_ATTRIBUTES = "captureAttr"; + + + /** + * Capture the specified fields (and everything included below it that isn't capture by some other capture field) separately from the default. This is different + * then the case of passing in an XPath expression. + *

    + * The Capture field is based on the localName returned to the {@link SolrContentHandler} + * by Tika, not to be confused by the mapped field. The field name can then + * be mapped into the index schema. + *

    + * For instance, a Tika document may look like: + *

    +   *  <html>
    +   *    ...
    +   *    <body>
    +   *      <p>some text here.  <div>more text</div></p>
    +   *      Some more text
    +   *    </body>
    +   * 
    + * By passing in the p tag, you could capture all P tags separately from the rest of the t + * Thus, in the example, the capture of the P tag would be: "some text here. more text" + * + */ + public static final String CAPTURE_ELEMENTS = "capture"; + + /** + * The type of the stream. If not specified, Tika will use mime type detection. + */ + public static final String STREAM_TYPE = "stream.type"; + + + /** + * Optional. The file name. If specified, Tika can take this into account while + * guessing the MIME type. + */ + public static final String RESOURCE_NAME = "resource.name"; + + + /** + * Optional. If specified, the prefix will be prepended to all Metadata, such that it would be possible + * to setup a dynamic field to automatically capture it + */ + public static final String UNKNOWN_FIELD_PREFIX = "uprefix"; + + /** + * Optional. If specified and the name of a potential field cannot be determined, the default Field specified + * will be used instead. + */ + public static final String DEFAULT_FIELD = "defaultField"; +} diff --git a/solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java b/solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java new file mode 100644 index 00000000000..14cfc7efc14 --- /dev/null +++ b/solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java @@ -0,0 +1,130 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.extraction; + + +import org.apache.solr.common.SolrException; +import org.apache.solr.common.SolrException.ErrorCode; +import org.apache.solr.common.util.DateUtil; +import org.apache.solr.common.util.NamedList; +import org.apache.solr.core.SolrCore; +import org.apache.solr.request.SolrQueryRequest; +import org.apache.solr.update.processor.UpdateRequestProcessor; +import org.apache.solr.util.plugin.SolrCoreAware; +import org.apache.solr.handler.ContentStreamHandlerBase; +import org.apache.solr.handler.ContentStreamLoader; +import org.apache.tika.config.TikaConfig; +import org.apache.tika.exception.TikaException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.util.Collection; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Map; + + +/** + * Handler for rich documents like PDF or Word or any other file format that Tika handles that need the text to be extracted + * first from the document. + *

    + */ +public class ExtractingRequestHandler extends ContentStreamHandlerBase implements SolrCoreAware { + + private transient static Logger log = LoggerFactory.getLogger(ExtractingRequestHandler.class); + + public static final String CONFIG_LOCATION = "tika.config"; + public static final String DATE_FORMATS = "date.formats"; + + protected TikaConfig config; + + + protected Collection dateFormats = DateUtil.DEFAULT_DATE_FORMATS; + protected SolrContentHandlerFactory factory; + + + @Override + public void init(NamedList args) { + super.init(args); + } + + public void inform(SolrCore core) { + if (initArgs != null) { + //if relative,then relative to config dir, otherwise, absolute path + String tikaConfigLoc = (String) initArgs.get(CONFIG_LOCATION); + if (tikaConfigLoc != null) { + File configFile = new File(tikaConfigLoc); + if (configFile.isAbsolute() == false) { + configFile = new File(core.getResourceLoader().getConfigDir(), configFile.getPath()); + } + try { + config = new TikaConfig(configFile); + } catch (Exception e) { + throw new SolrException(ErrorCode.SERVER_ERROR, e); + } + } else { + config = TikaConfig.getDefaultConfig(); + } + NamedList configDateFormats = (NamedList) initArgs.get(DATE_FORMATS); + if (configDateFormats != null && configDateFormats.size() > 0) { + dateFormats = new HashSet(); + Iterator it = configDateFormats.iterator(); + while (it.hasNext()) { + String format = (String) it.next().getValue(); + log.info("Adding Date Format: " + format); + dateFormats.add(format); + } + } + } else { + config = TikaConfig.getDefaultConfig(); + } + factory = createFactory(); + } + + protected SolrContentHandlerFactory createFactory() { + return new SolrContentHandlerFactory(dateFormats); + } + + + protected ContentStreamLoader newLoader(SolrQueryRequest req, UpdateRequestProcessor processor) { + return new ExtractingDocumentLoader(req, processor, config, factory); + } + + // ////////////////////// SolrInfoMBeans methods ////////////////////// + 
@Override + public String getDescription() { + return "Add/Update Rich document"; + } + + @Override + public String getVersion() { + return "$Revision:$"; + } + + @Override + public String getSourceId() { + return "$Id:$"; + } + + @Override + public String getSource() { + return "$URL:$"; + } +} + + diff --git a/solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/SolrContentHandler.java b/solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/SolrContentHandler.java new file mode 100644 index 00000000000..703801226d6 --- /dev/null +++ b/solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/SolrContentHandler.java @@ -0,0 +1,308 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.solr.handler.extraction; + +import org.apache.solr.common.SolrException; +import org.apache.solr.common.SolrInputDocument; +import org.apache.solr.common.params.SolrParams; +import org.apache.solr.common.util.DateUtil; +import org.apache.solr.schema.DateField; +import org.apache.solr.schema.IndexSchema; +import org.apache.solr.schema.SchemaField; +import org.apache.tika.metadata.Metadata; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.xml.sax.Attributes; +import org.xml.sax.SAXException; +import org.xml.sax.helpers.DefaultHandler; + +import java.text.DateFormat; +import java.util.*; + + +/** + * The class responsible for handling Tika events and translating them into {@link org.apache.solr.common.SolrInputDocument}s. + * This class is not thread-safe. + *

    + *

    + * User's may wish to override this class to provide their own functionality. + * + * @see org.apache.solr.handler.extraction.SolrContentHandlerFactory + * @see org.apache.solr.handler.extraction.ExtractingRequestHandler + * @see org.apache.solr.handler.extraction.ExtractingDocumentLoader + */ +public class SolrContentHandler extends DefaultHandler implements ExtractingParams { + private transient static Logger log = LoggerFactory.getLogger(SolrContentHandler.class); + private SolrInputDocument document; + + private Collection dateFormats = DateUtil.DEFAULT_DATE_FORMATS; + + private Metadata metadata; + private SolrParams params; + private StringBuilder catchAllBuilder = new StringBuilder(2048); + private IndexSchema schema; + private Map fieldBuilders = Collections.emptyMap(); + private LinkedList bldrStack = new LinkedList(); + + private boolean captureAttribs; + private boolean lowerNames; + private String contentFieldName = "content"; + + private String unknownFieldPrefix = ""; + private String defaultField = ""; + + public SolrContentHandler(Metadata metadata, SolrParams params, IndexSchema schema) { + this(metadata, params, schema, DateUtil.DEFAULT_DATE_FORMATS); + } + + + public SolrContentHandler(Metadata metadata, SolrParams params, + IndexSchema schema, Collection dateFormats) { + document = new SolrInputDocument(); + this.metadata = metadata; + this.params = params; + this.schema = schema; + this.dateFormats = dateFormats; + + this.lowerNames = params.getBool(LOWERNAMES, false); + this.captureAttribs = params.getBool(CAPTURE_ATTRIBUTES, false); + this.unknownFieldPrefix = params.get(UNKNOWN_FIELD_PREFIX, ""); + this.defaultField = params.get(DEFAULT_FIELD, ""); + String[] captureFields = params.getParams(CAPTURE_ELEMENTS); + if (captureFields != null && captureFields.length > 0) { + fieldBuilders = new HashMap(); + for (int i = 0; i < captureFields.length; i++) { + fieldBuilders.put(captureFields[i], new StringBuilder()); + } + } + 
bldrStack.add(catchAllBuilder); + } + + + /** + * This is called by a consumer when it is ready to deal with a new SolrInputDocument. Overriding + * classes can use this hook to add in or change whatever they deem fit for the document at that time. + * The base implementation adds the metadata as fields, allowing for potential remapping. + * + * @return The {@link org.apache.solr.common.SolrInputDocument}. + */ + public SolrInputDocument newDocument() { + float boost = 1.0f; + //handle the metadata extracted from the document + for (String name : metadata.names()) { + String[] vals = metadata.getValues(name); + addField(name, null, vals); + } + + //handle the literals from the params + Iterator paramNames = params.getParameterNamesIterator(); + while (paramNames.hasNext()) { + String pname = paramNames.next(); + if (!pname.startsWith(LITERALS_PREFIX)) continue; + + String name = pname.substring(LITERALS_PREFIX.length()); + addField(name, null, params.getParams(pname)); + } + + + //add in the content + addField(contentFieldName, catchAllBuilder.toString(), null); + + //add in the captured content + for (Map.Entry entry : fieldBuilders.entrySet()) { + if (entry.getValue().length() > 0) { + addField(entry.getKey(), entry.getValue().toString(), null); + } + } + if (log.isDebugEnabled()) { + log.debug("Doc: " + document); + } + return document; + } + + // Naming rules: + // 1) optionally map names to nicenames (lowercase+underscores) + // 2) execute "map" commands + // 3) if resulting field is unknown, map it to a common prefix + private void addField(String fname, String fval, String[] vals) { + if (lowerNames) { + StringBuilder sb = new StringBuilder(); + for (int i=0; i 0) { + name = unknownFieldPrefix + name; + sf = schema.getFieldOrNull(name); + } else if (sf == null && defaultField.length() > 0 && name.equals(Metadata.RESOURCE_NAME_KEY) == false /*let the fall through below handle this*/){ + name = defaultField; + sf = schema.getFieldOrNull(name); + } + + // 
Arguably we should handle this as a special case. Why? Because unlike basically + // all the other fields in metadata, this one was probably set not by Tika by in + // ExtractingDocumentLoader.load(). You shouldn't have to define a mapping for this + // field just because you specified a resource.name parameter to the handler, should + // you? + if (sf == null && unknownFieldPrefix.length()==0 && name == Metadata.RESOURCE_NAME_KEY) { + return; + } + + // normalize val params so vals.length>1 + if (vals != null && vals.length==1) { + fval = vals[0]; + vals = null; + } + + // single valued field with multiple values... catenate them. + if (sf != null && !sf.multiValued() && vals != null) { + StringBuilder builder = new StringBuilder(); + boolean first=true; + for (String val : vals) { + if (first) { + first=false; + } else { + builder.append(' '); + } + builder.append(val); + } + fval = builder.toString(); + vals=null; + } + + float boost = getBoost(name); + + if (fval != null) { + document.addField(name, transformValue(fval, sf), boost); + } + + if (vals != null) { + for (String val : vals) { + document.addField(name, transformValue(val, sf), boost); + } + } + + // no value set - throw exception for debugging + // if (vals==null && fval==null) throw new RuntimeException(name + " has no non-null value "); + } + + + @Override + public void startDocument() throws SAXException { + document.clear(); + catchAllBuilder.setLength(0); + for (StringBuilder builder : fieldBuilders.values()) { + builder.setLength(0); + } + bldrStack.clear(); + bldrStack.add(catchAllBuilder); + } + + + @Override + public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException { + StringBuilder theBldr = fieldBuilders.get(localName); + if (theBldr != null) { + //we need to switch the currentBuilder + bldrStack.add(theBldr); + } + if (captureAttribs == true) { + for (int i = 0; i < attributes.getLength(); i++) { + addField(localName, 
attributes.getValue(i), null); + } + } else { + for (int i = 0; i < attributes.getLength(); i++) { + bldrStack.getLast().append(attributes.getValue(i)).append(' '); + } + } + bldrStack.getLast().append(' '); + } + + @Override + public void endElement(String uri, String localName, String qName) throws SAXException { + StringBuilder theBldr = fieldBuilders.get(localName); + if (theBldr != null) { + //pop the stack + bldrStack.removeLast(); + assert (bldrStack.size() >= 1); + } + bldrStack.getLast().append(' '); + } + + + @Override + public void characters(char[] chars, int offset, int length) throws SAXException { + bldrStack.getLast().append(chars, offset, length); + } + + + /** + * Can be used to transform input values based on their {@link org.apache.solr.schema.SchemaField} + *

    + * This implementation only formats dates using the {@link org.apache.solr.common.util.DateUtil}. + * + * @param val The value to transform + * @param schFld The {@link org.apache.solr.schema.SchemaField} + * @return The potentially new value. + */ + protected String transformValue(String val, SchemaField schFld) { + String result = val; + if (schFld != null && schFld.getType() instanceof DateField) { + //try to transform the date + try { + Date date = DateUtil.parseDate(val, dateFormats); + DateFormat df = DateUtil.getThreadLocalDateFormat(); + result = df.format(date); + + } catch (Exception e) { + // Let the specific fieldType handle errors + // throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Invalid value: " + val + " for field: " + schFld, e); + } + } + return result; + } + + + /** + * Get the value of any boost factor for the mapped name. + * + * @param name The name of the field to see if there is a boost specified + * @return The boost value + */ + protected float getBoost(String name) { + return params.getFloat(BOOST_PREFIX + name, 1.0f); + } + + /** + * Get the name mapping + * + * @param name The name to check to see if there is a mapping + * @return The new name, if there is one, else name + */ + protected String findMappedName(String name) { + return params.get(MAP_PREFIX + name, name); + } + +} diff --git a/solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/SolrContentHandlerFactory.java b/solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/SolrContentHandlerFactory.java new file mode 100644 index 00000000000..acf94a2d801 --- /dev/null +++ b/solr/contrib/extraction/src/main/java/org/apache/solr/handler/extraction/SolrContentHandlerFactory.java @@ -0,0 +1,41 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.extraction; + +import org.apache.tika.metadata.Metadata; +import org.apache.solr.common.params.SolrParams; +import org.apache.solr.schema.IndexSchema; + +import java.util.Collection; + + +/** + * + * + **/ +public class SolrContentHandlerFactory { + protected Collection dateFormats; + + public SolrContentHandlerFactory(Collection dateFormats) { + this.dateFormats = dateFormats; + } + + public SolrContentHandler createSolrContentHandler(Metadata metadata, SolrParams params, IndexSchema schema) { + return new SolrContentHandler(metadata, params, schema, + dateFormats); + } +} diff --git a/solr/contrib/extraction/src/test/java/org/apache/solr/handler/ExtractingRequestHandlerTest.java b/solr/contrib/extraction/src/test/java/org/apache/solr/handler/ExtractingRequestHandlerTest.java new file mode 100644 index 00000000000..0ce7abae2ac --- /dev/null +++ b/solr/contrib/extraction/src/test/java/org/apache/solr/handler/ExtractingRequestHandlerTest.java @@ -0,0 +1,354 @@ +package org.apache.solr.handler; +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.solr.util.AbstractSolrTestCase; +import org.apache.solr.request.LocalSolrQueryRequest; +import org.apache.solr.response.SolrQueryResponse; +import org.apache.solr.common.util.ContentStream; +import org.apache.solr.common.util.ContentStreamBase; +import org.apache.solr.common.util.NamedList; +import org.apache.solr.common.SolrException; +import org.apache.solr.handler.extraction.ExtractingParams; +import org.apache.solr.handler.extraction.ExtractingRequestHandler; +import org.apache.solr.handler.extraction.ExtractingDocumentLoader; + +import java.util.List; +import java.util.ArrayList; +import java.io.File; + + +/** + * + * + **/ +public class ExtractingRequestHandlerTest extends AbstractSolrTestCase { + @Override + public String getSchemaFile() { + return "schema.xml"; + } + + @Override + public String getSolrConfigFile() { + return "solrconfig.xml"; + } + + + public void testExtraction() throws Exception { + ExtractingRequestHandler handler = (ExtractingRequestHandler) h.getCore().getRequestHandler("/update/extract"); + assertTrue("handler is null and it shouldn't be", handler != null); + loadLocal("solr-word.pdf", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer", + "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords", + "fmap.Author", "extractedAuthor", + "fmap.content", "extractedContent", + "literal.id", "one", + 
"fmap.Last-Modified", "extractedDate" + ); + assertQ(req("title:solr-word"), "//*[@numFound='0']"); + assertU(commit()); + assertQ(req("title:solr-word"), "//*[@numFound='1']"); + + + loadLocal("simple.html", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer", + "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords", + "fmap.Author", "extractedAuthor", + "fmap.language", "extractedLanguage", + "literal.id", "two", + "fmap.content", "extractedContent", + "fmap.Last-Modified", "extractedDate" + ); + assertQ(req("title:Welcome"), "//*[@numFound='0']"); + assertU(commit()); + assertQ(req("title:Welcome"), "//*[@numFound='1']"); + + + loadLocal("simple.html", + "literal.id","simple2", + "uprefix", "t_", + "lowernames", "true", + "captureAttr", "true", + "fmap.a","t_href", + "fmap.content_type", "abcxyz", // test that lowernames is applied before mapping, and uprefix is applied after mapping + "commit", "true" // test immediate commit + ); + + // test that purposely causes a failure to print out the doc for test debugging + // assertQ(req("q","id:simple2","indent","true"), "//*[@numFound='0']"); + + // test both lowernames and unknown field mapping + //assertQ(req("+id:simple2 +t_content_type:[* TO *]"), "//*[@numFound='1']"); + assertQ(req("+id:simple2 +t_href:[* TO *]"), "//*[@numFound='1']"); + assertQ(req("+id:simple2 +t_abcxyz:[* TO *]"), "//*[@numFound='1']"); + + // load again in the exact same way, but boost one field + loadLocal("simple.html", + "literal.id","simple3", + "uprefix", "t_", + "lowernames", "true", + "captureAttr", "true", "fmap.a","t_href", + "commit", "true" + + ,"boost.t_href", "100.0" + ); + + assertQ(req("t_href:http"), "//*[@numFound='2']"); + assertQ(req("t_href:http"), "//doc[1]/str[.='simple3']"); + assertQ(req("+id:simple3 +t_content_type:[* TO *]"), "//*[@numFound='1']");//test lowercase and then uprefix + + // test capture + loadLocal("simple.html", + "literal.id","simple4", + "uprefix", "t_", + 
"capture","p", // capture only what is in the title element + "commit", "true" + ); + assertQ(req("+id:simple4 +t_content:Solr"), "//*[@numFound='1']"); + assertQ(req("+id:simple4 +t_p:\"here is some text\""), "//*[@numFound='1']"); + + loadLocal("version_control.xml", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer", + "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords", + "fmap.Author", "extractedAuthor", + "literal.id", "three", + "fmap.content", "extractedContent", + "fmap.language", "extractedLanguage", + "fmap.Last-Modified", "extractedDate" + ); + assertQ(req("stream_name:version_control.xml"), "//*[@numFound='0']"); + assertU(commit()); + assertQ(req("stream_name:version_control.xml"), "//*[@numFound='1']"); + + + } + + public void testDefaultField() throws Exception { + ExtractingRequestHandler handler = (ExtractingRequestHandler) h.getCore().getRequestHandler("/update/extract"); + assertTrue("handler is null and it shouldn't be", handler != null); + try { + loadLocal("simple.html", + "literal.id","simple2", + "lowernames", "true", + "captureAttr", "true", + //"fmap.content_type", "abcxyz", + "commit", "true" // test immediate commit + ); + assertTrue(false); + + } catch (SolrException e) { + //do nothing + } + + + loadLocal("simple.html", + "literal.id","simple2", + ExtractingParams.DEFAULT_FIELD, "defaultExtr",//test that unmapped fields go to the text field when no uprefix is specified + "lowernames", "true", + "captureAttr", "true", + //"fmap.content_type", "abcxyz", + "commit", "true" // test immediate commit + ); + assertQ(req("id:simple2"), "//*[@numFound='1']"); + assertQ(req("defaultExtr:http\\://www.apache.org"), "//*[@numFound='1']"); + + //Test when both uprefix and default are specified. 
+ loadLocal("simple.html", + "literal.id","simple2", + ExtractingParams.DEFAULT_FIELD, "defaultExtr",//test that unmapped fields go to the text field when no uprefix is specified + ExtractingParams.UNKNOWN_FIELD_PREFIX, "t_", + "lowernames", "true", + "captureAttr", "true", + "fmap.a","t_href", + //"fmap.content_type", "abcxyz", + "commit", "true" // test immediate commit + ); + assertQ(req("+id:simple2 +t_href:[* TO *]"), "//*[@numFound='1']"); + } + + + public void testLiterals() throws Exception { + ExtractingRequestHandler handler = (ExtractingRequestHandler) h.getCore().getRequestHandler("/update/extract"); + assertTrue("handler is null and it shouldn't be", handler != null); + //test literal + loadLocal("version_control.xml", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer", + "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords", + "fmap.Author", "extractedAuthor", + "fmap.content", "extractedContent", + "literal.id", "one", + "fmap.language", "extractedLanguage", + "literal.extractionLiteralMV", "one", + "literal.extractionLiteralMV", "two", + "fmap.Last-Modified", "extractedDate" + + ); + assertQ(req("stream_name:version_control.xml"), "//*[@numFound='0']"); + assertU(commit()); + assertQ(req("stream_name:version_control.xml"), "//*[@numFound='1']"); + + assertQ(req("extractionLiteralMV:one"), "//*[@numFound='1']"); + assertQ(req("extractionLiteralMV:two"), "//*[@numFound='1']"); + + try { + loadLocal("version_control.xml", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer", + "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords", + "fmap.Author", "extractedAuthor", + "fmap.content", "extractedContent", + "literal.id", "two", + "fmap.language", "extractedLanguage", + "literal.extractionLiteral", "one", + "literal.extractionLiteral", "two", + "fmap.Last-Modified", "extractedDate" + ); + // TODO: original author did not specify why an exception should be thrown... how to fix? 
+ // assertTrue("Exception should have been thrown", false); + } catch (SolrException e) { + //nothing to see here, move along + } + + loadLocal("version_control.xml", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer", + "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords", + "fmap.Author", "extractedAuthor", + "fmap.content", "extractedContent", + "literal.id", "three", + "fmap.language", "extractedLanguage", + "literal.extractionLiteral", "one", + "fmap.Last-Modified", "extractedDate" + ); + assertU(commit()); + assertQ(req("extractionLiteral:one"), "//*[@numFound='1']"); + + } + + + public void testPlainTextSpecifyingMimeType() throws Exception { + ExtractingRequestHandler handler = (ExtractingRequestHandler) h.getCore().getRequestHandler("/update/extract"); + assertTrue("handler is null and it shouldn't be", handler != null); + + // Load plain text specifying MIME type: + loadLocal("version_control.txt", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer", + "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords", + "fmap.Author", "extractedAuthor", + "literal.id", "one", + "fmap.language", "extractedLanguage", + "fmap.content", "extractedContent", + ExtractingParams.STREAM_TYPE, "text/plain" + ); + assertQ(req("extractedContent:Apache"), "//*[@numFound='0']"); + assertU(commit()); + assertQ(req("extractedContent:Apache"), "//*[@numFound='1']"); + } + + public void testPlainTextSpecifyingResourceName() throws Exception { + ExtractingRequestHandler handler = (ExtractingRequestHandler) h.getCore().getRequestHandler("/update/extract"); + assertTrue("handler is null and it shouldn't be", handler != null); + + // Load plain text specifying filename + loadLocal("version_control.txt", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer", + "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords", + "fmap.Author", "extractedAuthor", + "literal.id", "one", + 
"fmap.language", "extractedLanguage", + "fmap.content", "extractedContent", + ExtractingParams.RESOURCE_NAME, "version_control.txt" + ); + assertQ(req("extractedContent:Apache"), "//*[@numFound='0']"); + assertU(commit()); + assertQ(req("extractedContent:Apache"), "//*[@numFound='1']"); + } + + // Note: If you load a plain text file specifying neither MIME type nor filename, extraction will silently fail. This is because Tika's + // automatic MIME type detection will fail, and it will default to using an empty-string-returning default parser + + + public void testExtractOnly() throws Exception { + ExtractingRequestHandler handler = (ExtractingRequestHandler) h.getCore().getRequestHandler("/update/extract"); + assertTrue("handler is null and it shouldn't be", handler != null); + SolrQueryResponse rsp = loadLocal("solr-word.pdf", ExtractingParams.EXTRACT_ONLY, "true"); + assertTrue("rsp is null and it shouldn't be", rsp != null); + NamedList list = rsp.getValues(); + + String extraction = (String) list.get("solr-word.pdf"); + assertTrue("extraction is null and it shouldn't be", extraction != null); + assertTrue(extraction + " does not contain " + "solr-word", extraction.indexOf("solr-word") != -1); + + NamedList nl = (NamedList) list.get("solr-word.pdf_metadata"); + assertTrue("nl is null and it shouldn't be", nl != null); + Object title = nl.get("title"); + assertTrue("title is null and it shouldn't be", title != null); + assertTrue(extraction.indexOf(" tags, and they get collapesd + } + + /** test arabic PDF extraction is functional */ + public void testArabicPDF() throws Exception { + ExtractingRequestHandler handler = (ExtractingRequestHandler) + h.getCore().getRequestHandler("/update/extract"); + assertTrue("handler is null and it shouldn't be", handler != null); + + loadLocal("arabic.pdf", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer", + "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords", + "fmap.Author", 
"extractedAuthor", + "fmap.content", "wdf_nocase", + "literal.id", "one", + "fmap.Last-Modified", "extractedDate"); + assertQ(req("wdf_nocase:السلم"), "//result[@numFound=0]"); + assertU(commit()); + assertQ(req("wdf_nocase:السلم"), "//result[@numFound=1]"); + } + + SolrQueryResponse loadLocal(String filename, String... args) throws Exception { + LocalSolrQueryRequest req = (LocalSolrQueryRequest) req(args); + + // TODO: stop using locally defined streams once stream.file and + // stream.body work everywhere + List cs = new ArrayList(); + cs.add(new ContentStreamBase.FileStream(new File(filename))); + req.setContentStreams(cs); + return h.queryAndResponse("/update/extract", req); + } + + +} diff --git a/solr/contrib/extraction/src/test/resources/arabic.pdf b/solr/contrib/extraction/src/test/resources/arabic.pdf new file mode 100644 index 0000000000000000000000000000000000000000..3d47b999d5e7c6d53cb0ca22adaf06dd25cd725c GIT binary patch literal 11935 zcmaiaby$?&);5YD4${&M(lHD%fRwbfbO}fdokMp@Hwe<*-Hmj2mmt#JUElCK=RNs; z*ZW*N^X$FW+N<`x_dncJpGCx&K+Nm_s?O}r=FZyAGyp3Z3z?OkDS($3C=IbRvNI-Q zgGm&CVkYKx5GYX0T-OdF0@1fJfbjDJZ0(>BT?>FyGI4Mdp!4I~SFY~gcH6k4p{-Sd zNP3@w?GOcnBbg(Zzj8i4BoB3z?=#wm5;A`Jkw4urq1OBRw&(7JXH#gg&JUv73FYGG z#daO1X4cAdPZCM<%=g|gkR0>kDNR$xIK&(R)471C5nUSBTNGwgwxR3S8&bqj0L0SZ z?&NMybzl0%y`& z6!YzDBm!#LevxxbjH)^-qKVeaWpPe;0ZD_Q?OmOg(iSNmnRkZoCmuZP zM{`TlJY!p<`k=wI&5klgn&NB6;)a>oBz?4Be8U-NIPu3k@D9#7_WVnC^zSOiAL$mR z`P>gI(0cgHsU*YE&+RQ{&&!lO5a;`yjm=st_C9zzdb$%XporMiKDM@Jz{ir14IH7L zKYEWmzTWg;B0jJ}7gbJ=Sj8ai({~x){NPK+$lGCFG4T{n`s-kQj43j5=qU4jX4|5d zzSk8E5IK3GglcPD**l9DFd~U2w9S^%D!6mb|4U|65&7ZEs#2$+0#*TID&{9$Dy81y zcj6y##MIZbKbWTDHHLZ&4pk=^jt3OS)t2v62et~ER7dI+_&f$VR7kxf9mBb$+RLS3 zxdmhy~2M*f3r+GLs<&Ys~c!^Qy0 zhtVA-?>t&~92fF6g~n)??KE}7v&s#;JK_z~4HpQTXX4Ks!vdO@-QMuoG@MRW>AkwM z4ChiVD0|9-yYpq=_ilb1CV7_$>2Q{KW3Qx(8>6}R*bQ}2sIgC~@iX{9bRby!7F41( z^fGSPD?w7;^sMBsuD({fT-6@*7iRLJ^8OTCdkuSb3symAP znM&`FwHW%kq?hd@;g0F`wuaL^Phj$|^*r};MS~+>r{(Or?AAXwa11AODY+84U-|5Z 
z0%`JHNmOzH;}ZiPDr(!eiuMs5*qU%h-S<5%;qLE{FX`{7csiCLY3f^Otq1prv4fkBdW87 z0`i_O2DkQ@r%R@4_AJNw(p^7Y3I7Ud)Nk~9lzs5MwRyPnXA#cGXdg4KpFei*E;tgJp2 zFMZ5bkvioysEFTIcagY~;t;#ye-M$#mWxC?`eQL*JK#W-ihatxt8zhQkRmg7M!pfK z_u0XBjk35$vN4%z>}Z-{`_;YYXU>;tGxP!TSOXE?2hsb>COZfC7i63AW3UCAP+fvs z(>?WE>w8kgmMn6G&q2g|MJ2oZS_QFtYv03e6^h45YPpWFAY#8M_iyXwRa{~`z2|=V zFd10hlC*>I*cbrq%IM|LK1pI|LvzA-3;CWjbD7?~Js@##RJrcE`zc(4LuSd?jXoB7sJ6 zmQ$2#%@*5^Gnl^!$=Hf&`Lo*HWS`r=_S+5f?pP*q)H0)@A2qS90dq8$4J~u0-ZINF z>Com^OyiavaL;I*W3J{(7kec`&c@WZn7H*i@-a}Ytn0A9{jtCc=gh|cf_B(VK)FI> zR~&UnXt~r6El!u}HTec&Blg`6MlyNXn1msOVVm29n1=@Su9^6ZkUTG(xwjB);wqB( z=Sa`kq~Ihip~tO{Xr&$m=`4uXmGgA}m7Pn-vhXI-rxxfOkxJOKS)M>}ODH~|z&q@Z zv`3<(wSvl7{B~Vz_+$;H>$z^InW(vl0^s}hG6b+8&HVNq!!I|1kZu#apnTJ=e*TD( z034h-n?$`iibOu=H`#pNUuM!>U*(k{(+G!RW@#5R5xM;Ij@id1Kw_^TM%;(9+SmwN zKCN`N-HMHShda#NcQrI%J32IM0~zfb82XWVC@nQjG^r3BgNgNLO;0~FYad)hH9>?y zV#rEx{^2`ADAY8$rX&%VHJxY3wKY15ZDBj6UDL7Q^akw0bUK+; zjXm+di*X39ug?=}QFS{1`dPawPljzN#9V)pajT4cUi_dCQegEBYcJ09hs^xK(z?DTD2JL6Lj~br&u%6# zb*6|ttzUiT3ma6KA&%OnOI!+^bBl%5;`&mYu0o8^#2lJ?I}npRY%$%RHO^jXFsbIF zD#eu+oy2BZgFwtfVDg3>W79Tjgf6k`eN)?RoV0V?o3P|~3Nkm1CPbOU_7TB*T3!qM!(!ZT`FE0NPo9iFD$H3U2#{x z;t8RF?NgcNZn0)_u|yHR3Cc}v=}EInq&BZ8vTkdBA$4lD9}88G6ul3$`(2eWyXf`Q z&h_|U`XL@C9>({|0FxSiv%UMT^F(opw^XmgUdsl{SAO7O%+4*FviEQuaXvVk3c*|b ztTz04mmoeY)GtN6NEDF(zrfzKhO&gxV$FM1S$24rXx{fudh?T~%w7UEG1`6$RllmM z8C$4YrzA!RKso%wyiACA%YJUoVB+QPX~m|3ivBW|oWhz)XLjQ1Tt0&6(tYJz7f~rJ zre<~WiHBhM=;%wQAb%?5@5GhA_(win;*Xw#Oi5LJ+*RfSBIdT0!(_IYGnR0N8<5_7 zTh-EuFQ!}qp(x#Ccc5;gtSxPaQKYzdDgF@TYol$?pC$f$T~+KO&E-^!=Iu4|s(H&l8YwTjr&h9C1rA!2vY3Olh|h-cs2Cy1 z0tXyau4@W_=$N(YleR9r&lbr~eP(j#EpOh~XvdV0F{26|E%oYj(+yoORMc}xl5DNG zh)GG1o2lUqHP4b_WMsN5?4l&8eC>YNo@M`ao4RPvyW-B9(M4=k6AjP6{YNMjQHbN{K-Pu1d&uC1UV@3KG-J3}3ppj$=)O;8M`lMy=3Ar0Ji4i!FdIp3v0A{%{cqvBAo)ubGNm$ zQ_@3e``Ng4C&_~c_34vE()kngd%U^9T!y1C5k#1{Udqxij?76fx@s} zXv|JH3l2^X*5xUwBCpO-8h^^sH}Sx-3t2X2eN2EfbtENLGe*?0W^QLu4mOua=Lyk@ zMASW(%_DL7lM8!HLrEI3fcw%$p0pvp~3tRO_+mpvz1LI@|<&f1dt8a@I 
z6OzBj#4Nv4hN^FcPAnBqiR4Y6No3-r8L~~gL002*ccR*$Q~_yz;$25>v1}HAUd>6g zt4O}%JdmrNH}Q?$kRC&M#^r+7j}lr6h%TpQ&rOzoi1|=$+LQm#y}X~yJI@KZH~zsa z{IUf2&cJ5!6mb=g6aiLNy=0xOnvHqK1;@p-x*|)Ss>Kru?h)}+RwtQN1b?F&@Y@FQ z4=jkN_vhz%6vlpMGj)NqS*L3oL2r61X5E*&-mh5e**4p=o?PcK9UJ!-^ZJ)VqxM3D zL(sLBB2D@jgBcW{vMZ$`{&{M_1iQh-Ep?pxTDGcL)@n68<9GcVu4xIyt_7=)rKH-I zUs?3#ML<#(PpcLv%LWlC91)^fI&b;R;7gId<&iN6+f|yEwh545~Q$*K2wy zzZQ?1zH?loVwGg`!xh=dES~w{KKH?6-d zQDmnNe8p#hg_1NLW=>Ws(#rFLIXtT}zU_C{+F{%z6N#ggaku6^6%$2~T0~vjE>4ef z1!1zRX6x%7qpw+HqHP|+h^+o|)E_+RJx|eE4}M)B$6sWN$V#1LKb%Chj^LZ`MvifooOt5a(uWQ|6-pOfY%?1%^wyv7h?i!ug_!1KMw)|fn_?vbnPX9E>Tv?^vkB{9i zCIg2_nF}N<-eQXo$jnqqIsd7?$JZnsb%ewXF^n`1@;$}X>ZG+o?T-#(Jhf{SPN;_} z%N&4Ot(UDhE8nLBQh(fEw|O5S*habh@I-E)J(Xt0v}i7PbV3tFcwA4xF=4u1GZ*7( zI9J>KA#}*G2yhBs=3t|w%t23e!NhN}-sJMM5ABPi=ol4pX`La(!QmM>I!!#eB*fBR zbrTcwXRrtUOfn?%VzWgy{8~JVnWFTOZ7vkQ3V>`fQ=D*j(?TIINmN;GkX<%9;zR@r zjO5YAl-8j5s444;ximLlzuR_WiIH}RSsXe@jVi1u5yAzh#mGdd7~#%TSn$9{Xu?EI zV2M+Yq!Bfkg;G&rgMhDSabf+WbtV-L^vpNS$jWcDJ0W(on`JIjnp#*Y)7JcK38FHh z4Yt1xc8d@UYUQO1mzhmw!!*O?%;ouEFZ7wY*Mr%4s|W6-W!fuAqkI+Fw^qVcA__M3#<)3zCe&n! 
zrndJ)_2-p;+9w7_@;O?%omW=}R{m)y*kpltoTbcmqj3)1T{|`f>l3ZCnw~!59ubf^ zZ*|*iHt4G!T1YT3*$S{x<82(>lDIiK93-sVAzaEbPf_WA!%Glaf%Mz>PxTWtLU(6< zLr0$G&Z!vHodXnn(dQV*X^{4f(Wk_!yAj^-no?2~28=s$iFo3qzNgP-b@5%+Juqjw zgTD)z$VgU_J=L7XtPy&MDV4bjO`aFa>5F>#t};5Ql#F#mRhulOc^pYg zpVKpGAoUw?NY44qT9&Vx$*(9@G*75L{LhA?{L5KI)Fi?VlyiV4R!3ND-aCJ5EO*5n zz^Wl6;PfOm!f>w@zXM<0?oOy)sPM2GeWc*?l_ZiXokp3zHL>XSns9xp-o+02(#N8U z9P*`4C%3PK>Vpc;U4`lsruXCoSrxfAgW0tSI+PPlnic+ZXzKQqkblWBUn9nx&C;=2 z!s<{SVBrVa_#P*n)R7alvXaEtV?1Ew{y;BP2&Ys2L%r&$=#CWdBs1z~-3NN0_v?Jh z^+a2Z#CI@(Njql(tgi+3H_=g_0{E~yR-cZ`o&|16A#?~tjaHDs8OHS94?+8#4MhZYH zF$@w%b}zmmC%RQ*cu(T{Xl$l~q*~BMkj$WRDuC62@D%YrmF(sTy`$X1nJF-h zpT3L*2c{gx?cA*p*Lj6SWWDg|3NFl)5>pP>%#;VOqawGKlPZk* z9|%~D)Q@+-&v`m96XrUvvx=^-9(*)L!hL|sQ2N{-`CE@akz&U0VQwI;Ivs@$WFM{N z^a(pV0lYaT2zEuZ|LOk*gx=5+n89~|EmlPV5CLEhIyhf@tE`3whWC2;nK~Kqg=EV+(oZ~g0>Ziwy$V=qx6~EWt zW(ZUY{Z279h9@v(Zk7Ay$tc;k)6$=svb)H4(8dqr`PFDz9D1yZ*5}yxOT1NazoNbB zMNJmoi65WwR$qW6r}2ep%T(NX)!EmgeG%1_^Ap2|_g16>s1ir7sf-1!r+sYg1Kw8$ z7$B73@+jRA4&G&Ra`vZ;hoz+V8*6fK%ziuHeIT6IS(Y-&ag;7O^9s%Rs#GH|=epQ_a0-8yPhd5)l5UCP$<$XMz1UJH5ihgYpA z(CjgXHKldL4Wtng>-%`KbNAtxc-4H4(w(W@JC{3u@P(tPvOD~PJG_PPFZULf+nOOYhe9*3t}oTs{7H)d|74DgT@XXl~C>l4-vD8WUl73*wZ$1;iOscP-CZ zqLL<6!j}i9%ZC{ws1`e)7t!C>TsYLsOP8=G64`ujPUbKt>B+N)n4*x@&nEr+8CllRMHM+-ej%yGvA)D#=s6VRyo6dyAxM z6yKDw*SC%r-V@;b3rE!AJrc~ul7;siB@J$Z2>I}p;{s!|6&dcI)NC#i4=*{kLPdZ5 zT3oKmfENz9tqwnIe95L&$nYt%jv(R58$DDP@#TgzyLc1L+sLCG`>J1OnzB z()GgcXK%G)dAfS7{@mRl-#f=<;S$=C90@QhKGjPyu6s?s>i4bWLZ^|Af!&o1s{GBsJ>h z(1nfJG*?eH>sIZ;;y4>mV9W0aPusCbh~D0g*K!TeG9GFua5-@0G2!Dw_4f2tl?B7ZOZSw65IrwKx*b;@74KEPpqwd2=%e`^R}d7?k=hQ&)(DvSi(}pg^`}12 ztJW$~bC0#(S4cz)Jq`?@E2t5P^iWj{vX580bo$ zpDOCl7!UQxMW|=;E&~{z*f`+covua;y!f0S;VG8%0x;)zXKNG`bbVM+{wV_cp5|E3 z+Q8bv= zH2I33pME^W4k?TV^fIc9vTV8{?W3K@aFr9^V|Xx?8lgVo{Y;I1zi4$I)$Z)EHac*r zEPEX#X2#phPCvNm>k5+KTZ8qq7gD)ckrVFqv15q z&W~!W{D!QqwaOyLHb1yg>$uV>vr;P0PgNh#{cc)E1L$z7ech0|(>;<`en;3y@2`2r znx`St{Wua73C-^rib@RvV0nHPL3-ljeRSns%+ts3CMoRuU8g|u(OYo^;j;8v!`t)a 
zJ{IF7jas5yIeXg25V@{2%#*crqSp~gq`4lC)E4mjuS$j1sRelJ#VK~wQ~B=A?mswn zG@`ZYZ?||1y`y7q8nKFqHlGbb(QZxqDHZ|;i1pOk?`w6xcQ6^Kt+;*<`n__Wi&TGD z)LzrpX4XxxztwqYAyHa{W!Ki}~my2}wUWeJ=XvkFj+|fFsx=*;o zOAC(lV>iLtFM=^RE_zqSX7o)Q1-Q0~+HZE4UP_%t^4zdUfWaA63{kaS~cUu9WkUp2g)!pGOU)*|_iO*HCTX8!b)6#@7J_WzQpuYXix z3>A9du{?xLTCX=3OM%0lS&yB!z}x+|mna)(pbd5W1yhRE(H9lN`>44eek6Ub5|!5Kzq>Z{ zc&$pjG(3obY-;A@d2P)#ihSkI>1S(Jk5g)nyn=m*oQ5RuuJmk&<~-nB{DS795z)P< zRY#zBC3hufmglXda6@~Ms+@`vomEFc)8|__07}*pHK-_CI-U3Z;-WzlHsq(dt33GA zP!Ox!S2!AjmKUwMv5Rm%a@8rcXCj2ZMI4?`Zg1r+;b{k~n)^+hcb4tC6H9X#Vn<|i ztAuyNC$V*IMH*S<eM*qBgX_$r|4!uDhbRl}WVDw$YR$U({so z$N#{E_;w}Jp1sO#-gLxKr~easM`lAckm5xdeEm2UR!KBQqlwy{u@o>-w2wD9)b3rj ze{4i1o+*i}GU=mSS=@t($;ZIJIDi_s^dgFfOr)B~3px&vrd@oP_Hk_T)GntV7EqA) zQ&h^YOmTpJr{M?9>sq26q^i|CRm)xAj}@nyiGa>d)qlsdP3L8cW8gV9e{CpVDtc72 zZ2e77-^QqNG4#>~QucH45KlGr+(=zM41wWiK7Hmp)Rm-pn#y}`M#q6uW_4{ksSwU3 zlgFybd&;~U`Md@S>4wmWad(9{(!NuAD}%n%243qRVeRc9apmX!uzKH*J*Z68GTU_%2ZQY4xa)^v`@iELB(K zioE~nt6dvUckcSMHzOmd*{opVUmi=huNDRFsxpXNhtg8M?aF&Pd?-9ZCQ|k%-!k4923|PF^0n5(l2Lmk42Iit)SAk|L@ZgY$z5dLi1P$CJEG;dV_s#;~MU z)(7%116@?w>4yAYz%M8GKltyV6uk}emrUkN@Za%Q>SPzL9=EI}pbW$4VaXcL=PVTK z{v@W5*`4Ot*hPvex>u4pQIx4fWvMKJf3Jh6@I$^mBn}unOJ^^|;4WI)Xe0bJsBBJo ziw(?ZMF82qG*sD2GR|vMPCPox8L`2sbxM%iDmLJEC7$L`>G7S6 z_@mQ&iLTGBX(feee=xI{m_+BG@9fslGNhuQ>F%A$p){k^G~)3`tMp8RuC-||4tzr* zXL9NM3iFdm>De&)6Jo9Sj(llh!E#nHwzBOcNv+sUfjY(QeB!QQ`EZoEQf@H<;dBVV zd;ma?WxlOrgGxv!>Ud$St{!QEL5M9q#~$*2vXV@3x(rplf#nn}An%+TPYf~x;l{0jpwUG`4O{*ck=DxY=FH_=a`k-hjlQfVm| zQ{WKR^|$zvI8I-l0@6kXp4!R4U@LvAnCq(+6(bp%^l8Vz9*&dNRpmtRLKEoR(;dI0 zXkj+6v1GDtd|v%v&tAnW>nxG^qb26L^T@8eX`zWZfyJqV%H5Xp)-=~*VNx%SC7rhC z@)u6IVddtHmmRf8y4`xE-}ZnXwSj%e-Cy?p`a^C(-uPlKMX#UuBcvapLJ%UZPx*rC3|2LFn#j-pN|vOEWYPT@YpFz}t{ zL-JI1%YFrVf3RgzRkZ&fv&Nzmw!qP`F+{iDYwhjnWoy2EuWzPdfUx3`Jchr91j@~-|YADEK7k7wY@7!>`1r7{@eQQW*8u&00K zsruNC+{4&+&q^ReJNx(3t2JSXvj2sB_RK4LrkrtevUC4~e#ZGv`k9im zH3TSTWoah@vDJr~Sld~_7B<~CSzq`VIc#tu!6OK5_Y=gCi;SwM&=MQ764Gt 
zRv%(%N5;hs0sxo@>FMLURv3KsBeI}C4934;F0_3YqvQPRRiv^Exp zNYjsa!22tN{aO{=bhe?EtOV~bdwCiX;>*RGC|pYtRE(i6l|^F zB;bPKf=b<)+%y`UkC;4tBIe;_^^+M(teL*Dhx$SaUn_mq`18ks0fY7g7!M2RUB1tzD!lfqBNBJx}8@>-s6iISSBBwb0ZpTA4`()8xQ z-~F-p#aX|`PoMg8F4rF#^w$r#Su)n$XDFZI{T}}p$Z-F&r~g0)P|;q`?ingdPq4LRURw9>V-RffRZR@+jBPbJIXTJLz$|~gu+1Z5Wn(Af zV26D<{w8}KgIT!#lEZL9ru8>G{1dNa&u~@%0F|tiElr+jpk&XePyqZ3Oh7FEtVs++ z_FUn=>p}mbhZO}w;9n-d&rmCUMTngy5JpP{DnXp=wEoTQKUe+Llh3+F5a4HB=rg3* zpB(@dAhuTaP<@Ck8R$8L0#F8GV50kcRGtHXL9Aq)+^kwaaj2EOHH_b?2vxup;b)IGE@|6vTyq5@dF;-t+SVg<%Q8BG{5W6J!7NFg%BRKI52v)TbtjNEL1_0%( zA(n#rb|zMqnt$^JR8!X=v$Qui|Jx4>%(t|zr4cQ}l1W*S?%!JaKXh4G5yCJkFYG?z z6%!Q`V`1UoWMSciT`OR2*q0sl<#?X+1D^NDz+NBnKX&XeTbSuzdax)9%kvxz{`(7C z3ugP9juoZ@KifY0ge`c!vDjd`XE_@P=J&7guo^^Q{9;;>53DR8n1Y3y1;ozI&dtQa zLBqmAL-#Meae_h&0kBJt9RTz6_lFG3!NJBsW=Qro8Hk0O6?TO^KV+7Fm%+v$7$E;c z#`=uFf0KbgEMOS0|4jzVmhHdnVCBH};ot0FnRESTT-YVc1v@$aVFzaYZy6Wcf5ru| zaB==uTo4HSUu!|E|C);(R2O!OL!SYvXyO8awF;xCMm-+4%wgx5+bMk#?HQ1^cDhizzqS+vV&?z>sHjBc H!~p*fSus6= literal 0 HcmV?d00001 diff --git a/solr/contrib/extraction/src/test/resources/example.html b/solr/contrib/extraction/src/test/resources/example.html new file mode 100644 index 00000000000..5732f6214bc --- /dev/null +++ b/solr/contrib/extraction/src/test/resources/example.html @@ -0,0 +1,49 @@ + + + Welcome to Solr + + +

    + Here is some text +

    +
    Here is some text in a div
    +
    This has a link.
    +News + + + + diff --git a/solr/contrib/extraction/src/test/resources/simple.html b/solr/contrib/extraction/src/test/resources/simple.html new file mode 100644 index 00000000000..f33cf92677e --- /dev/null +++ b/solr/contrib/extraction/src/test/resources/simple.html @@ -0,0 +1,12 @@ + + + Welcome to Solr + + +

    + Here is some text +

    +
    Here is some text in a div
    +
    This has a link.
    + + diff --git a/solr/contrib/extraction/src/test/resources/solr-word.pdf b/solr/contrib/extraction/src/test/resources/solr-word.pdf new file mode 100644 index 0000000000000000000000000000000000000000..bd8b865905fc8e21d15921068dce37267c19f57c GIT binary patch literal 21052 zcmagEb8u(Dv+x~kY}4ZQIVq*=S??-M#mB>poRaz4cU`I@70T zPWPPY?#~}*hC)e9f`NsR6_#S?==SKm>@I(9Xatsxn3>qo#0Hj+kC;i$+`-b#ikSVc zN`;t7!rIQw+~sd;XY6J!W^U?eW=jj#kGN~{q1mAT`_q#2@s8iw2 zs)o;J_nS!xA#xaXF_;*fyiZWCrKi27xJW)Fl;!9F&geP8WgR#Jeah05a0$akCbYV0 zQH8Fq5q{kOX5(bUoTAfd&a2XqopQ%Bvlmyh!V>Q42E=mm1D>8&vO1~EHQrRJ83A-a z3>0==F#eQl?A=_DcLJBRSS)R!_Qa=HXCKXP=Bahr;e2ukt`Ejj_CgJCSG-ENbUJDJ zj8*%icl7=2WwgNI7~l-{otU(^Y?6T({pO2;91(OFO*%^!9(DIQ_GJ$AEH*#IE z=SEB7^E(zLDrW_WhNRQY1W*|8MP>{Tb{#Q7G9T9PXwjPGm{kQ5k(c6;jYPtvh)>M+ zIZ8zIgnN!o7lW~%jV0@+x;m*L_Ja$et?1UWjlC^w2K9j{u|1#_Y6Wtz<_>26Sm_68;dx45$iJlRdBEXh&j0d`mle)`5!<3-uvh8Um7U6IGU=O zyAkXD6~!conbgd^+=%swnM57!99>kMj7`mn{|ys$Wg+JLFY^Kdf3xJT$Ny4GurM+c zbN$o*HUBSR{^NoB|M2jiL}U3knZL7B`1>79!vC_(;cu!DGl`jdSeu%wNQ(TwlQ1M% zUJBh9d-SU@=371Fd>ZudTtB%QC7cS_;sc#(i}|t~j7g5aXAZt)S;j90anheB>F&{t z8e(DL3VQ{jWjtcvGTbBbi^P#33`kqPc#}F!)q0p&d%bhN-u$`;LDb_^Vok-_j3#C} z$G6ixeB9(V-*MH;CD8R~23>hjBS7;iQf41)9~jWBwQZ8?SKc`MDm^lgmDp%K&A+z5}6Kj_}DGU@z5&R)=4^uHnpKln@zb11dB+&U$>>Z4 zh|fJXn#Qk6_%Co&m+E%LIJx0A+HOCDHVKUuqay96zzMg&JyzB29@zV4P>gPrh9CJ) zXC>DkCMNN19tUaWv292Zk7PB4)d!G%tsW+OL%`Q z-Vbp+PAMEV zTj&cGx?fv`tXSg7Bj`*tu`gXMCWXxZn(s)$%xJGek~)B@sXpI(mAFqm2A>|@*?raZ*#odF*G?AMFN@%L@~*q2J6&25Ib4#sQDpTrx6IrolrCm(B1 zmEZgxrR6DB8#=r!XkPs|^bl;n8Y_(!8Y-_3W8xY4Yzr>ck%0a-Q5&%G{G~0G_BL2M zu|V)6g2C*R2)L?E^9PzoBh$?7329A>sNJ0brqHhv17o;VHntZe`L_0GYnUCBEe@x! 
zzyU|7XZQzWfgs z{ue*4|9?RF#|Zbo9AF`4lC?H-{Y#L4{Q7I}|KI=%%l~Cl)Y#3~&e8I}+2;BmsQoWF z{?h7yXYoJi_z(I1`;mnhz{Uz-`+wu2qzlGRPkmi5V5Zyi@nR-(J#8vCV_Xg(ohdDP zTq@mBLh+rEJSE<(2m)4q5h5}fQdRoY{Obc7M5C=E>fqWHav()$-nXk zYPFWPy43G4eoqzZtE-QXfUmD(LC=|hOYOWnzFF@XSEPcU$ox1Yduk4 zc?0iW(~s5ty(K$s5y(D*BtR@CBAvj8JSMJJ3K6#8pVXK7z0RXYNt{*H6hmIA@%trs zE7dx0vp?5`#zOeLdac1ptE4}$2U^MS*PR|#&NciE*1t1XnhuW0+iDQt_#SeR{NhOc zI#XXoph=60BkjW8+hwn;G0=2ak%e{?YqPLgOs(FM2!I-}Legeo>vA}8yX)Q&dxj^W zq}!(^d|X7McwyWk*5*bdlMI~LN8T62xC=ytmeP1f(oS~Cjx0CVyc0Z}BUK3FzzIhY zKtnm=vslA4p{Tm@*e6zZp(jz;7ZLaDr>6wPW(s7wL00)hn6ggyEX7I@RM8V3Skz2a zys28<$z=H{HtugG7!l9f6zdl!7cYK@vLN9z)@>%>oq;d*!F!?@E;LXa%qLOECd37Z zQ&WcU4(d{1q!>K(JVcj{XS~I1&d*i={7l7|xEi=5ZB2_KNlR@?a!j&3k?2S^+cWGd zv4&;8qEpOAinxuqwT1Rah)1U(B8A(1<7Y|YS{LsL`ZSOZ(EEhGh>2f5RQ3V)Py0DW z+%9T`{Fqux+BlAKE4u-)gW4c^lldEZJ-oF?xbj5o<>HoT%o9RprJ#5=VRL?;1<=2I zdl3nqevI87q$j08^3ZtbZd2mkr52lZoJu70r|8h1akzV|axNBKTMBaQcw7`aMswWy z))QD1b8mYO!zWl4sL>VL;+|lC=mYqXD)_@Qk9^dMp!xvVMjNexA)R<6wJAv`@ewvI zy&?W~^3DHypjJW_W2(Km?+elqpn{~^YA@gx?2Jp+bu1Y7*(RWJ7psN#tjs|KX*IDn zG3oHwYE&2dsW&Nb4wtWY&*9J`g^5N`ckP>u3ju8ysk#uvs`S3h_`6}Vyu=ZY5 zAvl`}PEbj&mP@q<&lHc$b^g)e&>*tms*gSCCybu5A_nY#+wnOv<1WzfJL55=L2I0}BwFNTh3vbx#5d zkBYWbyArTon0LlnZzi7wK0Q7SJ|*90e;V(>>>&(`4ATyy0o4~*<-WSM)O#RIh3kPI zLB(v=#El?=Y2cYPB5$GB@JV=M@@7o!aMv!J+(NlRy?Inj35JQBKKQd0=N)6LLMZLwn zp*Dypa8#mFFD2-RK;Q+UkDZ0~m2XL%E^7~4dK9^VAVa*?*mLnCRT!s!9mun&J}9|I zUW^5FH&um|r;vP|C(s}G-5J{=f{MM=f=@+`NQ;3*k~e(vdHr{?uN)uzA7uV{;K;6g z`P>u3?ev;Liks~(dYqmtxer^Ee4<;-tpu$1)Sl_QBi$3aFVuN^nm1W5P|uEcI|qh) zUo=i4I%BZa1F**3PL0Tc>q~2Ay|(f?+5Z9$kp|I;-(b8Hzczc@3`|*65a%v8^iS{>aJpJ^dg;f>RAv& zmltFcG8XZjLEZOL`Pe zyVlM4{)Fi3dP=DkL#hRd(Sgr=Mr|*Ar&x$>?`-pPeL(~$1(f^IXe*|s5`lmIG9Vw>@D>KabzNP1R)ZyNP0(^ZJqz*jxEq>TiiaGCOyoJ)imQk!!~PUV>haB`={zJBW>Y@O6Ts?%n$ zm^WiPao35_DN%#wla%gY<#S}#ibP2LspZxG10F6+lf-1%8zs1exbU|>k0Q<3wr)tV z;!$$^-R)PZD)2B_U`Be_h|YStI5=m|=u}LSi%UV%Uc`xXv z5;W5{-W)uh9~3;;f9R(-dyE~AjU*}MkxrW~_8#3gGm2mPD6L|=b=avAkU;5^cl0tyg5NQ_FXX^qqA;MoSE^{#|q@D 
zNp7a@sv`%@UHLYjxXt{!G*#t3ZFZGu0WTQfkM5YVVj@sOZM^~Y6v+^dH7~BCA~x*g zbhQKXTafQ%yUbK$dWgDce+Nd^OzD;-KSgNHBB&-=CdI}EJE(kvrO4_4pL;7^+L7p3 zW{-pg8fs9|hq_T`64tir@fMJ4VR*uy9+3OG?9cBs^LEA*D??72wP`+>>1r zoh|$F{Ls(5`%L3~Y{XPyO7taPIJnVXP$EX~6?W*Ja$i%jR}-c22+8||IMZjh+MpOh zduk`d;vkiEGd~f9eVvHRKvM)a2+|#;2^L%Ur#5^B&eMRU4MY~fo?d&?*BdDeEIsb- z+C@2NM^-bJE)K%yp^Baan7kKw(I$fcl4@$@WFU>Nr&RyeR4_Mn``X*hW*{)QotUF& zkag->LBYOQDMKXND2VZ?c6aalQe)nyptstP;`?MHn8JbN<#@I2y|>%d9qg+alP0MV z7Hr~m#nO{@^^122CR#n?^X8V-(@W;=@{gz{#~daIx8(1J;kBMB&r<2z-9lx%Iv6L~ z%?X;dPet)m{QZ;0Ezf>kVB7I%xCr)f&l|d86YXzqkqFspHNH5v*lRXvwtHG19Ou#@ zB%LxHCiK}B#ta(@@#o<4CD+91 zQ}+G!440juYAbNva@=lkWz>EcspyU=M-{KaFi`sT378yT95hKRK*)hp&K$Y~JY>zF z+gum|mIb9Flgbz%Y+BSX&Rm-E99lRyz7GtU?%AXO^ayP2tR_}@|8N|!pL5!((dsXm zRN9`}WOfGZ+@qbI`SWW8kCa-EuTF0Q>OPS(!yf$${4yQDeyt=VA5tW$jLPCxlqi#7 z;6bsu_5RKeUEBip-ukS3O%GO}+luNl(ypphBqUZXY3OdQJo2yCOv@NG*Ci2060A+~ zNQK|3DPo=876Ve2DRRrD1i!+qQR*moxpHNUEJvn*AFo@}39eVV*_?)aNGr_Bu3^o#TQrL@C;KQOrs zhOg#M9*b4&Wuxn8ktVZhDxnVyJj+;$s#tw7EZ)Z0UJju4D_@4E2~F-)>s%s>L^%>z z7HHNfSzdAi#v2GKm?jEeD8GQ^0+pHpL(cS+71z`;pmAjAk5GK|N?a)lR?9xZ{myLB z3DpzvPSNB;-_o#;14zUVZm^HbRYp}KsH77U%@}DY!V*PX)GFZ1ty&`mY72qRK4L%$ z3Gc+{TR-e0X@JI?|1J5;p;R89bPts6SDG|W6t@kA;K>f3JFe?boLARgu4NkACrMb(@n^MpLg!eh7A6#Lt6Nvl9>??pN+ntnoxz;NZBhdGwKTzDhW33iEm} zcCPLyW)Rn#`m8E;nljMy6Bn%vQGGE;5(731jL?@_&adAshDjOTGi=^Hr5loNYNkS6 zNXlV8$5r-{q;;UX&ci}KL5FrZ)u#QU8LgSh_GN=Dg(_y^PV`8+XqmEz&zhB$MltiI zU+#jd`_U;}UjJ;M@oVz+%y41bH+txKs*`X%<4>jBA^C5HO+jlK^tJcts&=V{iSuYm zlX0L{u0&VeeoZD{jL*Q!-OAR0IWPIVApz`9OSt9FT|2PUETwvNJNbkqc{f+Vbw0T; z7!Z)VIPS0e=tc;gxcWMlY+O&H2(a-H?8K6t`;k;OIsPm|lxFWbwNt~m&$pA>KsC;- z6|Z{7!8FXR2Kt>re-EEcDPn>c4ycw}yo_4gKd+JsxW~55NHO;T6_upT_%vST@Nz=lU%dd4-|Ko??tGn%sRt0!#1O{ z_H*IA1R;&XV57+;HDUEMQ6q;6X9Ysng36U)FxB`xckBcB)`)aSIH^ExQ0~pbKJQXG z>`~CMF3RDY8+>mL&F!HqlSgvkmu6=5?vk^5u!(;C{4x8iB^P`?*@ByI&^=$eil_RD zTZFtg77LIogI1o_@~w< znN0SMNrdxZcAA1-r29aw5Ck{DHU4WP1y$j(3|Mb$^n|k_fJ~*v29Cm=D}Zej2c1sc z*mZkWJ)eg#YKYbkTmuxDE5@?c=C@9pU>`r@nwX+u0?5gYz+# 
zMe|TuulMY}>dr=6QTqhJ0&PvE!i9j-dQH^t!%h!_fQN1@{}=n1!jsvByq%AbeqD_f z!`#Qf!1J8R%TMtly7=n33hq8%3Y@DCb(`;6QM#wPr@fH|!}>>^6SXs;kp-7?ACXMn zgsOb|%Fav2p5ORP%5~7L(?yI^SmnU7tyIEB+F{0l-^q1=089qT{CPs?m2ZF<3i4JR zGE=IU=o~hH$AX$BHyDkesHMasH|ugXnUJ?yh#}AN2>| zTT}O|fv2N`_AiWyW?)PQ!X-j(4T2ZUvb`_({F88^M8z&jUN^_oS#(N ziGF`pe3o63?Fa@(ahR?%e9C{f1q|>k^E^Nz?38HZYsH^T!20LPHeek+ z6ROAgl*D4GiNSq4b~m%_8W+j|<}c!_H0&k&4FNiblNP-oa@qk6gi$&rMLg~#4tvbM zs)s){oX?>Ca0b)DLc}Gncy-&vNSs5A+`TP}N%Ie#wRl%W*exX0olFzTZV}xL!MRW8 z4tRN=)Jc{h#HMjdw`A*JESo{}BJI-7)6vhF+;C~P%TXyY zq1o0W_~EW0%a;JMN1m-v31M26@sWSK$1}B_z65P)QYG$DtVOvRUx`wBg zh$ze1U($9m?c-O4zsLC;2xpeG`&C^of5CCdhq&sUE6I;GsANqGVFs(la}(I2URbXl zBT3QnSWm%z7kr&iztxupkGFve6IO6yslg0Ix7&0QiSd~q_xPmD8=tQN;mrGa>tYAL z{vG=}Z3Tv%?GY%J6JsI(5+vHF$j0(Oua|+{U)SW7YLSX`QPc9iWoG>z+|txF^DH0< zdgB*l<)pd1i%D@ii73QGiGS=GMC9SQ;8F==3snn<~j`mm`!g`Z&ROkL4rN(pC2SgjQf}Z8 z1I?sD18X2&yQrsR(SoIOQ}}Q~Vg3!H2EB#{ByS~FtJE;RK=VpDY~oo+lWeC_YhAsq zhVSG69OHcT>MvlsxNp=<)0A^#FdRBmatMJeyHb#tQTO4K8Js)qxEA!jk-6A#6s?bhxNlvUP2OUFe2Dux zA78c(*e6*?4B}?z51wgyp+E7&&0M_h9fsLkdfE7pj!6J4tz0!)U13MluH3{=ff0|! z(?sv|mzc(G;}CZZS5?-srSfJQ1souvhUnpXfLvr~yDk zeh9H#Tr~6J@_J^v@SeD|9h^nhCf6}G+P z#j)u#Uuy^6QeEjcG-1a$uWPaCZUlaWGV`4a+7=kfczRY@8Td^uTdW?} z*wvo5If$Fh9xX4Qrf$53PDRd}89AJ-oTfSC+NxH%z6#Eh=#XML)bTR>sARtqjDK*j>&A~K#7I2`oxpRhSd9lSdzf+u=D z^P*Rx?@9M3C@cfx!_;d)ppqISq>~mk7D+=CGh+mb7U>pOe-%cFGTkw9@(}Nw~A$vp)UMb{fFx9;e!E#$7a2(O3>o>nbB% zq12+;N_5bpw2lk!XaHCj?DDp8{W_fV*1BawrYxt=GY!(frR~Sco)MBDjEcT5KNkjr!zWC}$6DlD58;@QAA7uBxlKZp z;ukD1gEd!xodXUh+`dJ^3?GTPk&cHhN4bVC3j^8MEX^Ia#htB5*VrsK70;89NTB9R znldnjx%1~rP&wNS|28XMIbP1JA7nOfC*C~S!~xi1%~ zEg8e#tXHqsP4AuuI|0g;o>P)jt6$J<=+*RFNP|6rKWYDmFe*GhxYsIZlzR(gXK)$> z)2ai!0nwJ?BNUM)foiu_QB*~>vwzmG<3-)uek@1r&MHcuEf0x(nj}Mf>8V@LDnvMA z@u>1IpK%i+w`eXzp64LbrGD|L<+IW#ebdKCl2z7%L<j6bYd{E*b5^J@<{WDbNA(D(T*S5ohK(69~wy}9xTN^w+ znY7YXpA6G{o02AC_G&RiDj42@EPrx;&{>&{bONrn zEE~40X=iVDWdle55JSdBw7=Qc?*Oi(-6b&zyE-x9%CKqV$RD~JW>Br)AZt^kt2mH? 
zr-ZuZwx)_VF1i(IV2;|R{Q4X_&JQ8Z<@>q-~C35{BMG_aL5n$k~T=25d-n1l& z6!w(G)jyk3*kbS#qceZ#yJ6@FyY_>Sk%5mK`@f%`pEuIdsgwY5Xd&%(>m)_i;FJ2HzYQG|Y0k>YV zj>Xy?Fnr3Ku{SlEThc1oJ{InhtbpMOsc1Fx!<+;t6|AB}V2m`-;#-ss^GaYcz1??Q z26_t#scN%tGT^{sb;-g2tLpT`Qs*UKd7Ao!`+UQUiI4N2BF3b{Q~8qa+_YC-rvGE!mcz$ zC@Foa;OyWuGnE_yN18kO@_DzFBi_yE&~Hy~c~7nZuH-u6s{Cs`hc=0rji^w9q#%CT z27X>dE%XOlP^L|k>trzh1qKvD^m>*8oSwq0Rf17x+?{pz4xB4`#O3iTl*Yb2L=UWN3bdy zRbp8-2iKv(N3ErV>%=yO{gkH{Ia*i~QKLTU9ID~bgz~@J7EowA-}YI_M$$4UuP-0F zas>8cX0&-8EC4L4>>Mk;W_K9f3<#gV_d+3J4La|g11v>;fJ#oR7%D+!dkUP+lb@%q zWa;`2cE^TIweHv17DO}?CkW&hWKTuJCMS4sj|B);tKL#(GvMzYB?_j;#J-pv@z)JH zxPq60w9g0*cTBXc%Luj_q?Q={(j4~eTzvj&uyfiZw0U+_^pw|&p<=z3mw;DfKO>*z z(e19cBA|Yi)x{eJHlR!%F21Et%ovr~d7)1%C?*l|ii0DeZ9OeCy^;%tgqVv_=*joW zc#?C_y9C=AIj&a=sQ~_u*?tMzQK+Cw%2kgpxLpI%z8d~h zrwsonxhUHud2yXDD)jIq9-knwNc3bH$2$&w_U$Au(fz)tY~5tsEYotj3enqn>bbwV zB%K-!eiKHFg>CE*X5FHqRC$vR`qeBl(}ZlgXj-JgyYR(~HBV(B_jmtq63o9_D)}23 z_o5;;iBroN#ezNMI$8HoQ3D0Vf*dn=B^yqrT`kr9J8QzhH+9y^YxO)3N zR_|9z)SnRVMFy;&_6n+f_OrMZ;?Ao}XXm_{dbIokA6pc{Wt^xHqeF1kqyA~;cpYWk zY0HqLD3vYV5P`xWil%ST8d|hfSOgWnEzGn=rM?}GjrOgZ#P|O~PJDtVq0*%xsZ3gp zPWyUC*QLD9P6Q>&)im!5JGPX>*|LS#QnftNyMEW08P-^DKYBbv@ot7#A1MOzHqkWE z4TS+YjS-M&D^yCc0fAFMWevLOzK+@-hP?AkJ=(_WAfLCiT;D0aH)fz?f_->VVE?$q zg&yaiJFciYtQ*87vA?cCN3vEOf9Qzx>J2!rODur#Aw8j?F zL5Kh!Xe-5eBNto1;z&m~@G{6UB4ggweY*S2!?{Y-KoP>aDl1Bim!y5O-R8It@!biX za-C_jP50^@$0onC%&2Mx z?F$p>2oG%uwzYAJMeFs6T`^ERu4KDNS78&{*++@S7n(?74Qssr2H+dGlf-cxR0Wmu zGDsP&yY6!T*hyFh5-NxmRVmwRf z=uz#3V0bxoO#l#5_Wqj0RR^W;!ah|3C_6wT-@nq1dguStIpSvu&MlLyqy)~W-KfKB zMBd@1(>0@8aLxf)1~*D`X=&9T)DScf)MGRVgd|b~PBj^TrP93s!j@qKdyU>|aCsG) z=ARPqiP)W;q8pG4@-L+T@(n8kRKBT(2b6QjcM_ZG$A~H6uw>8+V^eGJmfEdVBAs z5#1DIpIG3M=XIXE z-W0}39_&IB(IPzqs<$J^CG`nT@g=~7$|Q4HYDC8d37kH@OES0#IkY)GviZwlwgC44 z71iruEC9gdtA%mh^~L5(kzr?-{(a_)ZV&3n(fZqwCMRGb&-RZsz8$^|zP*~+H`?(} zl%Fy&6DkMe5;aRwPQvd2Gz%ixUt~^Q4KSw`%Pv`c5ls~ z?7)M!w;0)8-MHD31~(~CnVq4c0w$~H<1B6YMr~$f;V}Y(3~MSia2)l0OJ68|>C| 
z?-7{L6;7vLEc^3MsIkSGu*=I0Y&UFoch@pY-I6X1D_xfhkDU4%Tb~wt0|G7($1O2? z8i!k7eL5Y-^lfM9K8nRaRnBgY!OMDbu&0rB6p|?QSdxfh`eQD$cnHq^Hp_-2wBi5<8W9|3ju1C zy?4ku$nxMRVSwX73r+2+fn(RUOO9!lYnCPgK}w`Y;7L*f^YL< zgPwlpBY_5+8h+QIMDo4vyKk0P_}?lRzg&G#zTk}*6kzop;HxMsC(DW-HgHD{>Z@Vw z%9h1{_$|;L7}+WbkcV8q8UntcXC(pKA;(o_MnSlTXf#nqY8@GE`H?6#Ws}-?>>Lj? z>drWDXn!x*lW(EJ+pwK<#CqB&LZK>Sh?K+}$Z=$D`)IHM(MxH9JOd#029rjpE^bFI z)gB&jI#h$z>LUpnO(F9jQuR0#H_YTX@T8g|*3oEZfI=3Y72FjNjz#hM1`3ncxN?-S zyTj-)B)??I4rSh~h%4BAksH>1!bT_E%oznNOV1L)q@D#obDt=mFmK6M%!fgs=knJQ zwo9Y$oj(E|-ZyKGY2b_4f(ZiUN6jX%TDq5&kn%>!b4!tjOP01DuGs|5oMF$2-hQb) zd*rnv$U|)s!A7zPs=4b?QFvy8d?nYY4r-KV3p%;Xc}kUgL-{A9ZV!|)_TuSJi?S|$ zlQ4$r6O;qUS(pk{nr&*pO>>tZtOcGrq;G> zvPE_dXT@~ zQbDEeWbNf7cT8s~(GI%9Y4`Q5=~vH-NSV3BNFP6fmckqvYkYQj0*N+GIbhTrlk^f^ zsS$p_g4-@N1BTVC9^(dt52FV(S5Vdhatss?jpmz~1b97>urbUziIDjAsS`iku7?QL zU29@JZx2lm-OMN&Z5d%<);C#yk+KN!br8$t`VH?*4Lw8*+LZp`A_Z!5*<>hpH;RM(jlooN62eqA1(ZtKD1w?9^ z7W0V>ke&gG-%`ZL&R`F!QELyM%yWy%R_;LcbH>W=5sE~1kTpvm1>idV+W`ZDz z)pY|?1rc4+IFJ~SK8>uXddg(xmIQf9`W_CRpWD7SsRWIfY+?gxYYdNaGdAhGmOSjW z8zt&O1zgChD!hT$mV&m5qgg!mG6b9<-(+Qk zJ*iJ5Ak@VglNWGP&UFO!56V8R=H#IYNT=EvxGwr$K&jO+8M6aC>_)~|IcBH~xJi0c z`aOvpGtWg!+jPpEpBb8AYf2T=X-3RXRpu(u^VFRptzv+KimR`!B3d}jC)BokZ>Z$f z6GxP)J=@hBfLb0z?iv?wzTDch0tgjgq%VGQ4G|U7$H)H7!H4)wTh)A!1WQ-iQn9}E zj7dd`TL_yQM0Sh7$HbcEkS`f*HEbmKc>lXn0C_Fj(Wtf31_JsE+Mr_eDA{OHFKjQa zH~)w4-qYTWj%2n=Ej}G|a@8w(E?uAPEukzQD zx{XO*JIva_V;P?XBrC$X017X~=iKm_L;(3h)WWZq@9*y8=L@YkL~uk?y@Oxdm5RXg z_e0+7JV+}{VB-NByF?GTxJ|2Bjq|Ji&du{LP)>|r!g3(8ZvLbFsFe(uQmAe}Xc&26 z1wTkCrdO8sT;bt?ZxU&f^Jd0 zIHhaaHr4TzRNZ|7vk^za_Q{+DuxE$cpIO)i|~^fA$nH;DjrQ>-nzw7l)N$|7}_vTvVue{d@_lE0?2~Qf$2er z42XvrVD`0#8;Vb^Z!aGmOn4F(MVHF6+Tw*mma3KfIPa2xLCtAhs#VtW=a(9sIWn1K#0UOVy-)5 zkfwlB*|Dy-qj8L02{91!7rAp=R3t+{D9{Q=pivSu)p^bcd4<@Of-&JL#T5jB%}8N_ zpRo`lMK984DH2SMRc*dtj)a~LGKUyz3sKX-&yG!-tAm%r<*EJ%D|VFN4yA!}Eh1s;CuBk9Ucw}r*57VuMK8%opDpuy&~TJASpp1n>YXA~xXpwp}5lU_UZVz$)E zjN#ow4$^b81JBn?CnK$!0mp_)o7vrmI%N@>uZ!+WtzP9d*v(u84nF3CG-jQ>z{^j` 
zvb5H5pUxE8s)kisG{8Cc3lE>AIVNH*D~52VdYM8NKF4>0Gi2zUNUBnW+%W~hyabP4 zm|S?PNPCajh*o{|TgFwA&Bwd<^i9to>iQC{W9NRdb*&}jdm(v3o8AEltc`NLoNr2E zzj%<;$LO;~wr^?8Xv>Ab*C`F#vzygQ1Fzp@Q!liu1+J*>nJDt+Fv}DkpA^H9MIQ5# zei-W%q!|f;Z`_A~^7J{2?|}b?(wp7P^NjA?cCL7QJ0av$GwMJ9e+3=f$ysCC$Z@8+ zs^isn$OTOP<`(kyjYUAznLNH`h*Q}a!@cmrfhr2@Kejrz%gsam#pSOk10~F7+t`DO z&UmffNlZ=JLP$qYuVR8xO>zG7q4*(L-J0NDp#B&i9Li@r_p@=AfYMn5hURI>`jI4s zE24N2{V=IdD=2Gyo#J3B)SSj+8kIYtSB=yG9FHSMoa8fz6CRu&hQIUmOY@DN!5h9{B<^?sne>i6E zn4%dJ+}NcU==A6c`589VI^8;l^ndHe>Z=>7U0^+B5xEQC-Qi(S&YN+RfSdH;OHnnC z1MQ+T?6hmN*OwMar^=DV<}L~5m`w=QV{7?rOc4~Sg>1Bm-kSvs3tC!2tnbb1+s!2@ z#8=RuTeU~cvK~&)S;`h6GK9Jc3Y04s&+imYf-3duA^rNcv77(6nvqD^emms3@5!kl zocpqKQ}x2?!Qa3tHP&ouX^Efh%99(p&^~sspuCqNy4BM5)r?0|t#ZNtd2rvl9obxq| z3llWYJKON#yD3+7!_ePIjH|okxa2s)(C^ocFYNlH+LC&Q3$MnpuZ)aVyfTk3zBg{c z6$jA~r{W{#fOr9#j~aI^RiC#_QRlR|{ZB3P;_JgIMwn&oE-e4#z?WikO5~V{TO34_ zvbyWQ@;1&q4K04yMJrB%+xMTE3z~>=Q*U1r`q4J8+1UMx;h&s27|n3i^VfrrjL|}7 z@gs*`mZS%1z%e_*b_<>fiaZHg&0P_C0AmCH^h6vSCVVB3K48Z}So?-aq8WJ13^tWg z^;{#|e+W%mdc8H|$AFzwwnvFZsj-n!R4T$|MQc>{vXLG^aFX{!73sJf6b6zz)Sn(1 zJSJjKPY^cv$p9}{gJROIg3Dt4yI%>h3CI+)IQx@tkFO-IdwhF?FX(@I z7hB^R$6=I2p@sK4MOM}z5q-8uI#J)-CeEDH)D;Hms-q@(-07#{r|3O`b_<}A3hHL& zSDN#AB(ly(-6uVB4O;Se;b z>Vrw=`)rqu)bXDfemJD2P(y!@FH@ipRRu!jrN0Na zO<0EGnc!ECHTAsmEui6|fLO@+vy5xw*$~)fIcRJF57}@-#4l7j*=e$POCz*M=g4hK z z_!jzc3m+mYhrnZ#EQ8Kvh&dWfS6^*!$7?baR7MPK>4a15ON`H!9O%Iw*AY zj~e)tQm8gR8zF>?vX2EXbLg5ud$`USknf4bXc_HO`k6b zEvIGjBxzbHQ0SxnH#4U`ueD|ah4Xv9n*BHcOu>%dE1SFv?A~G43OfOpPWc>FSk6Dd z?2s0Kf;W3}t{tGN=Q}J%;lzhf_KpJnGa+gYggtTt2$o`G$CfZ&iWr%fy_i;NBqa6D-$=+ZVGlN3ek~L&WwlT&Sh8bg+B4wG9bf``=ol45qBH;01wXkf>xhPQ6kep`6$a9MEZ zP}!|BHUmW~l6v#LGJfdhqGwetD?#to@5Jd0pDLB?3;r%cdo^Fs-;B97adl^cB!!*8 zKfNfGpWz+;xl%{EuwaU#r-yw{ThO8S4m6_C&*~?>HtvVfH-K3<~M-IW5bb1e3 z&WAa050*p*sE;*cvMVx6Ls?X3IQNQbyA}fh<>{|CwTWU)B^nCt7MGafvXjcY-Z1Al z=a_X9G!iQ@1!-4j#6o$FHkzg`Jz#X0Hu@$z7W1y&+|P<)(&oN4&!E|rJJyVb!wu!u z#cSK&no6t*du6PRU0V_qj{=`uzp$cKA0z$@g!ynPT>Z(-A@%C=$VTHDlI)^jJ<7Bz 
z5o#!-^el#5C2daAGvh-wl)1`}Nhu^8OWXOVkSWuA6MMU8-@&~g_ll1oKWm!*Uo`*G zDBvz};aNKyaU$&QXv-WL+$q`|!MiXR=hQNP?C_A3!1*Fe26%f~_)tPPeqOCl0;+*I zP7#@MV(>!heN)2i?zqM*?P=P{%{B0?j{n}XB}mxB9nCKbb-KRapE;Vz>G0OKfkG}I z3X{jYHO9SZ9@+TfNwKO}N$HBhV8lX5Gah50KrYy|gCW2WlZp35NVN!Lvh`HmA0!8C zpBj#uucCVk@9I6E(>ZcDM=(f>EBEepg5}u&zpV7Hy6NHqG`n9;?H5JK?PIp3!8T+ijh zP@wsK&ucU>!`4WZ9&3Yo`5hqTb`|<|Az5)axcAV^?$X0_MfyZMdyMHa#~ti<%eJi} z2UP32Gvkziz-8AyhqZ6aP((cbx9S)ZGpYhA*f5BraUjV{U@_vriW z-EVKN^nEINp*WKorBSIYb)M~C%Fr}{>{pWLP%ir7#A!4F9_&sds2r-~{CNhS?;i=4a*X5tH=i*&~jrWusLCQru%=HB(6} zLGhv#D1_EPJDjS=GEr-y)dentYKq)Umgo0o39s7pi1l$ZRNA@P#V;g0Y4V7HV|m3+bNGJj!9VylU>m!|I! z52}u+)Cs0+AMy@hhF;nw1~SPFs}uR!8fD#8CYu?Ipq9&C4i&iKW@!d)i|TiSUtc@) zSntYoX@H3Ank;-(CtxY0d9LgtCZd^q;M~*khBrtus=o&&?>-BQxkPLtd#AD+;@Fqt zFm5$30<%EQrSa|Qq1D6~hW%}qK8(tr_=y$p4T03uGIib&g-eQ_N+F>l7hX*i_bik1 z8aH^`*Hj56|fW0|GS@fX#SLTeei3Nb2ki&!GFkw-7TiSQ}tJ)Vu{ zH9dFXl_WMk`4dxaTW5ZgVYDh)DACKzCsj#Q!R`_0vX%?kC}rnX|4Wp$pvnK)>f`=Kbx#_d_kq^Hr< z42XA5NdI70!I56({gdJMWRt#s%3v?Fs;HFeq&{}SHh#psMCnk-p9*v`DKJCL3 zpA}=3@rkwL;wAACNp&TP>fI3edwEG?#FVivuDtG>ib?k|#dbs4B1hHZI=_zEO^(^c z&36_CUr%8d9m)M7? z88=qzHCix+%QDluA$2gXtU--l^RZ}cOI7U?T>OX=2FQgQ=yU7qPbWXDoz_fL^7*VB z(n}pKVX?^LMhsS$;sDyfcK3rsmU+l}q8gv=Nh104pQ7#FWrd%V*^!F5W$e`Vw_O)v z)^$#Z+WJV&7+CaBbtjEWtvq{Sk8M1=WqrC|(v~iHPkZ0GAJW+rSvLDk`Qw`g;ShYJ zu}byVp^qQ^ThfXuD?Plu3knbB92~8z)XlW5ijD@|Yw?Quq*9d>&f@N=%hGs_?%gHU zaY?#q>f)kRCxVB&zl$xl-+1PNeoay2AB48jV$}kNi~x&Lf_lq|k*SO0vfujbE}sjE zIIeqW2Yy^qQF*fFY}RcANSZ6d)Ar#_rekroV}^O?_2sg! 
zHp7b#^~x#NJ!xe#A#aDGi(HFC<*!7kwk>w+xkvDJU%yXtoqh1mH@3*LWvA}6v3A+a zqcvM(wdf0r^5Nc=`)1wbc|7akzzc@XF(K`KohEr>dEl?cJ{WN$?llyEk8qK z!QMU&wF@jyKYGmhSa^j<^z*YZlTv>zBNF+}%lc_VFU+azTii`;LnYj%>!5nud1OTiwI1~!FJpP`WK9a+r0mcB>X*f7| z16P+otehumgT4a=8|rhm z|EB`|4K!8LrefjY$GQTVA#J2c9aOJc@TyIXamR} z=I;s&4hYIJ3;^13(h1}eyOI4Z%MVnvpHM{vDiH_Pj35yyIy4d`KnoCPBsB;Ss^&oo z23X6XFg%U|xQT!VSXM9&OWh0yZ-pN?Y-ePPJHe?&0Mw<8z91W32$&oYsKtS-ZTbJf zt7i_7_c$j*)1H8(kRnKdRIoc|Cc+$KTRmqfLV5dk7g&GY8 zKyDjMxZ_k{b{qW=NMRNn+M1??5g=j%7z~DhArMF;%o7UL0B)dHds4vv9rA;;k5h1g zK;mI2Vc6U8UT9v*#?DjB>sy{ zANHSdNPQq{KgYp1 zz-IXCoMDCr`ag_8rC@-!2W11r=R%0a0ecG8b0(1htQJR)fXokt1(E>9*5+>ufU>m@ z1BB+2~Hj)X4BGLN7YHF5_ HR>J=S)~yn4 literal 0 HcmV?d00001 diff --git a/solr/contrib/extraction/src/test/resources/solr/conf/protwords.txt b/solr/contrib/extraction/src/test/resources/solr/conf/protwords.txt new file mode 100644 index 00000000000..7878147ba58 --- /dev/null +++ b/solr/contrib/extraction/src/test/resources/solr/conf/protwords.txt @@ -0,0 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +#use a protected word file to avoid stemming two +#unrelated words to the same base word. +#to test, we will use words that would normally obviously be stemmed. 
+cats +ridding diff --git a/solr/contrib/extraction/src/test/resources/solr/conf/schema.xml b/solr/contrib/extraction/src/test/resources/solr/conf/schema.xml new file mode 100644 index 00000000000..8b3f68ea287 --- /dev/null +++ b/solr/contrib/extraction/src/test/resources/solr/conf/schema.xml @@ -0,0 +1,471 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + text + id + + + + + + + + + + + + + + + diff --git a/solr/contrib/extraction/src/test/resources/solr/conf/solrconfig.xml b/solr/contrib/extraction/src/test/resources/solr/conf/solrconfig.xml new file mode 100644 index 00000000000..f7495d646c7 --- /dev/null +++ b/solr/contrib/extraction/src/test/resources/solr/conf/solrconfig.xml @@ -0,0 +1,359 @@ + + + + + + + + + + + + ${solr.data.dir:./solr/data} + + + + + false + 10 + + + + 32 + 2147483647 + 10000 + 1000 + 10000 + + + false + + + org.apache.lucene.index.LogByteSizeMergePolicy + + + org.apache.lucene.index.ConcurrentMergeScheduler + + 1000 + 10000 + + single + + + + + false + 10 + 32 + 2147483647 + 10000 + + true + + + + + + + + + + + + + + + + + 1024 + + + + + + + + + + + true + + + + + true + + 10 + + + + + + + + + + + + + + + + + + + + + + + + + true + + + + 0.01 + + text^0.5 features_t^1.0 subject^1.4 title_stemmed^2.0 + + + text^0.2 features_t^1.1 subject^1.4 
title_stemmed^2.0 title^1.5 + + + ord(weight)^0.5 recip(rord(iind),1,1000,1000)^0.3 + + + 3<-1 5<-2 6<90% + + 100 + + + + *:* + 0.01 + + text^0.5 features_t^1.0 subject^1.4 title_stemmed^2.0 + + + text^0.2 features_t^1.1 subject^1.4 title_stemmed^2.0 title^1.5 + + + ord(weight)^0.5 recip(rord(iind),1,1000,1000)^0.3 + + + 3<-1 5<-2 6<90% + + 100 + + + + 1000 + 1.4142135 + 12 + foo + + + sqrt 2 + log 10 + + + + + + + + 4 + true + text,name,subject,title,whitetok + + + + + + + 4 + true + text,name,subject,title,whitetok + + + + + + false + + + + + + + + + + 100 + + + + + + 70 + + + + + + + ]]> + ]]> + + + + + + + + + + max-age=30, public + + + + + solr + solrconfig.xml scheam.xml admin-extra.html + + + + prefix-${solr.test.sys.prop2}-suffix + + + + + + diff --git a/solr/contrib/extraction/src/test/resources/solr/conf/stopwords.txt b/solr/contrib/extraction/src/test/resources/solr/conf/stopwords.txt new file mode 100644 index 00000000000..688e3075431 --- /dev/null +++ b/solr/contrib/extraction/src/test/resources/solr/conf/stopwords.txt @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+stopworda +stopwordb diff --git a/solr/contrib/extraction/src/test/resources/solr/conf/synonyms.txt b/solr/contrib/extraction/src/test/resources/solr/conf/synonyms.txt new file mode 100644 index 00000000000..a7624f0597d --- /dev/null +++ b/solr/contrib/extraction/src/test/resources/solr/conf/synonyms.txt @@ -0,0 +1,22 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +a => aa +b => b1 b2 +c => c1,c2 +a\=>a => b\=>b +a\,a => b\,b +foo,bar,baz + +Television,TV,Televisions diff --git a/solr/contrib/extraction/src/test/resources/version_control.txt b/solr/contrib/extraction/src/test/resources/version_control.txt new file mode 100644 index 00000000000..7a89c5b7d56 --- /dev/null +++ b/solr/contrib/extraction/src/test/resources/version_control.txt @@ -0,0 +1,18 @@ +Solr Version Control System + +Overview + +The Solr source code resides in the Apache Subversion (SVN) repository. +The command-line SVN client can be obtained here or as an optional package +for cygwin. + +The TortoiseSVN GUI client for Windows can be obtained here. There +are also SVN plugins available for older versions of Eclipse and +IntelliJ IDEA that don't have subversion support already included. 
+ +------------------------------- + +Note: This document is an excerpt from a document Licensed to the +Apache Software Foundation (ASF) under one or more contributor +license agreements. See the XML version (version_control.xml) for +more details. diff --git a/solr/contrib/extraction/src/test/resources/version_control.xml b/solr/contrib/extraction/src/test/resources/version_control.xml new file mode 100644 index 00000000000..4e099601ba8 --- /dev/null +++ b/solr/contrib/extraction/src/test/resources/version_control.xml @@ -0,0 +1,42 @@ + + + + + + +
    + Solr Version Control System +
    + + + +
    + Overview +

    + The Solr source code resides in the Apache Subversion (SVN) repository. + The command-line SVN client can be obtained here or as an optional package for cygwin. + The TortoiseSVN GUI client for Windows can be obtained here. There + are also SVN plugins available for older versions of Eclipse and + IntelliJ IDEA that don't have subversion support already included. +

    +
    +

    Here is some more text. It contains a link.

    +

    Text Here

    + + + diff --git a/solr/contrib/velocity/build.xml b/solr/contrib/velocity/build.xml new file mode 100644 index 00000000000..deec7c026ad --- /dev/null +++ b/solr/contrib/velocity/build.xml @@ -0,0 +1,125 @@ + + + + + + + + + + + + Solritas: Velocity Response Writer + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Tests failed! + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/solr/contrib/velocity/solr-velocity-pom.xml.template b/solr/contrib/velocity/solr-velocity-pom.xml.template new file mode 100644 index 00000000000..8dcaf7fd279 --- /dev/null +++ b/solr/contrib/velocity/solr-velocity-pom.xml.template @@ -0,0 +1,61 @@ + + + + + 4.0.0 + + + org.apache.solr + solr-parent + @maven_version@ + + + org.apache.solr + solr-velocity + Apache Solr Velocity Extension + @maven_version@ + Apache Solr Velocity Extension adds support for integrating Solr and Velocity + jar + + + + commons-lang + commons-lang + 2.4 + + + commons-beanutils + commons-beanutils + 1.7.0 + + + org.apache.velocity + velocity + 1.6.1 + + + org.apache.velocity + velocity-tools + 2.0-beta3 + + + diff --git a/solr/contrib/velocity/src/main/java/footer.vm b/solr/contrib/velocity/src/main/java/footer.vm new file mode 100644 index 00000000000..ad6b4b356e9 --- /dev/null +++ b/solr/contrib/velocity/src/main/java/footer.vm @@ -0,0 +1,8 @@ +## This template currently lives in the java src/ tree as an example of templates being loaded from the classpath + +#if($params.getBool("debugQuery",false)) + #parse("debug.vm") +#end + +
    +Generated by VelocityResponseWriter \ No newline at end of file diff --git a/solr/contrib/velocity/src/main/java/org/apache/solr/request/PageTool.java b/solr/contrib/velocity/src/main/java/org/apache/solr/request/PageTool.java new file mode 100644 index 00000000000..b96566fa32a --- /dev/null +++ b/solr/contrib/velocity/src/main/java/org/apache/solr/request/PageTool.java @@ -0,0 +1,80 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.solr.request; + +import org.apache.solr.response.SolrQueryResponse; +import org.apache.solr.search.DocSlice; +import org.apache.solr.common.SolrDocumentList; + +public class PageTool { + private long start; + private int results_per_page = 10; + private long results_found; + private int page_count; + private int current_page_number; + + public PageTool(SolrQueryRequest request, SolrQueryResponse response) { + String rows = request.getParams().get("rows"); + + if (rows != null) { + results_per_page = new Integer(rows); + } + + Object docs = response.getValues().get("response"); + if (docs != null) { + if (docs instanceof DocSlice) { + DocSlice doc_slice = (DocSlice) docs; + results_found = doc_slice.matches(); + start = doc_slice.offset(); + } else { + SolrDocumentList doc_list = (SolrDocumentList) docs; + results_found = doc_list.getNumFound(); + start = doc_list.getStart(); + } + } + + page_count = (int) Math.ceil(results_found / (double) results_per_page); + current_page_number = (int) Math.ceil(start / (double) results_per_page) + (page_count > 0 ? 
1 : 0); + } + + public long getStart() { + return start; + } + + public int getResults_per_page() { + return results_per_page; + } + + public long getResults_found() { + return results_found; + } + + public int getPage_count() { + return page_count; + } + + public int getCurrent_page_number() { + return current_page_number; + } + + public String toString() { + return "Found " + results_found + + " Page " + current_page_number + " of " + page_count + + " Starting at " + start + " per page " + results_per_page; + } +} diff --git a/solr/contrib/velocity/src/main/java/org/apache/solr/request/SolrParamResourceLoader.java b/solr/contrib/velocity/src/main/java/org/apache/solr/request/SolrParamResourceLoader.java new file mode 100644 index 00000000000..73064f54002 --- /dev/null +++ b/solr/contrib/velocity/src/main/java/org/apache/solr/request/SolrParamResourceLoader.java @@ -0,0 +1,66 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.solr.request; + +import org.apache.velocity.runtime.resource.loader.ResourceLoader; +import org.apache.velocity.runtime.resource.Resource; +import org.apache.velocity.exception.ResourceNotFoundException; +import org.apache.commons.collections.ExtendedProperties; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; + +public class SolrParamResourceLoader extends ResourceLoader { + private Map templates = new HashMap(); + public SolrParamResourceLoader(SolrQueryRequest request) { + super(); + + // TODO: Consider using content streams, but need a template name associated with each stream + // for now, a custom param convention of template.=