From e3a3a7aafeca072a4d52ff54fa58b38e4d6ccadb Mon Sep 17 00:00:00 2001 From: Uwe Schindler Date: Wed, 18 May 2011 16:16:29 +0000 Subject: [PATCH 01/27] Fix small copy paste error (no test failed!!!) in LUCENE-3084 (how can this happen?) git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1124316 13f79535-47bb-0310-9956-ffa450edef68 --- lucene/src/java/org/apache/lucene/index/IndexWriter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lucene/src/java/org/apache/lucene/index/IndexWriter.java b/lucene/src/java/org/apache/lucene/index/IndexWriter.java index 07cb0f58316..305676ec2b6 100644 --- a/lucene/src/java/org/apache/lucene/index/IndexWriter.java +++ b/lucene/src/java/org/apache/lucene/index/IndexWriter.java @@ -2552,7 +2552,7 @@ public class IndexWriter implements Closeable { lastCommitChangeCount = pendingCommitChangeCount; segmentInfos.updateGeneration(pendingCommit); segmentInfos.setUserData(pendingCommit.getUserData()); - rollbackSegments = segmentInfos.createBackupSegmentInfos(true); + rollbackSegments = pendingCommit.createBackupSegmentInfos(true); deleter.checkpoint(pendingCommit, true); } finally { // Matches the incRef done in startCommit: From deb41ad1f146fbdf01b096af2cdb24d7e49c08a2 Mon Sep 17 00:00:00 2001 From: Uwe Schindler Date: Wed, 18 May 2011 16:29:21 +0000 Subject: [PATCH 02/27] Fix tabs git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1124330 13f79535-47bb-0310-9956-ffa450edef68 --- lucene/CHANGES.txt | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt index d25b7862088..409ca5d0123 100644 --- a/lucene/CHANGES.txt +++ b/lucene/CHANGES.txt @@ -479,14 +479,14 @@ Changes in backwards compatibility policy * LUCENE-3084: MergePolicy.OneMerge.segments was changed from SegmentInfos to a List. SegmentInfos itsself was changed - to no longer extend Vector (to update code that is using - Vector-API, use the new asList() and asSet() methods returning unmodifiable - collections; modifying SegmentInfos is now only possible through - the explicitely declared methods). IndexWriter.segString() now takes - Iterable instead of List. A simple recompile - should fix this. MergePolicy and SegmentInfos are internal/experimental - APIs not covered by the strict backwards compatibility policy. - (Uwe Schindler, Mike McCandless) + to no longer extend Vector (to update code that is using + Vector-API, use the new asList() and asSet() methods returning unmodifiable + collections; modifying SegmentInfos is now only possible through + the explicitely declared methods). IndexWriter.segString() now takes + Iterable instead of List. A simple recompile + should fix this. MergePolicy and SegmentInfos are internal/experimental + APIs not covered by the strict backwards compatibility policy. 
+ (Uwe Schindler, Mike McCandless) Changes in runtime behavior From 62a40f5c3666448db2c23a2950ffea39dd3fba0b Mon Sep 17 00:00:00 2001 From: Michael McCandless Date: Wed, 18 May 2011 18:13:23 +0000 Subject: [PATCH 03/27] LUCENE-2027: remove Directory.touchFile git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1124363 13f79535-47bb-0310-9956-ffa450edef68 --- lucene/CHANGES.txt | 3 +++ .../apache/lucene/store/je/JEDirectory.java | 11 -------- .../apache/lucene/store/db/DbDirectory.java | 13 ---------- .../lucene/store/NRTCachingDirectory.java | 9 ------- .../lucene/index/CompoundFileReader.java | 6 ----- .../org/apache/lucene/store/Directory.java | 4 --- .../org/apache/lucene/store/FSDirectory.java | 8 ------ .../lucene/store/FileSwitchDirectory.java | 5 ---- .../org/apache/lucene/store/RAMDirectory.java | 26 ------------------- .../java/org/apache/lucene/store/RAMFile.java | 1 - .../lucene/store/MockDirectoryWrapper.java | 6 ----- .../apache/lucene/index/TestFieldsReader.java | 4 --- .../lucene/store/TestBufferedIndexInput.java | 6 ----- 13 files changed, 3 insertions(+), 99 deletions(-) diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt index 409ca5d0123..2368db614f9 100644 --- a/lucene/CHANGES.txt +++ b/lucene/CHANGES.txt @@ -530,6 +530,9 @@ API Changes ClassCastException when loading lazy fields or NumericFields. (Uwe Schindler, Ryan McKinley, Mike McCandless) +* LUCENE-2027: Directory.touchFile is deprecated and will be removed + in 4.0. (Mike McCandless) + Optimizations * LUCENE-2990: ArrayUtil/CollectionUtil.*Sort() methods now exit early diff --git a/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/JEDirectory.java b/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/JEDirectory.java index 8e2e760d697..0bbeda51f8e 100644 --- a/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/JEDirectory.java +++ b/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/JEDirectory.java @@ -199,17 +199,6 @@ public class JEDirectory extends Directory { return new JELock(); } - @Override - public void touchFile(String name) throws IOException { - File file = new File(name); - long length = 0L; - - if (file.exists(this)) - length = file.getLength(); - - file.modify(this, length, System.currentTimeMillis()); - } - /** * Once a transaction handle was committed it is no longer valid. In order * to continue using this JEDirectory instance after a commit, the diff --git a/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/DbDirectory.java b/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/DbDirectory.java index 829dbedda3d..2e84262855e 100644 --- a/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/DbDirectory.java +++ b/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/DbDirectory.java @@ -222,19 +222,6 @@ public class DbDirectory extends Directory { return new DbLock(); } - @Override - public void touchFile(String name) - throws IOException - { - File file = new File(name); - long length = 0L; - - if (file.exists(this)) - length = file.getLength(); - - file.modify(this, length, System.currentTimeMillis()); - } - /** * Once a transaction handle was committed it is no longer valid. 
In * order to continue using this DbDirectory instance after a commit, the diff --git a/lucene/contrib/misc/src/java/org/apache/lucene/store/NRTCachingDirectory.java b/lucene/contrib/misc/src/java/org/apache/lucene/store/NRTCachingDirectory.java index 7430076f906..0c097737295 100644 --- a/lucene/contrib/misc/src/java/org/apache/lucene/store/NRTCachingDirectory.java +++ b/lucene/contrib/misc/src/java/org/apache/lucene/store/NRTCachingDirectory.java @@ -134,15 +134,6 @@ public class NRTCachingDirectory extends Directory { } } - @Override - public synchronized void touchFile(String name) throws IOException { - if (cache.fileExists(name)) { - cache.touchFile(name); - } else { - delegate.touchFile(name); - } - } - @Override public synchronized void deleteFile(String name) throws IOException { // Delete from both, in case we are currently uncaching: diff --git a/lucene/src/java/org/apache/lucene/index/CompoundFileReader.java b/lucene/src/java/org/apache/lucene/index/CompoundFileReader.java index 87c3c7b1ac5..ce77bc94a61 100644 --- a/lucene/src/java/org/apache/lucene/index/CompoundFileReader.java +++ b/lucene/src/java/org/apache/lucene/index/CompoundFileReader.java @@ -189,12 +189,6 @@ public class CompoundFileReader extends Directory { return directory.fileModified(fileName); } - /** Set the modified time of the compound file to now. */ - @Override - public void touchFile(String name) throws IOException { - directory.touchFile(fileName); - } - /** Not implemented * @throws UnsupportedOperationException */ @Override diff --git a/lucene/src/java/org/apache/lucene/store/Directory.java b/lucene/src/java/org/apache/lucene/store/Directory.java index f9e3167ccc2..2aba2e7ef12 100644 --- a/lucene/src/java/org/apache/lucene/store/Directory.java +++ b/lucene/src/java/org/apache/lucene/store/Directory.java @@ -65,10 +65,6 @@ public abstract class Directory implements Closeable { public abstract long fileModified(String name) throws IOException; - /** Set the modified time of an existing file to now. */ - public abstract void touchFile(String name) - throws IOException; - /** Removes an existing file in the directory. */ public abstract void deleteFile(String name) throws IOException; diff --git a/lucene/src/java/org/apache/lucene/store/FSDirectory.java b/lucene/src/java/org/apache/lucene/store/FSDirectory.java index fcb79138fb5..c5a83879be9 100644 --- a/lucene/src/java/org/apache/lucene/store/FSDirectory.java +++ b/lucene/src/java/org/apache/lucene/store/FSDirectory.java @@ -272,14 +272,6 @@ public abstract class FSDirectory extends Directory { return file.lastModified(); } - /** Set the modified time of an existing file to now. */ - @Override - public void touchFile(String name) { - ensureOpen(); - File file = new File(directory, name); - file.setLastModified(System.currentTimeMillis()); - } - /** Returns the length in bytes of a file in the directory. 
*/ @Override public long fileLength(String name) throws IOException { diff --git a/lucene/src/java/org/apache/lucene/store/FileSwitchDirectory.java b/lucene/src/java/org/apache/lucene/store/FileSwitchDirectory.java index 55236b471b7..495ea02b208 100644 --- a/lucene/src/java/org/apache/lucene/store/FileSwitchDirectory.java +++ b/lucene/src/java/org/apache/lucene/store/FileSwitchDirectory.java @@ -114,11 +114,6 @@ public class FileSwitchDirectory extends Directory { return getDirectory(name).fileModified(name); } - @Override - public void touchFile(String name) throws IOException { - getDirectory(name).touchFile(name); - } - @Override public void deleteFile(String name) throws IOException { getDirectory(name).deleteFile(name); diff --git a/lucene/src/java/org/apache/lucene/store/RAMDirectory.java b/lucene/src/java/org/apache/lucene/store/RAMDirectory.java index 9d07160a7b2..e33af1a5ee6 100644 --- a/lucene/src/java/org/apache/lucene/store/RAMDirectory.java +++ b/lucene/src/java/org/apache/lucene/store/RAMDirectory.java @@ -27,8 +27,6 @@ import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicLong; -import org.apache.lucene.util.ThreadInterruptedException; - /** * A memory-resident {@link Directory} implementation. Locking * implementation is by default the {@link SingleInstanceLockFactory} @@ -112,30 +110,6 @@ public class RAMDirectory extends Directory { return file.getLastModified(); } - /** Set the modified time of an existing file to now. - * @throws IOException if the file does not exist - */ - @Override - public void touchFile(String name) throws IOException { - ensureOpen(); - RAMFile file = fileMap.get(name); - if (file == null) { - throw new FileNotFoundException(name); - } - - long ts2, ts1 = System.currentTimeMillis(); - do { - try { - Thread.sleep(0, 1); - } catch (InterruptedException ie) { - throw new ThreadInterruptedException(ie); - } - ts2 = System.currentTimeMillis(); - } while(ts1 == ts2); - - file.setLastModified(ts2); - } - /** Returns the length in bytes of a file in the directory. 
* @throws IOException if the file does not exist */ diff --git a/lucene/src/java/org/apache/lucene/store/RAMFile.java b/lucene/src/java/org/apache/lucene/store/RAMFile.java index 123f800fba8..4fe78a63d4c 100644 --- a/lucene/src/java/org/apache/lucene/store/RAMFile.java +++ b/lucene/src/java/org/apache/lucene/store/RAMFile.java @@ -26,7 +26,6 @@ public class RAMFile { RAMDirectory directory; protected long sizeInBytes; - // This is publicly modifiable via Directory.touchFile(), so direct access not supported private long lastModified = System.currentTimeMillis(); // File used as buffer, in no RAMDirectory diff --git a/lucene/src/test-framework/org/apache/lucene/store/MockDirectoryWrapper.java b/lucene/src/test-framework/org/apache/lucene/store/MockDirectoryWrapper.java index c7b0d036dd1..14ee65dc11d 100644 --- a/lucene/src/test-framework/org/apache/lucene/store/MockDirectoryWrapper.java +++ b/lucene/src/test-framework/org/apache/lucene/store/MockDirectoryWrapper.java @@ -559,12 +559,6 @@ public class MockDirectoryWrapper extends Directory { return delegate.fileModified(name); } - @Override - public synchronized void touchFile(String name) throws IOException { - maybeYield(); - delegate.touchFile(name); - } - @Override public synchronized long fileLength(String name) throws IOException { maybeYield(); diff --git a/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java b/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java index 75a9be9cc0e..4214263230d 100644 --- a/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java +++ b/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java @@ -411,10 +411,6 @@ public class TestFieldsReader extends LuceneTestCase { return fsDir.fileModified(name); } @Override - public void touchFile(String name) throws IOException { - fsDir.touchFile(name); - } - @Override public void deleteFile(String name) throws IOException { fsDir.deleteFile(name); } diff --git a/lucene/src/test/org/apache/lucene/store/TestBufferedIndexInput.java b/lucene/src/test/org/apache/lucene/store/TestBufferedIndexInput.java index cab6a66b266..6066ef9d513 100755 --- a/lucene/src/test/org/apache/lucene/store/TestBufferedIndexInput.java +++ b/lucene/src/test/org/apache/lucene/store/TestBufferedIndexInput.java @@ -345,12 +345,6 @@ public class TestBufferedIndexInput extends LuceneTestCase { dir.deleteFile(name); } @Override - public void touchFile(String name) - throws IOException - { - dir.touchFile(name); - } - @Override public long fileModified(String name) throws IOException { From 731e619a7c34163779557c01298e3e9f994f4750 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Wed, 18 May 2011 18:23:06 +0000 Subject: [PATCH 04/27] LUCENE-3012: if you use setNorm, lucene writes a headerless separate norms file git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1124366 13f79535-47bb-0310-9956-ffa450edef68 --- lucene/CHANGES.txt | 3 +++ .../apache/lucene/index/SegmentReader.java | 16 +++++++++++++++- .../org/apache/lucene/index/index.32.cfs.zip | Bin 5158 -> 5184 bytes .../apache/lucene/index/index.32.nocfs.zip | Bin 12119 -> 7603 bytes 4 files changed, 18 insertions(+), 1 deletion(-) diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt index 2368db614f9..04b52168971 100644 --- a/lucene/CHANGES.txt +++ b/lucene/CHANGES.txt @@ -564,6 +564,9 @@ Bug fixes PhraseQuery as term with lower doc freq will also have less positions. 
(Uwe Schindler, Robert Muir, Otis Gospodnetic) +* LUCENE-3012: Lucene writes the header now for separate norm files (*.sNNN) + (Robert Muir) + Test Cases * LUCENE-3002: added 'tests.iter.min' to control 'tests.iter' by allowing to diff --git a/lucene/src/java/org/apache/lucene/index/SegmentReader.java b/lucene/src/java/org/apache/lucene/index/SegmentReader.java index aa97da86d47..ec045c416f5 100644 --- a/lucene/src/java/org/apache/lucene/index/SegmentReader.java +++ b/lucene/src/java/org/apache/lucene/index/SegmentReader.java @@ -38,6 +38,7 @@ import org.apache.lucene.util.BitVector; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.CloseableThreadLocal; +import org.apache.lucene.util.StringHelper; /** * @lucene.experimental @@ -441,6 +442,7 @@ public class SegmentReader extends IndexReader implements Cloneable { boolean success = false; try { try { + out.writeBytes(SegmentMerger.NORMS_HEADER, 0, SegmentMerger.NORMS_HEADER.length); out.writeBytes(bytes, maxDoc()); } finally { out.close(); @@ -986,8 +988,20 @@ public class SegmentReader extends IndexReader implements Cloneable { // If this were to change in the future, a clone could be done here. normInput = singleNormStream; } else { - normSeek = 0; normInput = d.openInput(fileName); + // if the segment was created in 3.2 or after, we wrote the header for sure, + // and don't need to do the sketchy file size check. otherwise, we check + // if the size is exactly equal to maxDoc to detect a headerless file. + // NOTE: remove this check in Lucene 5.0! + String version = si.getVersion(); + final boolean isUnversioned = + (version == null || StringHelper.getVersionComparator().compare(version, "3.2") < 0) + && normInput.length() == maxDoc(); + if (isUnversioned) { + normSeek = 0; + } else { + normSeek = SegmentMerger.NORMS_HEADER.length; + } } norms.put(fi.name, new Norm(normInput, fi.number, normSeek)); diff --git a/lucene/src/test/org/apache/lucene/index/index.32.cfs.zip b/lucene/src/test/org/apache/lucene/index/index.32.cfs.zip index 52939839d691b8a4494f8521f1cf64e26fb00749..eff3153eb2af9aa4ed2d060aa9658fcfcb3953f9 100644 GIT binary patch literal 5184 zcmb7|2T)Vl+r~qHP^Ak&fl#EkP^E~pAiYUPg@8b4(gi8fJE0>TDFRBDAVoS7stAHK zL8L24l~7~>`GW4wUv_49{ch&mdvoTV-<*4Lp7(k4>Zs%55d+Q+Q&-e=SXa}-vu|gO z5WoO1hnfosSXnz7=o0~O%Sc8w&)WS5Bmf+|Ra^kz$LY__amb0RYesmcU&b_kSa87`15A$kXIr6*+SCUC5zCUJ|?YEMIPE@tZIW%@unzIWjCP zjMJ-^(b397`D0!DrRx;R4t~S>sbT z@~{)Lc;u7bI|gj*jhqLjbwm0pOOP*#EB$+~$HMM8`d`jcFZa}1-i<{`RK+Pz)ks|f zA$_CQAsN%gKF4n5&DFbBl6>w%nDkrC80)VaHPuPFmM4e8NtV^UG?W_lt4PfVT}fo& zgkNx9lv|Ob76v|^mz`NGvLjES3ATwPSnf-*)(Y<`LD|e{MIfSU?#jd2=`Mp90hgrY z=|coUokD>MG(Vzi-T`Nk;q+e2(oy(l` ziLOy{;_CLM?aa+UP4ysyJzN@@@(G?hsEY@b{;ZB1+FL3R#krV5ZPrNok$X#BGx=UWqeO2YiQW6S zEDMV1w(6Gs`g+G6On#XH^dW+_u9AO-%ie(mHYn~u@_>5s)$aZL%thU0GT6w3yr|^l zgnSinY{GzKd2%ACiRzYEFYU?g_4P&(O9a6%t3wXs198Q)kP{;d@tY2X<0=)2rfju0 z>|;m2$BgcN@_Nnm2lqw>Ib~~sj63f<@kYiFrGMVE-4^eJ{WNQ;F$$mRTJ_X3>((Rx zJ;5&r+E%rQ(c?a}|LKQ~g=wV$d$loyV8mjUIcmsnPwQl9dWC$EN^6yBJH*6XyIH$y zeywlUU>+TTudG2dCkGyCc5WRoV9v24D&*x4@_5RgpkNAIRaIwh!S)F92kl~ef<*Cl zaL=n$Iuti6606evzf>yppDH!hbf!_J*!Z$}IB8Oah`kr(NAV^I#t<3u23UvgAbC*@kDCCqYODb2+b5lPAZCQ-mbnWX715fPbAx)Ga~6a5pU}LV)|S_u%6|%;4mb* z=gu%E{Xp@`JE~383T+4`Y9)qLAkvKLq-w<)w1tB|^Lc~rtK04E$_I&vFGma3y4%hf z{)*cyG@2LR&}hU`U>yCnv%fek`@MSP zq2JL{X;r$3vs-~qfetBl_0ynt-m`$#l=>0%5vQbJy9fQ1 z_m4zp7V18{Dy&6amRTQeHP}CrxPvRLIdtD}anpx@K&qc(BUT{Yj>KQ^@I^~4RZrfU z7(LnMt@jK?;%fI&rnqW#?VpU^C~2WCu~8Q{l2M?d 
z0lgE3=292do&f1VTpLZU7H3(c3W)6F3Tch^NpYK4TBEdtB<}j`lL9_Qm{c}@0uKtm z01pafBO2PFL(db&95yv`ckTy8cS!+LlkbvV2zL!n$nS%8xF9W(Q??r$)Lma$E0hzx~d>1Jf6VDh5oh_hNFUDKYyF8~Z3`YGu!`8TVH(}>fmf`|y%BwrOB%6Hvw7c$E zdQA+=>zTARPNHFcL>jwB{^5C~9j&pp`;M?=XBew>SAcOb(tmmHlP=j1LzSU(D0MRN zi|V+wL$U85`K;&10$;_~pDi--OYi#3iMs@T?-a|PY}!nWcFA~RIe+@OH7tI0DQcl2 zaxhC}urN|9k#MN&*ln(5eSQA2s{az~JvU6thIi4bQdn!yuv@hZdok$lJM5QusYB0+ zdE|=H!RJ)$f~<3Y9}V=URPwoY)TMT-=SAvuX&yiMj*|k0aaV%uea}WI$kh5wnzVa< z{5a&~5Z*%VV<)?5rZ%#6J#pD==gnT3`EB|SQh6H;Z|hlp51X?5n>WY|V< z=C;CE=XQtwo84kac23OojBxS)X51X)a1U>6NLcwRBnba^NN96@Cr8uj<@KF1B8fHW zrdrH38Z-b$!892CHlK=_7>Ev}z|B{ER@BWMMZzcrZGY8n*Z~B72jEcwiJ}4m0|N1b z(B^=FNSDtCmQRc1NEneRX%e#shwENjgRM(3(;J@B$u?khVaNmirG)~j3EnG@`;TwC zPeTDv4{q3GjDWoA=nmgiOaD^LQ~tykF)uPklU-RT--0*LX^(`tr6- zG$BXri{i`vI(xXQR$juAdiM-FXFfQDajjjLVsn1bq0x)XG|H&}&ug{auCfPtcFr0( zY5G2w#0**1A{zEq+X^RPTS*%Ig2Y48cpMlZnNXwe zL;#|wV3%-Mf?3ub`X~)gG4Xxve8-gI0e1| zBI$+hB^WIJHQ6IQr-+<+H2Tn=l@37EW)*?8Z)4ZBDC_#iD<6DVbiLWr!nw3IXSqVT zjA>YjnUp@*uSlelbhG%YG`<>4L5&lT+z`D-Sx;;!(q&!nAu2#a{hx9|LZLs_&kG7SJ+GW@fDJ$pQA<}C(1)UwoWx~FSYZof6zWDgo=jY9- zrR^VEG8w6ZilI$Q79gTr=AD*M!BqYelZ$bvwsFdY!CRyrZtDBGJ~B3D|MVpC;Ne8-V4MK=0S`Ro zbBHN7Tc>uGDKY)6J003X=5Gvz4d3pTavsl>SM=$3w)yN6oR0xd7-RTIvBHfY{WIeh z`A^{*$~aRCf%n(qo75FlK|B{>x9WMQ6p2nEVfdu(hm2Ev-tPu zeOVElGHQDD2}B5EDZlxFSP8#Hq{eLX!zV3c+*vs&`L5@WO~@9BBFq^cyVV4E{83(m z+SpCOuZ}Z0+MEJZJ$IGk4q%et(YB6eR zk7)07e5A&n4;aidpcoLmTF`K*i#gnnV{MFMMu49D79d*W<#;U2bb;deQy@u#wkjZZJdVQII046e?~|knVQu_ zVnaOKvc9Reg}T!ulikW>9TqUv{Af1-bK{n{XNh9erHU?IL2}mMF7|zBe;_OieM|%D zpvF+vMiB61M<+H+`JBFbXp(WVp6Qy@pYPj**~>a4U{nYm+*!83*SBM<uD#+6wk`NH`3t zD(tFR>M_2_S4{6k8)Ac~M!MOh&lsiHi4M`KdztIzbcD}u2Ckz01~Yftv%|be@RNnN zUnqwiGEelRG zOH67b+Te#0g3zdUHpFv`-TUO4AF+@%Ls-3KBai>) zr`AlW_zhSu703BgOQQd!rK8i+(*S=m5Ml4#VRiMAK(|ukT{U5m{Op%<83hVl2&Nb_ zd0qVAWTO<4<#Ifp*CB3T6S?9?a*_qi@1leFY@Xeu&Fc!tbG*Uq1hB+m{$vUNM(#$P zz}+!Nw{+A70)7v|dpU{Km)cE?8aP8(4{SoMu0F!|&7}Pbfqr*nE*rnd-f;JZB=_+; zyHZBUV{i5#2Ig>We={Fujf`qGq%{!rC?hH)QBp`KF&PK8;BiSM36~4+Z|47*^x$af zEQi25s{$|p+^rG!tergE%|-q#l{kBMBM|5YF72#cJs1o&mQR0gJQT_O5zxD283^b; zl>@NIU8Cq2d}Lm3lxnJd1aVwZkm$apP8y>n%zMr%3sYT86qZhwQC@hlee0WvN)=f!nRSq zIg#?JzNjF3n~G?IsCLIyk(XR;4XB1IV)e!yB7!FIlnWk;3GYtXa2npxgPPvOLbJ(e z=H%i)ZtV%%jY3=-7v30)$j4{1%z1#olTG~-m;bgpohCDrB-qtCt8$S3vN{3?Yp4HO zA6XC~JbV}n!~rtl{!_^Xj0OKZ3WLtB|0}@Y|17}#t@!ej_D@BZUuhaYXg@dc@3GGR zHT;#egEbj!dikTA^LwoG>FG}vR#||;phf~L>#t<>_h9D})L$otPk09V|4i(>Xa34+ z#s1Lde>&*z&Fg&3_?1OWj%EEBL4J>QUWdQ3fE0jps{B3P`JMYKkD2m+yr28{pNRON Tvn@%4{k#MM0It|^0KoqN*F9$p literal 5158 zcma)=2T&8+9)|;j(0dTP^cH#z9hA^p=slo-5RfWWinM?_+rokOrUz@it5daJL4s#I@3mX8~No~G03IliT&60gu($!ar z0bqRu6yqf6V}sjub2UP^@X2v-aB|g)3nutWh!|*Tt*ordtQNa+05}RaRxLwF2%?(- zcPGggcjs!dQm}EQX}MD+bN1d`&6%&+<4C>zV)oCZ%XP|9lVzO6umI-EcE#WTC8WAIYY1kc}3VmZbsdK;4Ggg*y}hS3I7tncY+&z$K25 zbY{xAIN2Cjj_&MI^~veH|1g}1@hXXvi5&mcJ3@Li%~Ar^Z*23%@$(wo{5#!lK(zmG zr<{vT=&3X=EqD~l-(M_j)P?f_-uO&hfe8;Jamw4Dj*BB)zjzg%q|y&6aJ00Mx`$m z?FBO(<`KF4WkYK{((I>zjyr{wWC9V%*Sy#0KoF*tN9r%Q%U1|<9 z_^rxZQfXCOcQ-t4sMA=@9N287Y6mg`u}0tZ`DmvhVAgArPz51}avg8xuqFohTSE62 zexmDKF6ED_4;PX=4s&O;O|n%`+C<98tgKKLK+NZp#s~^La-(+nc9wSQcKoHNFdyaV znJanJAy@`26xb{jgo`WzcU6-3($H~loY(mym&TG*eP@R0}V05ExiYLQ|D^+pm zMe0)*LyedfAARorgn7lbEQn=6_=)SsF7oSVW1Zd2Y&r!S%}>MBTDNI5FQ=^AF&Spl zMcrHXz?s9zcJoH2%vFvDOow!D5-P>TdE<&(8kpD-dY17nAa0a|Y(IO`(OV6r;w4gQ z?&Br$ZBUwR{ZuOSqQu)^t8hZA{1HN;*46Ps+0pN{aVFm<2{zw}k9oc2@xey}gZ+cu z74dPLHAh=pe}d;7ZNAuD%x!Gf?H7}x#dAbec}4;qr9~`;*93xnzj&ape@HYQlceTc zuccmweyH7d9qufk9Ipy=>e$o87)ppS6!2d|l{#Jo5@Pya8~%R{wGkQPC?QPDcxuC)LoXRaHAGtjPe+Y 
zQX#=3W3-r3hM=%g<)wVzKQ?hKriuMPR!pOHPVfbk7u^io>#W^t-0S0ux);%w=4`#@ zmwo%~)mzgJK8SMTQVVdBvG;|SWY<#IZd|DH(q;j7iD2T8#&M)p5Ikq}Gm(}+6{%HL zNQW8Q?u41FN^9Iyk(N`x414A+QxUBv)uEr2=7*EE;Qf($Y_Twqccr7|@PV3Hmrtr@ zMDMQG(8>APj`PcHrT)k6EU|w+SA(pzO`B-(&P%2dt@(#Aq`jtM&bL=PZXZdljT|ie z!*YW03K`wVzU<@8Rh?Pu5z57VVR}D(R-9a}Pc%42ovlmox{(2yZs{ z4GbTL-h|ktxJ#QqTk5M6e***U2KbiMNm2ui&)oV}762jKk@8WaPU-bxt8SFxscilnSO>3XG?rD5}m;r+CcQ60B^{s?3M2H6&tSe-aA)fn8myGbBtZNd%} zGqd(!i0YB;8#lKYfnp4k&I=}q{g1?I0@BdL0` zyTm&<@oA$vLc<^h&w5B<{bF_3&z>7*3e~BA3%bcAI+q(=~FJ*U1%g8+3h|2uTHe$+3WjP|vau%;h zUruwMU^3NtAM;WCQBk=_?ut%u@O`y)vB-VqIa6$iP^3*{;ya^_9vnW~7?5C#x45+Zz~)J)5J5+r-0*OZ zm1`#PYG+UMjAY4LXHUA`FLy=$)!no~ zoiR*OulK`)olPtZjf3zD@x&Ox)@DE|QhK$qD3#c6@@BwV3%Kw|84ot^Z^*}<(QGJF zUI6#zVj*W_8(LJg0|zhbh$xj-brZ9Y$NQgeOYhv-*|sUNri}=fZL4FwQ-sEHTz(<0 zd9#mTif$%Dq2^PbxRGc$ORf25NXA4(wNI0_%~7M?3qQtmL4V@I?ZEsymxjQ+)o&^W z6a3H*)|}Xgts2L=a_s{?jBd_$CD!Vc9%rA3r8Z!)+MV}}T!n0~kQsIJyHq#d=f)Kc zbD=UaMUqnMcw%yHx9+q)H7_w-dWMx{aNVWw9>bT+;Kpj9cYc3brk5+nAJ{_@^zB$L zcf;9N`;APKpcZYHLSWTCufUvi+i6bSSp&tW+gUee5`4 zZ1syk)y0EON5{UOb=6jUel1>Y!^gE<$mf=(I@^l2W*3PkiETcrs;FF+!Oi;X= zL)6E~=q%Q#dKVMn)?mNSr+Rf2#Js7uxVNXeLWmsFQnC!*+<)6#l67@%0PSL#>*YL> zpxv67a)fqh>adg+XNb>cyOIrmJFz~?*R<0~AF=q%H_Cs0ILJ4MR2#QAA+abbLF+y>4S$%x%L|Flr}5#2~4V{P9zSQ*Ha@9 zFsU>7u`|jRhN4`cl6EQpjMf(B04Bce^wyAe!K>0%eq@w&k9M5f@cDZs!qOxPmsn$W zjRK=ywc8KCf`?eeK^tAuMhe8ty;1V*>h0t=Xx(|9_pFJTTLx1d5cBdBOVu+CKiGhI zu|6lqBcF|8B(XCbzefUBeeDofq(u!$gu0fkNpVv=O)$j<2TVOk^GWo3V~1*^=R#~w zl5VR$Ws@e)iV|qYQj&P08YUIF?VEy-uAI?B7>Q`+2>8Io5ibT^ypVGXWs4IaD0Ydk z>W&hfO|fH`$t5L-08;@XiNDX)q5oO|j9%WN5(Xdd zcg=F-vg0HpfaT`Y(Yg7S@x{84#0)T=C{_>%q~MXuS(}<*k*kOYGM(a~y#^rt8muS_@O>!DdeW4VG=!Oixcw%6_DnlbbAf%UZHYi985@yw4}mn0rdr?kcL zb@r1Kl`T~kBdI?aUE1m|GLGN7iG6%uwmo|?Fz?Siea*6^3+4_IA{B?aZf;Fc`K+cC z3m!d0wiE%XJwxG@`gQ`~W$>ue4yW)kyZZdI*;7z|!1~W}sMJ_H~mw(&Ot*6%mnlDvp*P@)nrt`ki|oFF$hZzqz!q)5MXi5zayZPuMh} zbEoW#Hk^`8bh3tu_RBh6PNE}u!P&ZR*BcZqwA!x z8#dTLOVxO$J*%|JwQXFyLg+?j!}w6Ai(;t%?W?=t>ez*jNUz6TDD{Qx7Aifb6$dhj zww33Qh`EQ4W`yIx^iv?s6KyM@qSn*I`=^ghFLef5uo8W{GbM{~#`L}W2V%FJOM@2t zFYI!NyWy`5JE%j=W=oTOffA-c zsq@|Up)v=8+Re78J?X&Sm&T8p-@PP~&IQV%QD|q5!;VH#?*;$0eXP!b)}q0!+QuBw zy!XbLWgsQvKRzTHWoTlB6Gy&Q)qz9rUC$>dtTq9#N#)QaL(-rjN|)VxmyE8?OR zYju`c(k6DIny~GeDXFskGG(hbAE!x%a;tR#e6`QnNV2~S*ZL)1HjyQ~J zkIG@BqtC?~^s1|RNZKp6ArcD>v2>(!a_N3`wKe5gE;|>_J7hom$e`|a_8jA*IM#QO zME^~sPZt*#!9k=T{BE0JO-*i;$0jQCi2_#uKfMeowf&?khzBK8{E?i+SXR<_!3_=+tco*1@u0n@qxoWg0xhV^v-5>5xnY&Fht zgzG00_xCmQ_cdtKMGR=os5CW4_)g4P*zpWIqjK2zp@+krJ5t=|+Z@X2CE0!)D0=1y zouKRf%<%Lowpb)~b@XLNnHbB5dl!G;x4h$em?#yZ@Jk74Gn<_x!n`>c(}*#Z$P;Pr zf^_xr6tG9S{!=;n`u=hte5;7zUw}LEWCo)c*jr3pBK!ep3H>*m2?(U}IZ@*EF;qyX zGE}IhDx1Qf5$j=32LLPf0tHYA@nsIp;^eL3)2raCEtZ7J#kJ`U8V)kgbw>#niAsqH zQbPuCL^UKP2Zfm@+vM(YH>W{mp@JOEji#-cSDH-)tGJ}X#Dpa}`tH&>(&|>LiPnqi zv`Ro5xSH#$>)FMgnYKa&&EklP86CN5(z992-GdzTJb0WC+79j@w)Tz)W||9CY&RMx zu|5r*bv-Rt3yOYYC8TxR#zq&LPT$=&l#x2@O$V`1W!^ZF3^OcP*o-)TFSUN&SfRxX z(YK=o_4WU+Z5G^b+bq9qv3z6wza`+0tS6XZw-owoLHJL!zn|>qMc|LL-LJIoOTyn{ z{X8lCk%h*?u)a@Ge~F9keE;Qf0_Avu%EN?#|JC>3j3a%zsLHyT>Qv7 zB*n147mwd#{Vd>*tUWToFFO7n@n-{nB*Mu5CVutt|M76Y&P@OWm|p{C)4_z|>)C$* DH$YF+ diff --git a/lucene/src/test/org/apache/lucene/index/index.32.nocfs.zip b/lucene/src/test/org/apache/lucene/index/index.32.nocfs.zip index c32fbd689490697e9bcba846478a677473ec63e0..0b345da695b19aae2d67af49c7841f903934cef3 100644 GIT binary patch literal 7603 zcmc(j3pmv27sqGJ)iswPw=|*6bp|s-S%q=gOw<%YErysRX+_j7GFT-NxkiNzW!A1} zcO}^nl`$jfwsyO%jjlF}SWEf8W619}`z*8nr|19wd0xiD^ZA|gzVA8Ta}IMI3N3>W 
zKE4sPYknG<;>m)~!i7g@Bm6D=%}GJQVV)k+2vn`?Z8zcCyH6H@L_bF%5YyklzY@Sg zCLj<@026-3Ps0d{`LqV`TMOW0r(q;^P}F=V!ROGVcp(i2Lgd3b&+Isj#NsBqMHy;rPY>uN@&{vF05m$A(gKdgCJwezLBinek5 zr&^XTpN_}Z_zZ1-{Ve|X=#02_r`Q8p4t?}CoPS?f`nWH*LF@GoJu!H5IoBPHPgyc! z(n>j*N{s93%Yzb?Eu5r}mnQQ!($x%>t;m&P6+UDHHr$n6b<4k$Gq* zZ)HND3o$(o33vTO)@N^=fa_&oy?1@ze6DX}u~_T`gqpzv*;4hkl95=vpbS}VMGs*c z8^oz|AK)1NUv^bRF#p0NvyBtG4-lzeXeIY6CUl=CQqRcb_~A{$xFv>Us86i;1ithwss$*%EqgiwIg%Qd!MfDaE<#e;*0e% zwqLkxWOKLsxe#%Oe$6h<*TIXuesiqps?FflQ{bY^?!lgj`Ch**=qfsPax$GtV6)h| zGDGdpySp_|x+X>>LsR4GvsK(qUT3?YgKWTWy57of;a%-)Zf_G@>9|HF8lVXdnAk}< zDLWZobt4+F#i9GAgL^@qp|Y4&9`?LbO2DevxlIY*Gw-Gh%MQKuchEmeRL%)R{&X8D z1~fx|&oBY73&{6wezs@+qYi3C%XuRxYE8)))~ycS^|h8QY_jFY=k_Hj6-{`bLog=O z1b%@JK1pp9dspD&-@P@#yY%LakJ_2Zq*t_a zbV{p!;8z8OxXZ<_zkNONeE7kmgEp!WA7`}d+xop*{cr?l(06OV;rKEAW%>Gs9Y*7C zH?DCRPCa~YG;q_E5ijoH6;77U%%`VA-F$Yh_4&Nu~8cviu0hxgQgjoO>CMk4s)dUs`&!>yu}CBO803F3}pZUtL5y z|0PwH$dS_A*M!5OiD>zA2sL{Z6As=aNwsTFrq0Oyc-@M%J@S=#zM4jL1mlUNU!TuN z+qL+>VI!W4NBKHZ_Mh7nHJ4O`{&~M5v!`yij1NMZ(A9)mV@5qD#=~K|E8s+x&HZ@=5?u^mAHE_wWt}XWYYscX`IyXH1Zt!K^es9$= z!R>re;(pCxqtm!4w>yTEz0v~jT08jQP4@2U;;L%@tam7QW_oC0#Zm;<2a=hhqJ!o; zXklgw2nY-awv4#Bw2CTrC3CHeIo|^w5d_stlt)Fg=i?3Buvb9ZD`L8)8bYrUy?ZVs z6)s#nM%#1j751Ol9kse&>W_!(oHLRIC7zor6)G!6wsbjpALvvigvw|x!G{Z0czZL5MS@aPH&J zHwt&dHxcins(t`C?Kl_nQPK0qyl@i}5K)CV#-oGXx{8M*7Uz&rr3DrAaU;dU^$6ZQ zj^|5el%s^) zr>>DzpYGAGkkg2|Qm)Sv+w{EZk=MdRHRYiqvq{=1BihpfY`+-f)UKKm}9h^>6e$-3qqznt*O6l`N@yC%4pVJe#C3NRQm zoRrxjHENJmsN*X+ii=tXt;*W1vOnf_{__2r&&uMD*eu>r`*S_@zFFdhH|Y+@Gq(B8 zzN3a6`@P(<`pnG~K5Ei0kh;af@)M;BP{;zDtR|22`SbKw+~q;u7e1FhUcTk$=rZ`@gR1+xsULE+!p>Tc*N`4#_V6LXpo{$%|b+_Cw0(w}96p_dakwlp;%qd4_t8 z+j=fa;3g0o91qmS`HkMQupuAZDucE*m>k)AshFn`oavzRRLl2hz1^j;V_A9ul#pCc zt^?PwQ7ua8Zt=yIHu>YLo^eAhzhyon-IaYY7->~>?c51kvs~e%5i^DN6=U0F!);|R zf4CgcZ`-&7F2@5)L&T&w-cilQA9yV=0ZY&oXQvy@05ds#v2 zU0t69A+p4H=hx7jqyY<@?Ho@cDG~#PREK5=8^f&-kRu+s!R47pt+dA2kqjjSI+5se929$x3!P|EQhet*F zQ|7A#g>Ww{))}StNM0An$16KNKdfqdD`7V}C^6YC;SXEIgw!dbj$Y>DC)dda?R-HM zBiL;n-*A0jr<*Bb!`^kSq;_&3wURueK4kor$9BWF<6yEGq-BvC$BKB zF;S{Wy`mU$(f(4Fl9dOQy!`T7>ILeCW-Cg)!Q~6J7xZa$-pv%U&rzjbvU|yI<5POb z%-t(9S2NTQFLijo2B!QT=S(F#)3}~ zpsZfZw2#vjjAShM1Odvjz|OG1JV7!RyfKHeR^Vn>KrAI=!MkJ;OLQmtg{;}0Suz;B zPlg(14%EwkrfZgr1#d^8EHzLXOtU0*@}C1TZH6UzK*C!_s9iE3&k604JRsrSA9VAD zlMBH57Xb#{mL+jO!pn0gt5#vgE{X3dyq1QtHZB6JnaWx+AK`U0lvO&*lGrYIDGg=4m|-mx zLvv*SydH+KB0w7<^ilE{f|tf3mMDSMidYNA(41YuB4qZ#BA|BtC1QP$7_tU!67ca~ z#1OnVhT4@g%aS;T;Po(+H2_%9>R7Tn2CovKycJ;bI?EH)jQ>Q$00~4Obivnha2Wf9 I6I;Z80Avxw{r~^~ literal 12119 zcmd^_c|6qH|HnVG7-NYc%g7Q+B+Z>cxD|~ddtI{JR5VR9V=1yLU9yy94W+cmXs5-{ z^3|%Ms1&WLE9F{}x=LN`{LY7w&u4nX^tlo;}fw79~n9N|YbY;tpo%V6|weO)%?CE(EF*YsN#6-DAG2Go+j;3KG zM;iy47HDdqX@aH>+IS;5J{dF(Svd^kiW&_kRFa|xM&kk6*U_vTbu{siib0bMk6c~& z_77s+9?V{IhCg?TK`gnrNz!Stgit50k$G9OKnM1SPJ)9*>g3bA{{A&_a=ope&*a=W zqiDD9x{}?!s#`gxGI1)@TFhozONYZXm)K*WtLIA3TJ7k@E%x(u)Rs(nn!9U@7E!2s zx%=-Q7=x}aVur^U7CKV*OWDa7J6&(t>n`Mt*=+X|KYm$w*qa_1X%nh3t-Q31A0A>% zy$ErP0=L`ysPJ|0LOnw7t*4PdkM!XI2ZxO~;5RZoUlu1xcrY=0L*hv|As)7&_rYYq z`g52U5;pQGvDab<7R+SaBnx^?nXz$;HSsJ_Gh_-`p`BleiCeA4|4&x7gSl`=tS5iZ z9|t;_Fx*i@K)^^-^{KzVe>{Q6#J3Q%RUj6HklatNlxI6ZLPL1+#5L6eEI<_ygkYmZ z1cZ-7_PKT5=@MO|^7W^tU9Dk>?2tTJIXP+160eXN?Pv8D;Lp|q)B1g^3DYes?%0!j z3}f-TYV{ACcJ2OqOVzUruRru7MkBHZ0ReR=<2&)>*LS z(o_p9jvduE{qb*YiN(#^Y-t{#$ef3)fGZP`!F7PogjPCw?5Cis26@GJ&B zbvkP4wRAAlJ)jM3=ctht2~S--6tPXxsd@b^T~cbO>ZV_Cysi5f9}G#U70}iD3FA7( zv~8ywcug~CCNTIk#s`XPpQb^xB!j;~a$u^ZfeV3i47iYO_32n?@HO0s11_|fMqzW7 zk2KkWe~yV4TDAyXbB`?WL;oR1qlxNh#{XSQyFj67m{MrtK+^(E4Kz*A)Il3>O5u}1 
z(~zYITUrQZRTRq7;NgtAG~5A8ljM>03g7AiOT+CwY3DZhw!}L%3m=r}9-lZ~nIcM{;wM#8jt z+jYm`aX8#eY~tPnS(PTQ@f>^U6Dt&@^In9X+g?-QxMN?Z`GruUj@>&lcO*hQhST9S zr?*<3m3+$Totb50xK{hm^PgirtQKUoX zd^`6!W(92JC{;b1pmtBCOu|0O|cEOtaN|sy!}cQqf-e^#uKsWk%Yvm8`r&viZft zin8nympJ#RIVL8G`njv=Jo!cCFBrYQr<7WgFr0-}47!Bd>^r2$wMqWk+fwqd3d?3T z-^}~68CrgKaX{0xlHx132vdrdC!TC-yR+B1_U~VvpF(j-ZLcrZ*&MB(kT<34bj8m4 z>*?N0mqmSW z$TRf^7F9iDQ7~Bqiy~d=*3b{lSZn{mu9F8|8aF5w+QzC&t zSc%6wyNyrqYmvH|>9KtA0_i-;`R0B_AC2nUop06sH4nykv>nWeuiTTUQhDd=_dm0;%3z+vq(YjFGZ0bzFD8ae>kb=RMtX&uZ+?;2gXJbPp`FP zB$Xch<;f!AKluUn6&Y$0r8MUng`Txoo`ZqH)wQXF#+{R(>GPItDLI(=aMCN^NbY?j z+LT_!3VX#~ZP_iwTR+P=oHnp#UG^;3<4XO?C~(zQlG0=3yEZ@-hB=|no@ea6x6Lu? zpRRdbf6!9vp0kR}*R`6RSr-z2{z5=%#iR9g6->%7{-xS|G$G|#?)tX@5|A;lgLml-j6`wUxf?Y8L!TU-QevTP~=U6y( zq}N!6G}Dg6#Ygk}ihos~tI+JOSQfnBJ_K*)@S-O+&bLRIqiExMUvkdn`P%muY^cdE zRVJ)b4pa?vGK*IUesov)a(7J){aSIPOHqB&QJLq*9%bBn9OM;5O(ZOwe$^x2V@Ysy z#XHl^SL%MBZ`WxZw(n}_OX?L$6W-gW?4tw5_%*`{6!PzUNl1d*JaHB9q9}kiaO|3j z#SWC@mshWhz#DRg2+ZWYrm=av{zNx$`uNm+9mI%MtItkK$8WRqHvC1 z5GObyoZ`m`9_eNT<7a`AQgVaPTD;CyjJgY84?$&B8Ub8vG8*o80>SP}uKASTsZ?rd zBDI`fpx}N6v$3`s!ld^nKrWvpUy~E?Gk)`8c^CP52fBrF80z}5Z&Wwek-}YPI0UFiNCIn}>y# zfP%yo+%UJeP)?|0U@$+?&$moV)z$KjE7!!*=0wZqTScuN_m1tj)go(`k~YSg(wBvQ zLgAA4P|Pld&|Kqjzt-1clNzsFF2tMdX{o_m8WN7CsWB@(QaJGD!eF&q2H!R)-_;5q}_sPf*PA>EObc=$>K>0Sfh+`_@wy0A_>6f;J8FrB)SZR zjBA~MB@RV?9%P9{kuKnJ1I8#8MIIky{eU9T#UkW<$%C5$h)XPrJP%A4W{E?Q^57yk zz!HNZKMt`(-mB=0KXSfe#(=m4`Tu{bAxAj@OXRq|uc0FXmgpM#zN<|I83A~V;?U5p zL6%rF)IG=&i-zWd#|s$a4`>LT%|`BtvLP18ZHu-izVB)a6bIuHhlYxWSYpu7=COjf zMBc0D)a2lN4c>H=!NvS*qEfU8LnkPalfMXt3DT9KvCtVqBuiCUz!G_o-xp+%fF-&h z(MdmKTrUJH(FKXl>meuK6C_K)eEonR(MdgITzdvtViBZ_>OfpSAV_qs4jC6`h$XTh z(K#k0>jU@(%-0W?8als(WI2N8?f^?|t1L_k~ng9R* From fa5da66404b2bc136137714be6e112105f82d379 Mon Sep 17 00:00:00 2001 From: Shai Erera Date: Wed, 18 May 2011 18:51:39 +0000 Subject: [PATCH 05/27] LUCENE-3102: add no-wrap ability to CachingCollector git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1124379 13f79535-47bb-0310-9956-ffa450edef68 --- lucene/CHANGES.txt | 2 +- .../lucene/search/CachingCollector.java | 42 ++++++++++++++++++ .../lucene/search/TestCachingCollector.java | 13 ++++++ .../lucene/search/grouping/TestGrouping.java | 43 +++++++++++++++---- 4 files changed, 91 insertions(+), 9 deletions(-) diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt index 04b52168971..a9aa96824d3 100644 --- a/lucene/CHANGES.txt +++ b/lucene/CHANGES.txt @@ -412,7 +412,7 @@ New features bytes in RAM. (Mike McCandless) * LUCENE-1421, LUCENE-3102: added CachingCollector which allow you to cache - document IDs and scores encountered during the search, and "reply" them to + document IDs and scores encountered during the search, and "replay" them to another Collector. (Mike McCandless, Shai Erera) Optimizations diff --git a/lucene/src/java/org/apache/lucene/search/CachingCollector.java b/lucene/src/java/org/apache/lucene/search/CachingCollector.java index 18ea7f95787..9aa53ce79e7 100644 --- a/lucene/src/java/org/apache/lucene/search/CachingCollector.java +++ b/lucene/src/java/org/apache/lucene/search/CachingCollector.java @@ -308,6 +308,48 @@ public abstract class CachingCollector extends Collector { protected int base; protected int lastDocBase; + /** + * Creates a {@link CachingCollector} which does not wrap another collector. + * The cached documents and scores can later be {@link #replay(Collector) + * replayed}. 
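A minimal usage sketch of the no-wrap factory added here: collect once into the cache, then replay into one or more real collectors, falling back to re-running the search when the cache overflows maxRAMMB. The searcher, query, firstPassCollector and allGroupsCollector names are assumed placeholders; only the CachingCollector calls come from this change (compare the TestGrouping hunk later in this patch).

    // Collect once, caching doc IDs and scores, then replay into real collectors.
    CachingCollector cache = CachingCollector.create(
        false /* acceptDocsOutOfOrder */, true /* cacheScores */, 64.0 /* maxRAMMB */);
    searcher.search(query, cache);

    if (cache.isCached()) {
      // Everything fit under maxRAMMB -- replay the cached hits:
      cache.replay(firstPassCollector);
      cache.replay(allGroupsCollector);
    } else {
      // Cache overflowed -- fall back to re-executing the search:
      searcher.search(query, firstPassCollector);
      searcher.search(query, allGroupsCollector);
    }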
+ * + * @param acceptDocsOutOfOrder + * whether documents are allowed to be collected out-of-order + */ + public static CachingCollector create(final boolean acceptDocsOutOfOrder, boolean cacheScores, double maxRAMMB) { + Collector other = new Collector() { + @Override + public boolean acceptsDocsOutOfOrder() { + return acceptDocsOutOfOrder; + } + + @Override + public void setScorer(Scorer scorer) throws IOException {} + + @Override + public void collect(int doc) throws IOException {} + + @Override + public void setNextReader(AtomicReaderContext context) throws IOException {} + + }; + return create(other, cacheScores, maxRAMMB); + } + + /** + * Create a new {@link CachingCollector} that wraps the given collector and + * caches documents and scores up to the specified RAM threshold. + * + * @param other + * the Collector to wrap and delegate calls to. + * @param cacheScores + * whether to cache scores in addition to document IDs. Note that + * this increases the RAM consumed per doc + * @param maxRAMMB + * the maximum RAM in MB to consume for caching the documents and + * scores. If the collector exceeds the threshold, no documents and + * scores are cached. + */ public static CachingCollector create(Collector other, boolean cacheScores, double maxRAMMB) { return cacheScores ? new ScoreCachingCollector(other, maxRAMMB) : new NoScoreCachingCollector(other, maxRAMMB); } diff --git a/lucene/src/test/org/apache/lucene/search/TestCachingCollector.java b/lucene/src/test/org/apache/lucene/search/TestCachingCollector.java index bb30d540570..e812f25d207 100755 --- a/lucene/src/test/org/apache/lucene/search/TestCachingCollector.java +++ b/lucene/src/test/org/apache/lucene/search/TestCachingCollector.java @@ -171,5 +171,18 @@ public class TestCachingCollector extends LuceneTestCase { assertFalse(cc.isCached()); } } + + public void testNoWrappedCollector() throws Exception { + for (boolean cacheScores : new boolean[] { false, true }) { + // create w/ null wrapped collector, and test that the methods work + CachingCollector cc = CachingCollector.create(true, cacheScores, 50 * ONE_BYTE); + cc.setNextReader(null); + cc.setScorer(new MockScorer()); + cc.collect(0); + + assertTrue(cc.isCached()); + cc.replay(new NoOpCollector(true)); + } + } } diff --git a/modules/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java b/modules/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java index 92f9102c656..a2eaa57b78f 100644 --- a/modules/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java +++ b/modules/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java @@ -445,27 +445,54 @@ public class TestGrouping extends LuceneTestCase { final FirstPassGroupingCollector c1 = new FirstPassGroupingCollector("group", groupSort, groupOffset+topNGroups); final CachingCollector cCache; final Collector c; + + final boolean useWrappingCollector = random.nextBoolean(); + if (doCache) { final double maxCacheMB = random.nextDouble(); if (VERBOSE) { System.out.println("TEST: maxCacheMB=" + maxCacheMB); } - if (doAllGroups) { - cCache = CachingCollector.create(c1, true, maxCacheMB); - c = MultiCollector.wrap(cCache, allGroupsCollector); + if (useWrappingCollector) { + if (doAllGroups) { + cCache = CachingCollector.create(c1, true, maxCacheMB); + c = MultiCollector.wrap(cCache, allGroupsCollector); + } else { + c = cCache = CachingCollector.create(c1, true, maxCacheMB); + } } else { - c = cCache = CachingCollector.create(c1, true, maxCacheMB); + // Collect only into cache, 
then replay multiple times: + c = cCache = CachingCollector.create(false, true, maxCacheMB); } - } else if (doAllGroups) { - c = MultiCollector.wrap(c1, allGroupsCollector); - cCache = null; } else { - c = c1; cCache = null; + if (doAllGroups) { + c = MultiCollector.wrap(c1, allGroupsCollector); + } else { + c = c1; + } } + s.search(new TermQuery(new Term("content", searchTerm)), c); + if (doCache && !useWrappingCollector) { + if (cCache.isCached()) { + // Replay for first-pass grouping + cCache.replay(c1); + if (doAllGroups) { + // Replay for all groups: + cCache.replay(allGroupsCollector); + } + } else { + // Replay by re-running search: + s.search(new TermQuery(new Term("content", searchTerm)), c1); + if (doAllGroups) { + s.search(new TermQuery(new Term("content", searchTerm)), allGroupsCollector); + } + } + } + final Collection topGroups = c1.getTopGroups(groupOffset, fillFields); final TopGroups groupsResult; From d987a8d48fae4f8d420370821e3db35382e08e0e Mon Sep 17 00:00:00 2001 From: Michael McCandless Date: Thu, 19 May 2011 17:23:30 +0000 Subject: [PATCH 06/27] LUCENE-3123: create 50 segs not 100 (to test bg optimize) git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1125006 13f79535-47bb-0310-9956-ffa450edef68 --- lucene/src/test/org/apache/lucene/index/TestIndexWriter.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java b/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java index 17b09dfde4d..82c86ddc8ce 100644 --- a/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java +++ b/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java @@ -1045,11 +1045,11 @@ public class TestIndexWriter extends LuceneTestCase { newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). setOpenMode(OpenMode.CREATE). setMaxBufferedDocs(2). 
- setMergePolicy(newLogMergePolicy(101)) + setMergePolicy(newLogMergePolicy(51)) ); Document doc = new Document(); doc.add(newField("field", "aaa", Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS)); - for(int i=0;i<200;i++) + for(int i=0;i<100;i++) writer.addDocument(doc); writer.optimize(false); From 90f7f3f3b4829eb917dd13d4bf8862949a927f91 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Thu, 19 May 2011 17:37:22 +0000 Subject: [PATCH 07/27] SOLR-2531: remove some per-term waste in SimpleFacets git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1125011 13f79535-47bb-0310-9956-ffa450edef68 --- solr/src/java/org/apache/solr/request/SimpleFacets.java | 5 ----- 1 file changed, 5 deletions(-) diff --git a/solr/src/java/org/apache/solr/request/SimpleFacets.java b/solr/src/java/org/apache/solr/request/SimpleFacets.java index 37080c06461..dc81e6a5c17 100644 --- a/solr/src/java/org/apache/solr/request/SimpleFacets.java +++ b/solr/src/java/org/apache/solr/request/SimpleFacets.java @@ -656,7 +656,6 @@ public class SimpleFacets { } } - Term template = new Term(field); DocsEnum docsEnum = null; CharArr spare = new CharArr(); @@ -676,10 +675,6 @@ public class SimpleFacets { if (df >= minDfFilterCache) { // use the filter cache - // TODO: need a term query that takes a BytesRef to handle binary terms - spare.reset(); - ByteUtils.UTF8toUTF16(term, spare); - Term t = template.createTerm(spare.toString()); if (deState==null) { deState = new SolrIndexSearcher.DocsEnumState(); From dc76ad5ca25a80a78e560323661e6443ca09bc47 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Thu, 19 May 2011 21:32:15 +0000 Subject: [PATCH 08/27] CHANGES cleanup git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1125127 13f79535-47bb-0310-9956-ffa450edef68 --- lucene/CHANGES.txt | 29 ++++++++--------------------- 1 file changed, 8 insertions(+), 21 deletions(-) diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt index a9aa96824d3..dea6964f13f 100644 --- a/lucene/CHANGES.txt +++ b/lucene/CHANGES.txt @@ -444,27 +444,6 @@ Bug fixes with more document deletions is requested before a reader with fewer deletions, provided they share some segments. (yonik) -* LUCENE-2936: PhraseQuery score explanations were not correctly - identifying matches vs non-matches. (hossman) - -* LUCENE-2996: addIndexes(IndexReader) did not flush before adding the new - indexes, causing existing deletions to be applied on the incoming indexes as - well. (Shai Erera, Mike McCandless) - -* LUCENE-3068: sloppy phrase query failed to match valid documents when multiple - query terms had same position in the query. (Doron Cohen) - -Test Cases - -* LUCENE-3002: added 'tests.iter.min' to control 'tests.iter' by allowing to - stop iterating if at least 'tests.iter.min' ran and a failure occured. - (Shai Erera, Chris Hostetter) - -Build - -* LUCENE-3006: Building javadocs will fail on warnings by default. - Override with -Dfailonjavadocwarning=false (sarowe, gsingers) - ======================= Lucene 3.x (not yet released) ======================= Changes in backwards compatibility policy @@ -564,9 +543,17 @@ Bug fixes PhraseQuery as term with lower doc freq will also have less positions. (Uwe Schindler, Robert Muir, Otis Gospodnetic) +* LUCENE-3068: sloppy phrase query failed to match valid documents when multiple + query terms had same position in the query. 
(Doron Cohen) + * LUCENE-3012: Lucene writes the header now for separate norm files (*.sNNN) (Robert Muir) +Build + +* LUCENE-3006: Building javadocs will fail on warnings by default. + Override with -Dfailonjavadocwarning=false (sarowe, gsingers) + Test Cases * LUCENE-3002: added 'tests.iter.min' to control 'tests.iter' by allowing to From 0cce52fb922bddb68dda6158afc8bc9d3787ca3f Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Thu, 19 May 2011 21:49:42 +0000 Subject: [PATCH 09/27] CHANGES cleanup git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1125134 13f79535-47bb-0310-9956-ffa450edef68 --- lucene/CHANGES.txt | 27 ++++++++++----------------- 1 file changed, 10 insertions(+), 17 deletions(-) diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt index dea6964f13f..191d740ce70 100644 --- a/lucene/CHANGES.txt +++ b/lucene/CHANGES.txt @@ -275,10 +275,6 @@ API Changes * LUCENE-2953: In addition to changes in 3.x, PriorityQueue#initialize(int) function was moved into the ctor. (Uwe Schindler, Yonik Seeley) -* LUCENE-3061: IndexWriter's getNextMerge() and merge(OneMerge) are now public - (though @lucene.experimental), allowing for custom MergeScheduler - implementations. (Shai Erera) - New features * LUCENE-2604: Added RegexpQuery support to QueryParser. Regular expressions @@ -334,9 +330,6 @@ New features * LUCENE-2489: Added PerFieldCodecWrapper (in oal.index.codecs) which lets you set the Codec per field (Mike McCandless) -* LUCENE-1810: Added FieldSelectorResult.LATENT to not cache lazy loaded fields - (Tim Smith, Grant Ingersoll) - * LUCENE-2373: Extend CodecProvider to use SegmentInfosWriter and SegmentInfosReader to allow customization of SegmentInfos data. (Andrzej Bialecki) @@ -366,9 +359,6 @@ New features use MultiFields static methods directly, instead) if you need to use the flex APIs directly on a composite reader. (Mike McCandless) -* LUCENE-2692: Added several new SpanQuery classes for positional checking - (match is in a range, payload is a specific value) (Grant Ingersoll) - * LUCENE-2690: MultiTermQuery boolean rewrites per segment. (Uwe Schindler, Robert Muir, Mike McCandless, Simon Willnauer) @@ -401,9 +391,6 @@ New features * LUCENE-2862: Added TermsEnum.totalTermFreq() and Terms.getSumTotalTermFreq(). (Mike McCandless, Robert Muir) -* LUCENE-3001: Added TrieFieldHelper to write solr compatible numeric - fields without the solr dependency. (ryan) - * LUCENE-3003: Added new expert class oal.index.DocTermsOrd, refactored from Solr's UnInvertedField, for accessing term ords for multi-valued fields, per document. This is similar to FieldCache in @@ -411,10 +398,6 @@ New features it's able to handle multi-valued fields and does not hold the term bytes in RAM. (Mike McCandless) -* LUCENE-1421, LUCENE-3102: added CachingCollector which allow you to cache - document IDs and scores encountered during the search, and "replay" them to - another Collector. (Mike McCandless, Shai Erera) - Optimizations * LUCENE-2588: Don't store unnecessary suffixes when writing the terms @@ -499,6 +482,10 @@ New features * LUCENE-3071: Adding ReversePathHierarchyTokenizer, added skip parameter to PathHierarchyTokenizer (Olivier Favre via ryan) +* LUCENE-1421, LUCENE-3102: added CachingCollector which allow you to cache + document IDs and scores encountered during the search, and "replay" them to + another Collector. 
(Mike McCandless, Shai Erera) + API Changes * LUCENE-3061: IndexWriter's getNextMerge() and merge(OneMerge) are now public @@ -1098,6 +1085,12 @@ New features * LUCENE-2913: Add missing getters to Numeric* classes. (Uwe Schindler) +* LUCENE-1810: Added FieldSelectorResult.LATENT to not cache lazy loaded fields + (Tim Smith, Grant Ingersoll) + +* LUCENE-2692: Added several new SpanQuery classes for positional checking + (match is in a range, payload is a specific value) (Grant Ingersoll) + Optimizations * LUCENE-2494: Use CompletionService in ParallelMultiSearcher instead of From 4832ff03592031b0ed3899e6c64484c61ea4058a Mon Sep 17 00:00:00 2001 From: Koji Sekiguchi Date: Thu, 19 May 2011 22:41:19 +0000 Subject: [PATCH 10/27] SOLR-2528: set HtmlEncoder default off in example solrconfig.xml git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1125150 13f79535-47bb-0310-9956-ffa450edef68 --- solr/CHANGES.txt | 3 +++ solr/example/solr/conf/solrconfig.xml | 1 - 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index fcf50c1367d..74d81f56e96 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -246,6 +246,9 @@ Other Changes variance in asserting score comparisons in unit tests. (David Smiley, Chris Hostetter) +* SOLR-2528: Remove default="true" from HtmlEncoder in example solrconfig.xml, + because html encoding confuses non-ascii users. (koji) + Documentation ---------------------- diff --git a/solr/example/solr/conf/solrconfig.xml b/solr/example/solr/conf/solrconfig.xml index aebbaddc46c..594d3c80508 100755 --- a/solr/example/solr/conf/solrconfig.xml +++ b/solr/example/solr/conf/solrconfig.xml @@ -1376,7 +1376,6 @@ From 897ad478774639cdb59e691641742e821f601020 Mon Sep 17 00:00:00 2001 From: Koji Sekiguchi Date: Thu, 19 May 2011 22:50:09 +0000 Subject: [PATCH 11/27] SOLR-2528: move change log entry git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1125156 13f79535-47bb-0310-9956-ffa450edef68 --- solr/CHANGES.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 74d81f56e96..ef172696315 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -246,9 +246,6 @@ Other Changes variance in asserting score comparisons in unit tests. (David Smiley, Chris Hostetter) -* SOLR-2528: Remove default="true" from HtmlEncoder in example solrconfig.xml, - because html encoding confuses non-ascii users. (koji) - Documentation ---------------------- @@ -343,6 +340,9 @@ Other Changes * SOLR-2105: Rename RequestHandler param 'update.processor' to 'update.chain'. (Jan Høydahl via Mark Miller) +* SOLR-2528: Remove default="true" from HtmlEncoder in example solrconfig.xml, + because html encoding confuses non-ascii users. 
(koji) + Build ---------------------- From ba7845ae74e456d69bfc567549435caca6e05209 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Thu, 19 May 2011 23:02:52 +0000 Subject: [PATCH 12/27] add some javadocs to the test-framework analysis components git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1125165 13f79535-47bb-0310-9956-ffa450edef68 --- .../lucene/analysis/BaseTokenStreamTestCase.java | 10 +++++++++- .../org/apache/lucene/analysis/MockAnalyzer.java | 13 +++++++++++++ .../org/apache/lucene/analysis/MockTokenizer.java | 11 ++++++++++- 3 files changed, 32 insertions(+), 2 deletions(-) diff --git a/lucene/src/test-framework/org/apache/lucene/analysis/BaseTokenStreamTestCase.java b/lucene/src/test-framework/org/apache/lucene/analysis/BaseTokenStreamTestCase.java index c5bb9f26448..382e2f42ce5 100644 --- a/lucene/src/test-framework/org/apache/lucene/analysis/BaseTokenStreamTestCase.java +++ b/lucene/src/test-framework/org/apache/lucene/analysis/BaseTokenStreamTestCase.java @@ -30,7 +30,15 @@ import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util._TestUtil; /** - * Base class for all Lucene unit tests that use TokenStreams. + * Base class for all Lucene unit tests that use TokenStreams. + *
+ * When writing unit tests for analysis components, its highly recommended + * to use the helper methods here (especially in conjunction with {@link MockAnalyzer} or + * {@link MockTokenizer}), as they contain many assertions and checks to + * catch bugs. + * + * @see MockAnalyzer + * @see MockTokenizer */ public abstract class BaseTokenStreamTestCase extends LuceneTestCase { // some helpers to test Analyzers and TokenStreams: diff --git a/lucene/src/test-framework/org/apache/lucene/analysis/MockAnalyzer.java b/lucene/src/test-framework/org/apache/lucene/analysis/MockAnalyzer.java index 3818d071f99..b94e3846733 100644 --- a/lucene/src/test-framework/org/apache/lucene/analysis/MockAnalyzer.java +++ b/lucene/src/test-framework/org/apache/lucene/analysis/MockAnalyzer.java @@ -27,6 +27,19 @@ import org.apache.lucene.util.automaton.CharacterRunAutomaton; /** * Analyzer for testing + *
+ * This analyzer is a replacement for Whitespace/Simple/KeywordAnalyzers
+ * for unit tests. If you are testing a custom component such as a queryparser
+ * or analyzer-wrapper that consumes analysis streams, its a great idea to test
+ * it with this analyzer instead. MockAnalyzer has the following behavior:
+ * <ul>
+ *   <li>By default, the assertions in {@link MockTokenizer} are turned on for extra
+ *       checks that the consumer is consuming properly. These checks can be disabled
+ *       with {@link #setEnableChecks(boolean)}.
+ *   <li>Payload data is randomly injected into the stream for more thorough testing
+ *       of payloads.
+ * </ul>
+ * @see MockTokenizer */ public final class MockAnalyzer extends Analyzer { private final CharacterRunAutomaton runAutomaton; diff --git a/lucene/src/test-framework/org/apache/lucene/analysis/MockTokenizer.java b/lucene/src/test-framework/org/apache/lucene/analysis/MockTokenizer.java index 5b9f7a016c8..fb4c0530ee1 100644 --- a/lucene/src/test-framework/org/apache/lucene/analysis/MockTokenizer.java +++ b/lucene/src/test-framework/org/apache/lucene/analysis/MockTokenizer.java @@ -26,7 +26,16 @@ import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.apache.lucene.util.automaton.RegExp; /** - * Automaton-based tokenizer for testing. Optionally lowercases. + * Tokenizer for testing. + *
+ * This tokenizer is a replacement for {@link #WHITESPACE}, {@link #SIMPLE}, and {@link #KEYWORD}
+ * tokenizers. If you are writing a component such as a TokenFilter, its a great idea to test
+ * it wrapping this tokenizer instead for extra checks. This tokenizer has the following behavior:
+ * <ul>
+ *   <li>An internal state-machine is used for checking consumer consistency. These checks can
+ *       be disabled with {@link #setEnableChecks(boolean)}.
+ *   <li>For convenience, optionally lowercases terms that it outputs.
+ * </ul>
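A minimal sketch of how these test components are meant to be consumed, assuming a test class that extends BaseTokenStreamTestCase (so assertAnalyzesTo and the test-framework random are in scope) and MockAnalyzer's default whitespace-plus-lowercasing configuration:

    public void testSimpleTokens() throws Exception {
      Analyzer a = new MockAnalyzer(random);
      // assertAnalyzesTo runs the consumer-consistency checks described above
      // while verifying the produced terms:
      assertAnalyzesTo(a, "Some Text", new String[] { "some", "text" });
    }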
*/ public class MockTokenizer extends Tokenizer { /** Acts Similar to WhitespaceTokenizer */ From cee538500e7a84ea2920c7f3e23c48a9a8061fc0 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Fri, 20 May 2011 13:33:21 +0000 Subject: [PATCH 13/27] LUCENE-3117: pull SegmentReader.Norms out of SegmentReader.java git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1125376 13f79535-47bb-0310-9956-ffa450edef68 --- .../org/apache/lucene/index/SegmentNorms.java | 240 ++++++++++++++++++ .../apache/lucene/index/SegmentReader.java | 239 +---------------- .../lucene/index/TestIndexReaderClone.java | 4 +- .../index/TestIndexReaderCloneNorms.java | 12 +- 4 files changed, 262 insertions(+), 233 deletions(-) create mode 100644 lucene/src/java/org/apache/lucene/index/SegmentNorms.java diff --git a/lucene/src/java/org/apache/lucene/index/SegmentNorms.java b/lucene/src/java/org/apache/lucene/index/SegmentNorms.java new file mode 100644 index 00000000000..64f282291d3 --- /dev/null +++ b/lucene/src/java/org/apache/lucene/index/SegmentNorms.java @@ -0,0 +1,240 @@ +package org.apache.lucene.index; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import java.io.IOException; +import java.util.concurrent.atomic.AtomicInteger; + +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.IndexOutput; + +/** + * Byte[] referencing is used because a new norm object needs + * to be created for each clone, and the byte array is all + * that is needed for sharing between cloned readers. The + * current norm referencing is for sharing between readers + * whereas the byte[] referencing is for copy on write which + * is independent of reader references (i.e. incRef, decRef). 
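For orientation, a standalone sketch of the copy-on-write byte[] sharing this comment describes; it is not the Lucene class itself (SegmentNorms follows below) and the names are invented for illustration:

    import java.util.concurrent.atomic.AtomicInteger;

    // Cloned readers share one byte[] through a reference count; a writer makes
    // a private copy only while the array is still shared (compare copyOnWrite()).
    final class SharedNormBytes {
      private AtomicInteger refs = new AtomicInteger(1); // holders sharing "bytes"
      private byte[] bytes;

      SharedNormBytes(byte[] bytes) {
        this.bytes = bytes;
      }

      // Called when a reader is cloned: share both the bytes and the counter.
      synchronized SharedNormBytes cloneShared() {
        SharedNormBytes clone = new SharedNormBytes(bytes);
        clone.refs = refs;
        refs.incrementAndGet();
        return clone;
      }

      // Called before a norm value is modified.
      synchronized byte[] copyOnWrite() {
        if (refs.get() > 1) {       // still shared with other readers
          refs.decrementAndGet();   // detach from the shared array
          bytes = bytes.clone();    // private copy other readers never see
          refs = new AtomicInteger(1);
        }
        return bytes;
      }
    }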
+ */ + +final class SegmentNorms implements Cloneable { + int refCount = 1; + + // If this instance is a clone, the originalNorm + // references the Norm that has a real open IndexInput: + private SegmentNorms origNorm; + + private IndexInput in; + private long normSeek; + + // null until bytes is set + private AtomicInteger bytesRef; + private byte[] bytes; + private int number; + + boolean dirty; + boolean rollbackDirty; + + private final SegmentReader owner; + + public SegmentNorms(IndexInput in, int number, long normSeek, SegmentReader owner) { + this.in = in; + this.number = number; + this.normSeek = normSeek; + this.owner = owner; + } + + public synchronized void incRef() { + assert refCount > 0 && (origNorm == null || origNorm.refCount > 0); + refCount++; + } + + private void closeInput() throws IOException { + if (in != null) { + if (in != owner.singleNormStream) { + // It's private to us -- just close it + in.close(); + } else { + // We are sharing this with others -- decRef and + // maybe close the shared norm stream + if (owner.singleNormRef.decrementAndGet() == 0) { + owner.singleNormStream.close(); + owner.singleNormStream = null; + } + } + + in = null; + } + } + + public synchronized void decRef() throws IOException { + assert refCount > 0 && (origNorm == null || origNorm.refCount > 0); + + if (--refCount == 0) { + if (origNorm != null) { + origNorm.decRef(); + origNorm = null; + } else { + closeInput(); + } + + if (bytes != null) { + assert bytesRef != null; + bytesRef.decrementAndGet(); + bytes = null; + bytesRef = null; + } else { + assert bytesRef == null; + } + } + } + + // Load & cache full bytes array. Returns bytes. + public synchronized byte[] bytes() throws IOException { + assert refCount > 0 && (origNorm == null || origNorm.refCount > 0); + if (bytes == null) { // value not yet read + assert bytesRef == null; + if (origNorm != null) { + // Ask origNorm to load so that for a series of + // reopened readers we share a single read-only + // byte[] + bytes = origNorm.bytes(); + bytesRef = origNorm.bytesRef; + bytesRef.incrementAndGet(); + + // Once we've loaded the bytes we no longer need + // origNorm: + origNorm.decRef(); + origNorm = null; + + } else { + // We are the origNorm, so load the bytes for real + // ourself: + final int count = owner.maxDoc(); + bytes = new byte[count]; + + // Since we are orig, in must not be null + assert in != null; + + // Read from disk. + synchronized(in) { + in.seek(normSeek); + in.readBytes(bytes, 0, count, false); + } + + bytesRef = new AtomicInteger(1); + closeInput(); + } + } + + return bytes; + } + + // Only for testing + AtomicInteger bytesRef() { + return bytesRef; + } + + // Called if we intend to change a norm value. We make a + // private copy of bytes if it's shared with others: + public synchronized byte[] copyOnWrite() throws IOException { + assert refCount > 0 && (origNorm == null || origNorm.refCount > 0); + bytes(); + assert bytes != null; + assert bytesRef != null; + if (bytesRef.get() > 1) { + // I cannot be the origNorm for another norm + // instance if I'm being changed. 
Ie, only the + // "head Norm" can be changed: + assert refCount == 1; + final AtomicInteger oldRef = bytesRef; + bytes = owner.cloneNormBytes(bytes); + bytesRef = new AtomicInteger(1); + oldRef.decrementAndGet(); + } + dirty = true; + return bytes; + } + + // Returns a copy of this Norm instance that shares + // IndexInput & bytes with the original one + @Override + public synchronized Object clone() { + assert refCount > 0 && (origNorm == null || origNorm.refCount > 0); + + SegmentNorms clone; + try { + clone = (SegmentNorms) super.clone(); + } catch (CloneNotSupportedException cnse) { + // Cannot happen + throw new RuntimeException("unexpected CloneNotSupportedException", cnse); + } + clone.refCount = 1; + + if (bytes != null) { + assert bytesRef != null; + assert origNorm == null; + + // Clone holds a reference to my bytes: + clone.bytesRef.incrementAndGet(); + } else { + assert bytesRef == null; + if (origNorm == null) { + // I become the origNorm for the clone: + clone.origNorm = this; + } + clone.origNorm.incRef(); + } + + // Only the origNorm will actually readBytes from in: + clone.in = null; + + return clone; + } + + // Flush all pending changes to the next generation + // separate norms file. + public void reWrite(SegmentInfo si) throws IOException { + assert refCount > 0 && (origNorm == null || origNorm.refCount > 0): "refCount=" + refCount + " origNorm=" + origNorm; + + // NOTE: norms are re-written in regular directory, not cfs + si.advanceNormGen(this.number); + final String normFileName = si.getNormFileName(this.number); + IndexOutput out = owner.directory().createOutput(normFileName); + boolean success = false; + try { + try { + out.writeBytes(SegmentMerger.NORMS_HEADER, 0, SegmentMerger.NORMS_HEADER.length); + out.writeBytes(bytes, owner.maxDoc()); + } finally { + out.close(); + } + success = true; + } finally { + if (!success) { + try { + owner.directory().deleteFile(normFileName); + } catch (Throwable t) { + // suppress this so we keep throwing the + // original exception + } + } + } + this.dirty = false; + } +} diff --git a/lucene/src/java/org/apache/lucene/index/SegmentReader.java b/lucene/src/java/org/apache/lucene/index/SegmentReader.java index ec045c416f5..1bfe13b8a88 100644 --- a/lucene/src/java/org/apache/lucene/index/SegmentReader.java +++ b/lucene/src/java/org/apache/lucene/index/SegmentReader.java @@ -33,7 +33,6 @@ import org.apache.lucene.index.codecs.FieldsProducer; import org.apache.lucene.store.BufferedIndexInput; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IndexInput; -import org.apache.lucene.store.IndexOutput; import org.apache.lucene.util.BitVector; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; @@ -68,8 +67,8 @@ public class SegmentReader extends IndexReader implements Cloneable { private int rollbackPendingDeleteCount; // optionally used for the .nrm file shared by multiple norms - private IndexInput singleNormStream; - private AtomicInteger singleNormRef; + IndexInput singleNormStream; + AtomicInteger singleNormRef; CoreReaders core; @@ -250,219 +249,9 @@ public class SegmentReader extends IndexReader implements Cloneable { } } - /** - * Byte[] referencing is used because a new norm object needs - * to be created for each clone, and the byte array is all - * that is needed for sharing between cloned readers. The - * current norm referencing is for sharing between readers - * whereas the byte[] referencing is for copy on write which - * is independent of reader references (i.e. incRef, decRef). 
- */ - final class Norm implements Cloneable { - private int refCount = 1; - // If this instance is a clone, the originalNorm - // references the Norm that has a real open IndexInput: - private Norm origNorm; - - private IndexInput in; - private long normSeek; - - // null until bytes is set - private AtomicInteger bytesRef; - private byte[] bytes; - private boolean dirty; - private int number; - private boolean rollbackDirty; - - public Norm(IndexInput in, int number, long normSeek) { - this.in = in; - this.number = number; - this.normSeek = normSeek; - } - - public synchronized void incRef() { - assert refCount > 0 && (origNorm == null || origNorm.refCount > 0); - refCount++; - } - - private void closeInput() throws IOException { - if (in != null) { - if (in != singleNormStream) { - // It's private to us -- just close it - in.close(); - } else { - // We are sharing this with others -- decRef and - // maybe close the shared norm stream - if (singleNormRef.decrementAndGet() == 0) { - singleNormStream.close(); - singleNormStream = null; - } - } - - in = null; - } - } - - public synchronized void decRef() throws IOException { - assert refCount > 0 && (origNorm == null || origNorm.refCount > 0); - - if (--refCount == 0) { - if (origNorm != null) { - origNorm.decRef(); - origNorm = null; - } else { - closeInput(); - } - - if (bytes != null) { - assert bytesRef != null; - bytesRef.decrementAndGet(); - bytes = null; - bytesRef = null; - } else { - assert bytesRef == null; - } - } - } - - // Load & cache full bytes array. Returns bytes. - public synchronized byte[] bytes() throws IOException { - assert refCount > 0 && (origNorm == null || origNorm.refCount > 0); - if (bytes == null) { // value not yet read - assert bytesRef == null; - if (origNorm != null) { - // Ask origNorm to load so that for a series of - // reopened readers we share a single read-only - // byte[] - bytes = origNorm.bytes(); - bytesRef = origNorm.bytesRef; - bytesRef.incrementAndGet(); - - // Once we've loaded the bytes we no longer need - // origNorm: - origNorm.decRef(); - origNorm = null; - - } else { - // We are the origNorm, so load the bytes for real - // ourself: - final int count = maxDoc(); - bytes = new byte[count]; - - // Since we are orig, in must not be null - assert in != null; - - // Read from disk. - synchronized(in) { - in.seek(normSeek); - in.readBytes(bytes, 0, count, false); - } - - bytesRef = new AtomicInteger(1); - closeInput(); - } - } - - return bytes; - } - - // Only for testing - AtomicInteger bytesRef() { - return bytesRef; - } - - // Called if we intend to change a norm value. We make a - // private copy of bytes if it's shared with others: - public synchronized byte[] copyOnWrite() throws IOException { - assert refCount > 0 && (origNorm == null || origNorm.refCount > 0); - bytes(); - assert bytes != null; - assert bytesRef != null; - if (bytesRef.get() > 1) { - // I cannot be the origNorm for another norm - // instance if I'm being changed. 
Ie, only the - // "head Norm" can be changed: - assert refCount == 1; - final AtomicInteger oldRef = bytesRef; - bytes = cloneNormBytes(bytes); - bytesRef = new AtomicInteger(1); - oldRef.decrementAndGet(); - } - dirty = true; - return bytes; - } - - // Returns a copy of this Norm instance that shares - // IndexInput & bytes with the original one - @Override - public synchronized Object clone() { - assert refCount > 0 && (origNorm == null || origNorm.refCount > 0); - - Norm clone; - try { - clone = (Norm) super.clone(); - } catch (CloneNotSupportedException cnse) { - // Cannot happen - throw new RuntimeException("unexpected CloneNotSupportedException", cnse); - } - clone.refCount = 1; - - if (bytes != null) { - assert bytesRef != null; - assert origNorm == null; - - // Clone holds a reference to my bytes: - clone.bytesRef.incrementAndGet(); - } else { - assert bytesRef == null; - if (origNorm == null) { - // I become the origNorm for the clone: - clone.origNorm = this; - } - clone.origNorm.incRef(); - } - - // Only the origNorm will actually readBytes from in: - clone.in = null; - - return clone; - } - - // Flush all pending changes to the next generation - // separate norms file. - public void reWrite(SegmentInfo si) throws IOException { - assert refCount > 0 && (origNorm == null || origNorm.refCount > 0): "refCount=" + refCount + " origNorm=" + origNorm; - - // NOTE: norms are re-written in regular directory, not cfs - si.advanceNormGen(this.number); - final String normFileName = si.getNormFileName(this.number); - IndexOutput out = directory().createOutput(normFileName); - boolean success = false; - try { - try { - out.writeBytes(SegmentMerger.NORMS_HEADER, 0, SegmentMerger.NORMS_HEADER.length); - out.writeBytes(bytes, maxDoc()); - } finally { - out.close(); - } - success = true; - } finally { - if (!success) { - try { - directory().deleteFile(normFileName); - } catch (Throwable t) { - // suppress this so we keep throwing the - // original exception - } - } - } - this.dirty = false; - } - } - - Map norms = new HashMap(); + Map norms = new HashMap(); /** * @throws CorruptIndexException if the index is corrupt @@ -660,16 +449,16 @@ public class SegmentReader extends IndexReader implements Cloneable { } } - clone.norms = new HashMap(); + clone.norms = new HashMap(); // Clone norms for (FieldInfo fi : core.fieldInfos) { // Clone unchanged norms to the cloned reader if (doClone || !fieldNormsChanged.contains(fi.number)) { final String curField = fi.name; - Norm norm = this.norms.get(curField); + SegmentNorms norm = this.norms.get(curField); if (norm != null) - clone.norms.put(curField, (Norm) norm.clone()); + clone.norms.put(curField, (SegmentNorms) norm.clone()); } } @@ -739,7 +528,7 @@ public class SegmentReader extends IndexReader implements Cloneable { if (normsDirty) { // re-write norms si.initNormGen(); - for (final Norm norm : norms.values()) { + for (final SegmentNorms norm : norms.values()) { if (norm.dirty) { norm.reWrite(si); } @@ -765,7 +554,7 @@ public class SegmentReader extends IndexReader implements Cloneable { deletedDocs = null; } - for (final Norm norm : norms.values()) { + for (final SegmentNorms norm : norms.values()) { norm.decRef(); } if (core != null) { @@ -935,7 +724,7 @@ public class SegmentReader extends IndexReader implements Cloneable { @Override public byte[] norms(String field) throws IOException { ensureOpen(); - final Norm norm = norms.get(field); + final SegmentNorms norm = norms.get(field); if (norm == null) { // not indexed, or norms not stored return null; 
@@ -946,7 +735,7 @@ public class SegmentReader extends IndexReader implements Cloneable { @Override protected void doSetNorm(int doc, String field, byte value) throws IOException { - Norm norm = norms.get(field); + SegmentNorms norm = norms.get(field); if (norm == null) // not an indexed field return; @@ -1004,7 +793,7 @@ public class SegmentReader extends IndexReader implements Cloneable { } } - norms.put(fi.name, new Norm(normInput, fi.number, normSeek)); + norms.put(fi.name, new SegmentNorms(normInput, fi.number, normSeek, this)); nextNormSeek += maxDoc; // increment also if some norms are separate } } @@ -1024,7 +813,7 @@ public class SegmentReader extends IndexReader implements Cloneable { if (singleNormStream != null) { return false; } - for (final Norm norm : norms.values()) { + for (final SegmentNorms norm : norms.values()) { if (norm.refCount > 0) { return false; } @@ -1171,7 +960,7 @@ public class SegmentReader extends IndexReader implements Cloneable { rollbackDeletedDocsDirty = deletedDocsDirty; rollbackNormsDirty = normsDirty; rollbackPendingDeleteCount = pendingDeleteCount; - for (Norm norm : norms.values()) { + for (SegmentNorms norm : norms.values()) { norm.rollbackDirty = norm.dirty; } } @@ -1182,7 +971,7 @@ public class SegmentReader extends IndexReader implements Cloneable { deletedDocsDirty = rollbackDeletedDocsDirty; normsDirty = rollbackNormsDirty; pendingDeleteCount = rollbackPendingDeleteCount; - for (Norm norm : norms.values()) { + for (SegmentNorms norm : norms.values()) { norm.dirty = norm.rollbackDirty; } } diff --git a/lucene/src/test/org/apache/lucene/index/TestIndexReaderClone.java b/lucene/src/test/org/apache/lucene/index/TestIndexReaderClone.java index c26dd34aa92..87e866829b3 100644 --- a/lucene/src/test/org/apache/lucene/index/TestIndexReaderClone.java +++ b/lucene/src/test/org/apache/lucene/index/TestIndexReaderClone.java @@ -17,7 +17,7 @@ package org.apache.lucene.index; * limitations under the License. 
*/ -import org.apache.lucene.index.SegmentReader.Norm; +import org.apache.lucene.index.SegmentNorms; import org.apache.lucene.search.DefaultSimilarity; import org.apache.lucene.search.Similarity; import org.apache.lucene.analysis.MockAnalyzer; @@ -338,7 +338,7 @@ public class TestIndexReaderClone extends LuceneTestCase { origSegmentReader.close(); assertDelDocsRefCountEquals(1, origSegmentReader); // check the norm refs - Norm norm = clonedSegmentReader.norms.get("field1"); + SegmentNorms norm = clonedSegmentReader.norms.get("field1"); assertEquals(1, norm.bytesRef().get()); clonedSegmentReader.close(); dir1.close(); diff --git a/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java b/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java index 34064dd9f28..32cef3ea5db 100644 --- a/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java +++ b/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java @@ -29,7 +29,7 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.Field.Index; import org.apache.lucene.document.Field.Store; import org.apache.lucene.index.IndexWriterConfig.OpenMode; -import org.apache.lucene.index.SegmentReader.Norm; +import org.apache.lucene.index.SegmentNorms; import org.apache.lucene.search.DefaultSimilarity; import org.apache.lucene.search.DefaultSimilarityProvider; import org.apache.lucene.search.Similarity; @@ -184,7 +184,7 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase { TestIndexReaderReopen.createIndex(random, dir1, false); SegmentReader reader1 = getOnlySegmentReader(IndexReader.open(dir1, false)); reader1.norms("field1"); - Norm r1norm = reader1.norms.get("field1"); + SegmentNorms r1norm = reader1.norms.get("field1"); AtomicInteger r1BytesRef = r1norm.bytesRef(); SegmentReader reader2 = (SegmentReader)reader1.clone(); assertEquals(2, r1norm.bytesRef().get()); @@ -203,14 +203,14 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase { IndexReader reader2C = (IndexReader) reader1.clone(); SegmentReader segmentReader2C = getOnlySegmentReader(reader2C); segmentReader2C.norms("field1"); // load the norms for the field - Norm reader2CNorm = segmentReader2C.norms.get("field1"); + SegmentNorms reader2CNorm = segmentReader2C.norms.get("field1"); assertTrue("reader2CNorm.bytesRef()=" + reader2CNorm.bytesRef(), reader2CNorm.bytesRef().get() == 2); IndexReader reader3C = (IndexReader) reader2C.clone(); SegmentReader segmentReader3C = getOnlySegmentReader(reader3C); - Norm reader3CCNorm = segmentReader3C.norms.get("field1"); + SegmentNorms reader3CCNorm = segmentReader3C.norms.get("field1"); assertEquals(3, reader3CCNorm.bytesRef().get()); // edit a norm and the refcount should be 1 @@ -231,13 +231,13 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase { // norm values should be different assertTrue(sim.decodeNormValue(segmentReader3C.norms("field1")[5]) != sim.decodeNormValue(segmentReader4C.norms("field1")[5])); - Norm reader4CCNorm = segmentReader4C.norms.get("field1"); + SegmentNorms reader4CCNorm = segmentReader4C.norms.get("field1"); assertEquals(3, reader3CCNorm.bytesRef().get()); assertEquals(1, reader4CCNorm.bytesRef().get()); IndexReader reader5C = (IndexReader) reader4C.clone(); SegmentReader segmentReader5C = getOnlySegmentReader(reader5C); - Norm reader5CCNorm = segmentReader5C.norms.get("field1"); + SegmentNorms reader5CCNorm = segmentReader5C.norms.get("field1"); reader5C.setNorm(5, "field1", sim.encodeNormValue(0.7f)); assertEquals(1, 
reader5CCNorm.bytesRef().get()); From 4a0057d6aba73f1a88329c28b710323045575e01 Mon Sep 17 00:00:00 2001 From: Ryan McKinley Date: Fri, 20 May 2011 18:34:18 +0000 Subject: [PATCH 14/27] SOLR-2513: remove final from XMLWriter git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1125497 13f79535-47bb-0310-9956-ffa450edef68 --- .../java/org/apache/solr/response/XMLWriter.java | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/solr/src/java/org/apache/solr/response/XMLWriter.java b/solr/src/java/org/apache/solr/response/XMLWriter.java index a2a7fb92e88..8bc74a947c7 100644 --- a/solr/src/java/org/apache/solr/response/XMLWriter.java +++ b/solr/src/java/org/apache/solr/response/XMLWriter.java @@ -19,9 +19,7 @@ package org.apache.solr.response; import java.io.IOException; import java.io.Writer; -import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.Iterator; import java.util.Map; import java.util.Set; @@ -32,11 +30,13 @@ import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.XML; import org.apache.solr.request.SolrQueryRequest; -import org.apache.solr.schema.SchemaField; import org.apache.solr.search.ReturnFields; -public final class XMLWriter extends TextResponseWriter { +/** + * @lucene.internal + */ +public class XMLWriter extends TextResponseWriter { public static float CURRENT_VERSION=2.2f; @@ -54,13 +54,8 @@ public final class XMLWriter extends TextResponseWriter { private static final char[] XML_START2_NOSCHEMA=("\n").toCharArray(); - private boolean defaultIndent=false; final int version; - // temporary working objects... - // be careful not to use these recursively... - private final ArrayList tlst = new ArrayList(); - public static void writeResponse(Writer writer, SolrQueryRequest req, SolrQueryResponse rsp) throws IOException { XMLWriter xmlWriter = null; try { @@ -106,7 +101,7 @@ public final class XMLWriter extends TextResponseWriter { writer.write(XML_START2_NOSCHEMA); // dump response values - NamedList lst = rsp.getValues(); + NamedList lst = rsp.getValues(); Boolean omitHeader = req.getParams().getBool(CommonParams.OMIT_HEADER); if(omitHeader != null && omitHeader) lst.remove("responseHeader"); int sz = lst.size(); From a49fccc4b38f14caf5391b70c54cfc071fe8e008 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stanis=C5=82aw=20Osi=C5=84ski?= Date: Fri, 20 May 2011 19:39:42 +0000 Subject: [PATCH 15/27] SOLR-2448: Updating SimpleXML JAR to version 2.4.1 for consistency with the Maven dependency declaration git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1125526 13f79535-47bb-0310-9956-ffa450edef68 --- dev-tools/eclipse/dot.classpath | 2 +- solr/contrib/clustering/lib/simple-xml-2.3.5.jar | 2 -- solr/contrib/clustering/lib/simple-xml-2.4.1.jar | 2 ++ 3 files changed, 3 insertions(+), 3 deletions(-) delete mode 100644 solr/contrib/clustering/lib/simple-xml-2.3.5.jar create mode 100644 solr/contrib/clustering/lib/simple-xml-2.4.1.jar diff --git a/dev-tools/eclipse/dot.classpath b/dev-tools/eclipse/dot.classpath index 02598acf722..b6c03e55227 100644 --- a/dev-tools/eclipse/dot.classpath +++ b/dev-tools/eclipse/dot.classpath @@ -132,7 +132,7 @@ - + diff --git a/solr/contrib/clustering/lib/simple-xml-2.3.5.jar b/solr/contrib/clustering/lib/simple-xml-2.3.5.jar deleted file mode 100644 index 6abdb9ca750..00000000000 --- a/solr/contrib/clustering/lib/simple-xml-2.3.5.jar +++ /dev/null @@ -1,2 +0,0 @@ 
-AnyObjectId[f668bc86b1d42264758b54a0395e49eb564dfd27] was removed in git history. -Apache SVN contains full history. \ No newline at end of file diff --git a/solr/contrib/clustering/lib/simple-xml-2.4.1.jar b/solr/contrib/clustering/lib/simple-xml-2.4.1.jar new file mode 100644 index 00000000000..b1d2b1778e8 --- /dev/null +++ b/solr/contrib/clustering/lib/simple-xml-2.4.1.jar @@ -0,0 +1,2 @@ +AnyObjectId[d6fe35161bb0bf31fe8f90a20fb58181f102bae1] was removed in git history. +Apache SVN contains full history. \ No newline at end of file From 91c74bb6e70d53a18cdd46c824e0805ac5e0e482 Mon Sep 17 00:00:00 2001 From: Steven Rowe Date: Fri, 20 May 2011 20:55:31 +0000 Subject: [PATCH 16/27] SOLR-2421: remove unnecessary dependency git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1125567 13f79535-47bb-0310-9956-ffa450edef68 --- dev-tools/eclipse/dot.classpath | 1 - dev-tools/maven/pom.xml.template | 5 - .../dataimporthandler/src/pom.xml.template | 4 - dev-tools/maven/solr/src/pom.xml.template | 4 - .../maven/solr/src/solrj/pom.xml.template | 4 - solr/lib/geronimo-stax-LICENSE-ASL.txt | 203 ------------------ solr/lib/geronimo-stax-NOTICE.txt | 6 - solr/lib/geronimo-stax-api_1.0_spec-1.0.1.jar | 2 - 8 files changed, 229 deletions(-) delete mode 100644 solr/lib/geronimo-stax-LICENSE-ASL.txt delete mode 100644 solr/lib/geronimo-stax-NOTICE.txt delete mode 100755 solr/lib/geronimo-stax-api_1.0_spec-1.0.1.jar diff --git a/dev-tools/eclipse/dot.classpath b/dev-tools/eclipse/dot.classpath index b6c03e55227..d060e1455e6 100644 --- a/dev-tools/eclipse/dot.classpath +++ b/dev-tools/eclipse/dot.classpath @@ -107,7 +107,6 @@ - diff --git a/dev-tools/maven/pom.xml.template b/dev-tools/maven/pom.xml.template index 4d20e23c3be..f2d98038cc3 100644 --- a/dev-tools/maven/pom.xml.template +++ b/dev-tools/maven/pom.xml.template @@ -230,11 +230,6 @@ commons-compress 1.1 - - org.apache.geronimo.specs - geronimo-stax-api_1.0_spec - 1.0.1 - org.apache.solr solr-commons-csv diff --git a/dev-tools/maven/solr/contrib/dataimporthandler/src/pom.xml.template b/dev-tools/maven/solr/contrib/dataimporthandler/src/pom.xml.template index 302ac33827d..9241a01c980 100644 --- a/dev-tools/maven/solr/contrib/dataimporthandler/src/pom.xml.template +++ b/dev-tools/maven/solr/contrib/dataimporthandler/src/pom.xml.template @@ -69,10 +69,6 @@ servlet-api provided - - org.apache.geronimo.specs - geronimo-stax-api_1.0_spec - commons-io commons-io diff --git a/dev-tools/maven/solr/src/pom.xml.template b/dev-tools/maven/solr/src/pom.xml.template index b659a01383c..7a937a4b9b5 100644 --- a/dev-tools/maven/solr/src/pom.xml.template +++ b/dev-tools/maven/solr/src/pom.xml.template @@ -96,10 +96,6 @@ org.apache.solr solr-commons-csv - - org.apache.geronimo.specs - geronimo-stax-api_1.0_spec - commons-codec commons-codec diff --git a/dev-tools/maven/solr/src/solrj/pom.xml.template b/dev-tools/maven/solr/src/solrj/pom.xml.template index 3ae76473707..4f8d796f664 100644 --- a/dev-tools/maven/solr/src/solrj/pom.xml.template +++ b/dev-tools/maven/solr/src/solrj/pom.xml.template @@ -48,10 +48,6 @@ ${project.version} test - - org.apache.geronimo.specs - geronimo-stax-api_1.0_spec - org.apache.zookeeper zookeeper diff --git a/solr/lib/geronimo-stax-LICENSE-ASL.txt b/solr/lib/geronimo-stax-LICENSE-ASL.txt deleted file mode 100644 index 6b0b1270ff0..00000000000 --- a/solr/lib/geronimo-stax-LICENSE-ASL.txt +++ /dev/null @@ -1,203 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, 
REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. 
Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
- diff --git a/solr/lib/geronimo-stax-NOTICE.txt b/solr/lib/geronimo-stax-NOTICE.txt deleted file mode 100644 index 3b4090d7e10..00000000000 --- a/solr/lib/geronimo-stax-NOTICE.txt +++ /dev/null @@ -1,6 +0,0 @@ -Apache Geronimo -Copyright 2003-2006 The Apache Software Foundation - -This product includes software developed by -The Apache Software Foundation (http://www.apache.org/). - diff --git a/solr/lib/geronimo-stax-api_1.0_spec-1.0.1.jar b/solr/lib/geronimo-stax-api_1.0_spec-1.0.1.jar deleted file mode 100755 index 77da03b5bd6..00000000000 --- a/solr/lib/geronimo-stax-api_1.0_spec-1.0.1.jar +++ /dev/null @@ -1,2 +0,0 @@ -AnyObjectId[ab1ee3ba605df11b3075677c808d092845dad123] was removed in git history. -Apache SVN contains full history. \ No newline at end of file From dbf5ab4af2013fd93284d3d61ed70c53497e4a8a Mon Sep 17 00:00:00 2001 From: Michael McCandless Date: Sat, 21 May 2011 14:21:31 +0000 Subject: [PATCH 17/27] test-only fixes: MockRandomCodec sometimes never picked an index term; TestNRTThreads creates way too many segments git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1125711 13f79535-47bb-0310-9956-ffa450edef68 --- .../index/codecs/mockrandom/MockRandomCodec.java | 2 +- .../org/apache/lucene/index/TestNRTThreads.java | 15 +++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/lucene/src/test-framework/org/apache/lucene/index/codecs/mockrandom/MockRandomCodec.java b/lucene/src/test-framework/org/apache/lucene/index/codecs/mockrandom/MockRandomCodec.java index 2fe6154bcf7..90883229eb4 100644 --- a/lucene/src/test-framework/org/apache/lucene/index/codecs/mockrandom/MockRandomCodec.java +++ b/lucene/src/test-framework/org/apache/lucene/index/codecs/mockrandom/MockRandomCodec.java @@ -196,7 +196,7 @@ public class MockRandomCodec extends Codec { @Override public boolean isIndexTerm(BytesRef term, TermStats stats) { - return rand.nextInt(gap) == 17; + return rand.nextInt(gap) == gap/2; } @Override diff --git a/lucene/src/test/org/apache/lucene/index/TestNRTThreads.java b/lucene/src/test/org/apache/lucene/index/TestNRTThreads.java index 587f60545ce..5f215a2d595 100644 --- a/lucene/src/test/org/apache/lucene/index/TestNRTThreads.java +++ b/lucene/src/test/org/apache/lucene/index/TestNRTThreads.java @@ -71,6 +71,21 @@ public class TestNRTThreads extends LuceneTestCase { final File tempDir = _TestUtil.getTempDir("nrtopenfiles"); final MockDirectoryWrapper dir = new MockDirectoryWrapper(random, FSDirectory.open(tempDir)); final IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); + + if (LuceneTestCase.TEST_NIGHTLY) { + // newIWConfig makes smallish max seg size, which + // results in tons and tons of segments for this test + // when run nightly: + MergePolicy mp = conf.getMergePolicy(); + if (mp instanceof TieredMergePolicy) { + ((TieredMergePolicy) mp).setMaxMergedSegmentMB(5000.); + } else if (mp instanceof LogByteSizeMergePolicy) { + ((LogByteSizeMergePolicy) mp).setMaxMergeMB(1000.); + } else if (mp instanceof LogMergePolicy) { + ((LogMergePolicy) mp).setMaxMergeDocs(100000); + } + } + conf.setMergedSegmentWarmer(new IndexWriter.IndexReaderWarmer() { @Override public void warm(IndexReader reader) throws IOException { From 886e17c842fc898e8565b8281b096fe759657bfa Mon Sep 17 00:00:00 2001 From: Doron Cohen Date: Sun, 22 May 2011 12:00:42 +0000 Subject: [PATCH 18/27] SOLR-2500: TestSolrProperties sometimes fails with "no such core: core0" git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1125932 
13f79535-47bb-0310-9956-ffa450edef68 --- .../solrj/embedded/TestSolrProperties.java | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/solr/src/test/org/apache/solr/client/solrj/embedded/TestSolrProperties.java b/solr/src/test/org/apache/solr/client/solrj/embedded/TestSolrProperties.java index cf3eff2f19a..2f08b1de0f7 100644 --- a/solr/src/test/org/apache/solr/client/solrj/embedded/TestSolrProperties.java +++ b/solr/src/test/org/apache/solr/client/solrj/embedded/TestSolrProperties.java @@ -37,6 +37,7 @@ import org.apache.solr.client.solrj.request.UpdateRequest; import org.apache.solr.client.solrj.request.UpdateRequest.ACTION; import org.apache.solr.client.solrj.response.CoreAdminResponse; import org.apache.solr.common.SolrInputDocument; +import org.apache.solr.common.util.FileUtils; import org.apache.solr.core.CoreContainer; import org.apache.solr.util.AbstractSolrTestCase; import org.junit.After; @@ -54,6 +55,7 @@ import org.w3c.dom.Node; public class TestSolrProperties extends LuceneTestCase { protected static Logger log = LoggerFactory.getLogger(TestSolrProperties.class); protected CoreContainer cores = null; + private File home; private File solrXml; private static final XPathFactory xpathFactory = XPathFactory.newInstance(); @@ -62,21 +64,27 @@ public class TestSolrProperties extends LuceneTestCase { return "solr/shared"; } - public String getSolrXml() { + public String getOrigSolrXml() { return "solr.xml"; } + public String getSolrXml() { + return "test-solr.xml"; + } + @Override @Before public void setUp() throws Exception { super.setUp(); System.setProperty("solr.solr.home", getSolrHome()); - File home = SolrTestCaseJ4.getFile(getSolrHome()); + home = SolrTestCaseJ4.getFile(getSolrHome()); System.setProperty("solr.solr.home", home.getAbsolutePath()); log.info("pwd: " + (new File(".")).getAbsolutePath()); + File origSolrXml = new File(home, getOrigSolrXml()); solrXml = new File(home, getSolrXml()); + FileUtils.copyFile(origSolrXml, solrXml); cores = new CoreContainer(home.getAbsolutePath(), solrXml); } @@ -85,7 +93,7 @@ public class TestSolrProperties extends LuceneTestCase { public void tearDown() throws Exception { if (cores != null) cores.shutdown(); - File dataDir = new File(getSolrHome() + "/data"); + File dataDir = new File(home,"data"); String skip = System.getProperty("solr.test.leavedatadir"); if (null != skip && 0 != skip.trim().length()) { log.info("NOTE: per solr.test.leavedatadir, dataDir will not be removed: " + dataDir.getAbsolutePath()); @@ -94,8 +102,9 @@ public class TestSolrProperties extends LuceneTestCase { log.warn("!!!! 
WARNING: best effort to remove " + dataDir.getAbsolutePath() + " FAILED !!!!!"); } } - File persistedFile = new File(getSolrHome() + File.separator + "solr-persist.xml"); - persistedFile.delete(); + File persistedFile = new File(home,"solr-persist.xml"); + assertTrue("Failed to delete "+persistedFile, persistedFile.delete()); + assertTrue("Failed to delete "+solrXml, solrXml.delete()); super.tearDown(); } From 3ec7abd684e257bc61292cb24b52996da0abe292 Mon Sep 17 00:00:00 2001 From: Michael McCandless Date: Sun, 22 May 2011 14:18:55 +0000 Subject: [PATCH 19/27] allow MockTokenizer to take max token length; default to MAX_INT (= no change) git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1125972 13f79535-47bb-0310-9956-ffa450edef68 --- .../apache/lucene/analysis/MockTokenizer.java | 19 +++++++++++++------ .../org/apache/lucene/analysis/TestToken.java | 2 +- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/lucene/src/test-framework/org/apache/lucene/analysis/MockTokenizer.java b/lucene/src/test-framework/org/apache/lucene/analysis/MockTokenizer.java index fb4c0530ee1..37e0b36cda0 100644 --- a/lucene/src/test-framework/org/apache/lucene/analysis/MockTokenizer.java +++ b/lucene/src/test-framework/org/apache/lucene/analysis/MockTokenizer.java @@ -22,6 +22,7 @@ import java.io.Reader; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; +import org.apache.lucene.util.AttributeSource.AttributeFactory; import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.apache.lucene.util.automaton.RegExp; @@ -53,6 +54,8 @@ public class MockTokenizer extends Tokenizer { private final CharacterRunAutomaton runAutomaton; private final boolean lowerCase; + private final int maxTokenLength; + public static final int DEFAULT_MAX_TOKEN_LENGTH = Integer.MAX_VALUE; private int state; private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class); @@ -74,20 +77,21 @@ public class MockTokenizer extends Tokenizer { private State streamState = State.CLOSE; private boolean enableChecks = true; - public MockTokenizer(AttributeFactory factory, Reader input, CharacterRunAutomaton runAutomaton, boolean lowerCase) { + public MockTokenizer(AttributeFactory factory, Reader input, CharacterRunAutomaton runAutomaton, boolean lowerCase, int maxTokenLength) { super(factory, input); this.runAutomaton = runAutomaton; this.lowerCase = lowerCase; this.state = runAutomaton.getInitialState(); this.streamState = State.SETREADER; + this.maxTokenLength = maxTokenLength; + } + + public MockTokenizer(Reader input, CharacterRunAutomaton runAutomaton, boolean lowerCase, int maxTokenLength) { + this(AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY, input, runAutomaton, lowerCase, maxTokenLength); } public MockTokenizer(Reader input, CharacterRunAutomaton runAutomaton, boolean lowerCase) { - super(input); - this.runAutomaton = runAutomaton; - this.lowerCase = lowerCase; - this.state = runAutomaton.getInitialState(); - this.streamState = State.SETREADER; + this(input, runAutomaton, lowerCase, DEFAULT_MAX_TOKEN_LENGTH); } @Override @@ -107,6 +111,9 @@ public class MockTokenizer extends Tokenizer { for (int i = 0; i < chars.length; i++) termAtt.append(chars[i]); endOffset = off; + if (termAtt.length() >= maxTokenLength) { + break; + } cp = readCodePoint(); } while (cp >= 0 && isTokenChar(cp)); offsetAtt.setOffset(correctOffset(startOffset), correctOffset(endOffset)); diff --git 
a/lucene/src/test/org/apache/lucene/analysis/TestToken.java b/lucene/src/test/org/apache/lucene/analysis/TestToken.java index 29edc25a40a..11168c90e00 100644 --- a/lucene/src/test/org/apache/lucene/analysis/TestToken.java +++ b/lucene/src/test/org/apache/lucene/analysis/TestToken.java @@ -225,7 +225,7 @@ public class TestToken extends LuceneTestCase { } public void testTokenAttributeFactory() throws Exception { - TokenStream ts = new MockTokenizer(Token.TOKEN_ATTRIBUTE_FACTORY, new StringReader("foo bar"), MockTokenizer.WHITESPACE, false); + TokenStream ts = new MockTokenizer(Token.TOKEN_ATTRIBUTE_FACTORY, new StringReader("foo bar"), MockTokenizer.WHITESPACE, false, MockTokenizer.DEFAULT_MAX_TOKEN_LENGTH); assertTrue("SenselessAttribute is not implemented by SenselessAttributeImpl", ts.addAttribute(SenselessAttribute.class) instanceof SenselessAttributeImpl); From 84410237192f2095f9327a36d590c7f9808dbabf Mon Sep 17 00:00:00 2001 From: Michael McCandless Date: Sun, 22 May 2011 16:08:36 +0000 Subject: [PATCH 20/27] test fix: LuceneTestCase was reporting the wrong iter that failed git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1126022 13f79535-47bb-0310-9956-ffa450edef68 --- .../apache/lucene/util/LuceneTestCase.java | 21 ++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/lucene/src/test-framework/org/apache/lucene/util/LuceneTestCase.java b/lucene/src/test-framework/org/apache/lucene/util/LuceneTestCase.java index 59608db452a..02e2b5cae97 100644 --- a/lucene/src/test-framework/org/apache/lucene/util/LuceneTestCase.java +++ b/lucene/src/test-framework/org/apache/lucene/util/LuceneTestCase.java @@ -66,6 +66,8 @@ import org.junit.runner.Description; import org.junit.runner.RunWith; import org.junit.runner.manipulation.Filter; import org.junit.runner.manipulation.NoTestsRemainException; +import org.junit.runner.notification.Failure; +import org.junit.runner.notification.RunListener; import org.junit.runner.notification.RunNotifier; import org.junit.runners.BlockJUnit4ClassRunner; import org.junit.runners.model.FrameworkMethod; @@ -1145,6 +1147,7 @@ public abstract class LuceneTestCase extends Assert { * with one that returns null for getSequentialSubReaders. */ public static IndexSearcher newSearcher(IndexReader r, boolean maybeWrap) throws IOException { + if (random.nextBoolean()) { if (maybeWrap && random.nextBoolean()) { return new IndexSearcher(new SlowMultiReaderWrapper(r)); @@ -1296,17 +1299,25 @@ public abstract class LuceneTestCase extends Assert { } // only print iteration info if the user requested more than one iterations - boolean verbose = VERBOSE && TEST_ITER > 1; + final boolean verbose = VERBOSE && TEST_ITER > 1; + + final int currentIter[] = new int[1]; + arg1.addListener(new RunListener() { + @Override + public void testFailure(Failure failure) throws Exception { + if (verbose) { + System.out.println("\nNOTE: iteration " + currentIter[0] + " failed! "); + } + } + }); for (int i = 0; i < TEST_ITER; i++) { + currentIter[0] = i; if (verbose) { System.out.println("\nNOTE: running iter=" + (1+i) + " of " + TEST_ITER); } super.runChild(arg0, arg1); if (testsFailed) { - if (i >= TEST_ITER_MIN - 1) { - if (verbose) { - System.out.println("\nNOTE: iteration " + i + " failed !"); - } + if (i >= TEST_ITER_MIN - 1) { // XXX is this still off-by-one? 
break; } } From ca7a6b69059902abe3be963e0013b264ad90a48b Mon Sep 17 00:00:00 2001 From: Michael McCandless Date: Sun, 22 May 2011 17:06:27 +0000 Subject: [PATCH 21/27] LUCENE-2972: must alloc OpenBitSet to numOrd not numDocs git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1126091 13f79535-47bb-0310-9956-ffa450edef68 --- .../java/org/apache/lucene/search/FieldCacheTermsFilter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lucene/src/java/org/apache/lucene/search/FieldCacheTermsFilter.java b/lucene/src/java/org/apache/lucene/search/FieldCacheTermsFilter.java index 7c9099b6d2c..14fd836b4e5 100644 --- a/lucene/src/java/org/apache/lucene/search/FieldCacheTermsFilter.java +++ b/lucene/src/java/org/apache/lucene/search/FieldCacheTermsFilter.java @@ -127,7 +127,7 @@ public class FieldCacheTermsFilter extends Filter { public FieldCacheTermsFilterDocIdSet(FieldCache.DocTermsIndex fcsi) { this.fcsi = fcsi; - openBitSet = new OpenBitSet(this.fcsi.size()); + openBitSet = new OpenBitSet(this.fcsi.numOrd()); final BytesRef spare = new BytesRef(); for (int i=0;i Date: Sun, 22 May 2011 21:34:31 +0000 Subject: [PATCH 22/27] Add pattern to include solr/src/test-files/Top50KWiki.utf8 in resources copied to test classpath prior to testing git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1126232 13f79535-47bb-0310-9956-ffa450edef68 --- dev-tools/idea/.idea/compiler.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/dev-tools/idea/.idea/compiler.xml b/dev-tools/idea/.idea/compiler.xml index 2e36fba99a6..9106e68d83e 100644 --- a/dev-tools/idea/.idea/compiler.xml +++ b/dev-tools/idea/.idea/compiler.xml @@ -27,6 +27,7 @@ + From 49fe358760709987cf3304d48cc8bd65e24047f4 Mon Sep 17 00:00:00 2001 From: Koji Sekiguchi Date: Sun, 22 May 2011 23:56:43 +0000 Subject: [PATCH 23/27] use prettify in grouping package javadoc git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1126275 13f79535-47bb-0310-9956-ffa450edef68 --- .../src/java/org/apache/lucene/search/grouping/package.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/grouping/src/java/org/apache/lucene/search/grouping/package.html b/modules/grouping/src/java/org/apache/lucene/search/grouping/package.html index 72ea783eba1..d7efa0ee01f 100644 --- a/modules/grouping/src/java/org/apache/lucene/search/grouping/package.html +++ b/modules/grouping/src/java/org/apache/lucene/search/grouping/package.html @@ -68,7 +68,7 @@ field fall into a single group.

Typical usage looks like this (using the {@link org.apache.lucene.search.CachingCollector}):

-<pre>
+<pre class="prettyprint">
   FirstPassGroupingCollector c1 = new FirstPassGroupingCollector("author", groupSort, groupOffset+topNGroups);
 
   boolean cacheScores = true;

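The "Typical usage" walkthrough in the grouping package javadoc describes a two-pass flow: a first-pass collector finds the top N groups for a field, an optional CachingCollector buffers the hits, and a second-pass collector then gathers the top documents within each of those groups. A minimal Java sketch of that flow follows. It assumes an IndexSearcher (searcher), a Query (query), Sort objects (groupSort, docSort) and the various offset/flag variables already exist, and the constructor/method signatures beyond those visible in the hunk above (notably SecondPassGroupingCollector's argument order and the getTopGroups parameters) are best-effort assumptions from that era's documentation, not verified API.

  // First pass: determine the top groups, ordered by groupSort.
  FirstPassGroupingCollector c1 = new FirstPassGroupingCollector("author", groupSort, groupOffset + topNGroups);

  // Optionally buffer the hits so the second pass can replay them
  // instead of re-executing the query.
  boolean cacheScores = true;
  double maxCacheRAMMB = 4.0;
  CachingCollector cachedCollector = CachingCollector.create(c1, cacheScores, maxCacheRAMMB);
  searcher.search(query, cachedCollector);

  Collection<SearchGroup> topGroups = c1.getTopGroups(groupOffset, fillFields);
  if (topGroups == null) {
    return;  // no groups matched the query
  }

  // Second pass: collect the top docs inside each selected group.
  SecondPassGroupingCollector c2 = new SecondPassGroupingCollector("author", topGroups,
      groupSort, docSort, docOffset + docsPerGroup, getScores, getMaxScores, fillFields);

  if (cachedCollector.isCached()) {
    cachedCollector.replay(c2);      // buffered hits fit within maxCacheRAMMB
  } else {
    searcher.search(query, c2);      // cache overflowed; re-run the query
  }

  TopGroups groupsResult = c2.getTopGroups(docOffset);

Replaying from the CachingCollector avoids executing the query twice; if the buffered hits exceed maxCacheRAMMB, isCached() returns false and the query is simply re-executed for the second pass.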
From d1219a57693c03ba5793e4f8386bbc4d8205efbb Mon Sep 17 00:00:00 2001
From: Robert Muir 
Date: Mon, 23 May 2011 00:10:44 +0000
Subject: [PATCH 24/27] LUCENE-2981: review/remove unused/unsupported contribs

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1126280 13f79535-47bb-0310-9956-ffa450edef68
---
 build.xml                                     |  16 -
 dev-tools/eclipse/dot.classpath               |  15 -
 lucene/build.xml                              |   9 -
 lucene/common-build.xml                       |   9 -
 lucene/contrib/CHANGES.txt                    |   4 +
 lucene/contrib/ant/build.xml                  |  56 --
 lucene/contrib/ant/example.xml                |  24 -
 lucene/contrib/ant/lib/JTIDY-LICENSE-MIT.txt  |  53 --
 lucene/contrib/ant/lib/jtidy-r938.jar         |   2 -
 .../ant/ConfigurableDocumentHandler.java      |  24 -
 .../apache/lucene/ant/DocumentHandler.java    |  40 --
 .../lucene/ant/DocumentHandlerException.java  |  83 ---
 .../ant/FileExtensionDocumentHandler.java     |  65 --
 .../org/apache/lucene/ant/HtmlDocument.java   | 294 --------
 .../java/org/apache/lucene/ant/IndexTask.java | 444 ------------
 .../org/apache/lucene/ant/TextDocument.java   |  98 ---
 .../java/org/apache/lucene/ant/package.html   |  22 -
 lucene/contrib/ant/src/java/overview.html     |  26 -
 .../org/apache/lucene/ant/antlib.xml          |  21 -
 .../apache/lucene/ant/DocumentTestCase.java   |  36 -
 .../apache/lucene/ant/HtmlDocumentTest.java   |  44 --
 .../org/apache/lucene/ant/IndexTaskTest.java  |  95 ---
 .../apache/lucene/ant/TextDocumentTest.java   |  43 --
 .../src/test/org/apache/lucene/ant/test.html  |   7 -
 .../src/test/org/apache/lucene/ant/test.txt   |   1 -
 lucene/contrib/db/bdb-je/build.xml            |  50 --
 lucene/contrib/db/bdb-je/lib/je-3.3.93.jar    |   2 -
 .../contrib/db/bdb-je/lib/je-LICENSE-FAKE.txt |   1 -
 .../contrib/db/bdb-je/lib/je-NOTICE-FAKE.txt  |   1 -
 .../org/apache/lucene/store/je/Block.java     |  83 ---
 .../java/org/apache/lucene/store/je/File.java | 237 -------
 .../apache/lucene/store/je/JEDirectory.java   | 213 ------
 .../apache/lucene/store/je/JEIndexInput.java  | 141 ----
 .../apache/lucene/store/je/JEIndexOutput.java | 147 ----
 .../org/apache/lucene/store/je/JELock.java    |  53 --
 .../org/apache/lucene/store/je/package.html   |  22 -
 .../contrib/db/bdb-je/src/java/overview.html  |  26 -
 .../apache/lucene/store/je/JEStoreTest.java   | 636 ------------------
 lucene/contrib/db/bdb/build.xml               |  77 ---
 lucene/contrib/db/bdb/lib/db--NOTICE-FAKE.txt |   1 -
 lucene/contrib/db/bdb/lib/db-4.7.25.jar       |   2 -
 lucene/contrib/db/bdb/lib/db-LICENSE-FAKE.txt |   1 -
 .../com/sleepycat/db/DbHandleExtractor.java   |  50 --
 .../org/apache/lucene/store/db/Block.java     |  92 ---
 .../apache/lucene/store/db/DbDirectory.java   | 246 -------
 .../apache/lucene/store/db/DbIndexInput.java  | 152 -----
 .../apache/lucene/store/db/DbIndexOutput.java | 156 -----
 .../org/apache/lucene/store/db/DbLock.java    |  55 --
 .../java/org/apache/lucene/store/db/File.java | 263 --------
 .../org/apache/lucene/store/db/package.html   |  22 -
 lucene/contrib/db/bdb/src/java/overview.html  |  26 -
 .../apache/lucene/store/db/DbStoreTest.java   | 413 ------------
 .../lucene/store/db/SanityLoadLibrary.java    |  36 -
 lucene/contrib/db/build.xml                   |  74 --
 lucene/contrib/lucli/README                   |  24 -
 lucene/contrib/lucli/build.xml                |  71 --
 .../lucli/lib/jline-LICENSE-BSD_LIKE.txt      |  33 -
 lucene/contrib/lucli/lib/jline-NOTICE.txt     |   2 -
 lucene/contrib/lucli/lib/jline.jar            |   2 -
 lucene/contrib/lucli/run.sh                   |  22 -
 .../lucli/src/java/lucli/LuceneMethods.java   | 406 -----------
 .../contrib/lucli/src/java/lucli/Lucli.java   | 321 ---------
 .../contrib/lucli/src/java/lucli/package.html |  22 -
 lucene/contrib/lucli/src/java/overview.html   |  26 -
 lucene/contrib/lucli/src/test/add.tests.here  |   0
 lucene/contrib/swing/build.xml                |  57 --
 lucene/contrib/swing/docs/index.html          |  97 ---
 .../lucene/swing/models/ListSearcher.java     | 311 ---------
 .../lucene/swing/models/TableSearcher.java    | 362 ----------
 .../apache/lucene/swing/models/package.html   |  22 -
 lucene/contrib/swing/src/java/overview.html   |  23 -
 .../lucene/swing/models/BaseListModel.java    |  55 --
 .../lucene/swing/models/BaseTableModel.java   | 104 ---
 .../apache/lucene/swing/models/DataStore.java | 202 ------
 .../swing/models/ListSearcherSimulator.java   |  88 ---
 .../lucene/swing/models/RestaurantInfo.java   | 101 ---
 .../swing/models/TableSearcherSimulator.java  |  81 ---
 .../lucene/swing/models/TestBasicList.java    |  53 --
 .../lucene/swing/models/TestBasicTable.java   |  60 --
 .../swing/models/TestSearchingList.java       |  47 --
 .../swing/models/TestSearchingTable.java      |  45 --
 .../lucene/swing/models/TestUpdatingList.java |  79 ---
 .../swing/models/TestUpdatingTable.java       |  79 ---
 .../content/xdocs/lucene-contrib/index.xml    |  20 -
 .../src/documentation/content/xdocs/site.xml  |  12 +-
 85 files changed, 5 insertions(+), 7631 deletions(-)
 delete mode 100644 lucene/contrib/ant/build.xml
 delete mode 100644 lucene/contrib/ant/example.xml
 delete mode 100644 lucene/contrib/ant/lib/JTIDY-LICENSE-MIT.txt
 delete mode 100644 lucene/contrib/ant/lib/jtidy-r938.jar
 delete mode 100644 lucene/contrib/ant/src/java/org/apache/lucene/ant/ConfigurableDocumentHandler.java
 delete mode 100644 lucene/contrib/ant/src/java/org/apache/lucene/ant/DocumentHandler.java
 delete mode 100644 lucene/contrib/ant/src/java/org/apache/lucene/ant/DocumentHandlerException.java
 delete mode 100644 lucene/contrib/ant/src/java/org/apache/lucene/ant/FileExtensionDocumentHandler.java
 delete mode 100644 lucene/contrib/ant/src/java/org/apache/lucene/ant/HtmlDocument.java
 delete mode 100644 lucene/contrib/ant/src/java/org/apache/lucene/ant/IndexTask.java
 delete mode 100644 lucene/contrib/ant/src/java/org/apache/lucene/ant/TextDocument.java
 delete mode 100644 lucene/contrib/ant/src/java/org/apache/lucene/ant/package.html
 delete mode 100644 lucene/contrib/ant/src/java/overview.html
 delete mode 100644 lucene/contrib/ant/src/resources/org/apache/lucene/ant/antlib.xml
 delete mode 100644 lucene/contrib/ant/src/test/org/apache/lucene/ant/DocumentTestCase.java
 delete mode 100644 lucene/contrib/ant/src/test/org/apache/lucene/ant/HtmlDocumentTest.java
 delete mode 100644 lucene/contrib/ant/src/test/org/apache/lucene/ant/IndexTaskTest.java
 delete mode 100644 lucene/contrib/ant/src/test/org/apache/lucene/ant/TextDocumentTest.java
 delete mode 100644 lucene/contrib/ant/src/test/org/apache/lucene/ant/test.html
 delete mode 100644 lucene/contrib/ant/src/test/org/apache/lucene/ant/test.txt
 delete mode 100644 lucene/contrib/db/bdb-je/build.xml
 delete mode 100644 lucene/contrib/db/bdb-je/lib/je-3.3.93.jar
 delete mode 100644 lucene/contrib/db/bdb-je/lib/je-LICENSE-FAKE.txt
 delete mode 100644 lucene/contrib/db/bdb-je/lib/je-NOTICE-FAKE.txt
 delete mode 100644 lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/Block.java
 delete mode 100644 lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/File.java
 delete mode 100644 lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/JEDirectory.java
 delete mode 100644 lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/JEIndexInput.java
 delete mode 100644 lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/JEIndexOutput.java
 delete mode 100644 lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/JELock.java
 delete mode 100644 lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/package.html
 delete mode 100644 lucene/contrib/db/bdb-je/src/java/overview.html
 delete mode 100644 lucene/contrib/db/bdb-je/src/test/org/apache/lucene/store/je/JEStoreTest.java
 delete mode 100644 lucene/contrib/db/bdb/build.xml
 delete mode 100644 lucene/contrib/db/bdb/lib/db--NOTICE-FAKE.txt
 delete mode 100644 lucene/contrib/db/bdb/lib/db-4.7.25.jar
 delete mode 100644 lucene/contrib/db/bdb/lib/db-LICENSE-FAKE.txt
 delete mode 100644 lucene/contrib/db/bdb/src/java/com/sleepycat/db/DbHandleExtractor.java
 delete mode 100644 lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/Block.java
 delete mode 100644 lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/DbDirectory.java
 delete mode 100644 lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/DbIndexInput.java
 delete mode 100644 lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/DbIndexOutput.java
 delete mode 100644 lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/DbLock.java
 delete mode 100644 lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/File.java
 delete mode 100644 lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/package.html
 delete mode 100644 lucene/contrib/db/bdb/src/java/overview.html
 delete mode 100644 lucene/contrib/db/bdb/src/test/org/apache/lucene/store/db/DbStoreTest.java
 delete mode 100644 lucene/contrib/db/bdb/src/test/org/apache/lucene/store/db/SanityLoadLibrary.java
 delete mode 100644 lucene/contrib/db/build.xml
 delete mode 100644 lucene/contrib/lucli/README
 delete mode 100644 lucene/contrib/lucli/build.xml
 delete mode 100644 lucene/contrib/lucli/lib/jline-LICENSE-BSD_LIKE.txt
 delete mode 100644 lucene/contrib/lucli/lib/jline-NOTICE.txt
 delete mode 100644 lucene/contrib/lucli/lib/jline.jar
 delete mode 100755 lucene/contrib/lucli/run.sh
 delete mode 100644 lucene/contrib/lucli/src/java/lucli/LuceneMethods.java
 delete mode 100644 lucene/contrib/lucli/src/java/lucli/Lucli.java
 delete mode 100644 lucene/contrib/lucli/src/java/lucli/package.html
 delete mode 100644 lucene/contrib/lucli/src/java/overview.html
 delete mode 100644 lucene/contrib/lucli/src/test/add.tests.here
 delete mode 100644 lucene/contrib/swing/build.xml
 delete mode 100644 lucene/contrib/swing/docs/index.html
 delete mode 100644 lucene/contrib/swing/src/java/org/apache/lucene/swing/models/ListSearcher.java
 delete mode 100644 lucene/contrib/swing/src/java/org/apache/lucene/swing/models/TableSearcher.java
 delete mode 100644 lucene/contrib/swing/src/java/org/apache/lucene/swing/models/package.html
 delete mode 100644 lucene/contrib/swing/src/java/overview.html
 delete mode 100644 lucene/contrib/swing/src/test/org/apache/lucene/swing/models/BaseListModel.java
 delete mode 100644 lucene/contrib/swing/src/test/org/apache/lucene/swing/models/BaseTableModel.java
 delete mode 100644 lucene/contrib/swing/src/test/org/apache/lucene/swing/models/DataStore.java
 delete mode 100644 lucene/contrib/swing/src/test/org/apache/lucene/swing/models/ListSearcherSimulator.java
 delete mode 100644 lucene/contrib/swing/src/test/org/apache/lucene/swing/models/RestaurantInfo.java
 delete mode 100644 lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TableSearcherSimulator.java
 delete mode 100644 lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestBasicList.java
 delete mode 100644 lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestBasicTable.java
 delete mode 100644 lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestSearchingList.java
 delete mode 100644 lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestSearchingTable.java
 delete mode 100644 lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestUpdatingList.java
 delete mode 100644 lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestUpdatingTable.java

diff --git a/build.xml b/build.xml
index 3c91d19760a..679953b395f 100644
--- a/build.xml
+++ b/build.xml
@@ -77,14 +77,6 @@
     
     
-    Running Lucene contrib db/bdb-je task 'get-je-jar' ...
-    
-      
-    
-    Running Lucene contrib db/bdb task 'get-db-jar' ...
-    
-      
-    
     
       SUCCESS: You must right-click your project and choose Refresh
       Please note, your project must use a Java 6 JRE
@@ -95,14 +87,6 @@
     
       
     
-    Running Lucene contrib db/bdb-je task 'get-je-jar' ...
-    
-      
-    
-    Running Lucene contrib db/bdb task 'get-db-jar' ...
-    
-      
-    
     
       To complete IntelliJ IDEA setup, you must manually configure
       Project Structure | Project | Project SDK.
diff --git a/dev-tools/eclipse/dot.classpath b/dev-tools/eclipse/dot.classpath
index d060e1455e6..ccef914c405 100644
--- a/dev-tools/eclipse/dot.classpath
+++ b/dev-tools/eclipse/dot.classpath
@@ -3,21 +3,12 @@
 	
 	
 	
-	
-	
-	
-	
-	
-	
-	
 	
 	
 	
 	
 	
 	
-	
-	
 	
 	
 	
@@ -31,8 +22,6 @@
 	
 	
 	
-	
-	
 	
 	
 	
@@ -84,10 +73,6 @@
 	
 	
 	
-	
-	
-	
-	
 	
 	
 	
diff --git a/lucene/build.xml b/lucene/build.xml
index 3a0a522249a..eb05eb59e18 100644
--- a/lucene/build.xml
+++ b/lucene/build.xml
@@ -220,19 +220,14 @@
           
           		  
 
-          
-          
-          
           
           
           
-          
           
           
           
           
           
-          
           
           
           
@@ -244,20 +239,16 @@
   
           
     
-          
           
           
-          
           
           
-          
           
           
           
           
           
           
-          
           
           
           
diff --git a/lucene/common-build.xml b/lucene/common-build.xml
index c6bdb1627aa..1ce62bbb7c8 100644
--- a/lucene/common-build.xml
+++ b/lucene/common-build.xml
@@ -849,15 +849,6 @@
       
       
       
-      
-      
-      
-      
-      
-      
       
     
   
diff --git a/lucene/contrib/CHANGES.txt b/lucene/contrib/CHANGES.txt
index a0729dd5cd5..a1f08da0739 100644
--- a/lucene/contrib/CHANGES.txt
+++ b/lucene/contrib/CHANGES.txt
@@ -50,6 +50,10 @@ Bug Fixes
 
 ======================= Lucene 3.x (not yet released) =======================
 
+Changes in backwards compatibility policy
+
+ * LUCENE-2981: Removed the following contribs: ant, db, lucli, swing. (Robert Muir)
+
 Changes in runtime behavior
 
  * LUCENE-3086: ItalianAnalyzer now uses ElisionFilter with a set of Italian
diff --git a/lucene/contrib/ant/build.xml b/lucene/contrib/ant/build.xml
deleted file mode 100644
index 5b8d0b0684f..00000000000
--- a/lucene/contrib/ant/build.xml
+++ /dev/null
@@ -1,56 +0,0 @@
-
-
-
-
-
-
-  
-    Lucene Ant integration
-  
-
-  
-    
-    
-  
-
-  
-
-  
-
-  
-	
-  
-
-  
-	 
-	 
-  
-
-  
-
-  
-    
-      
-    
-  
-
-
diff --git a/lucene/contrib/ant/example.xml b/lucene/contrib/ant/example.xml
deleted file mode 100644
index 7995563d12a..00000000000
--- a/lucene/contrib/ant/example.xml
+++ /dev/null
@@ -1,24 +0,0 @@
-
-
-
-
-  
-    Lucene Ant index example
-  
-
-  
-  
-
-  
-    
-
-    
-      
-    
-  
-
-
diff --git a/lucene/contrib/ant/lib/JTIDY-LICENSE-MIT.txt b/lucene/contrib/ant/lib/JTIDY-LICENSE-MIT.txt
deleted file mode 100644
index 25cbfea5d39..00000000000
--- a/lucene/contrib/ant/lib/JTIDY-LICENSE-MIT.txt
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
-*  Java HTML Tidy - JTidy
-*  HTML parser and pretty printer
-*
-*  Copyright (c) 1998-2000 World Wide Web Consortium (Massachusetts
-*  Institute of Technology, Institut National de Recherche en
-*  Informatique et en Automatique, Keio University). All Rights
-*  Reserved.
-*
-*  Contributing Author(s):
-*
-*     Dave Raggett 
-*     Andy Quick  (translation to Java)
-*     Gary L Peskin  (Java development)
-*     Sami Lempinen  (release management)
-*     Fabrizio Giustina 
-*
-*  The contributing author(s) would like to thank all those who
-*  helped with testing, bug fixes, and patience.  This wouldn't
-*  have been possible without all of you.
-*
-*  COPYRIGHT NOTICE:
-* 
-*  This software and documentation is provided "as is," and
-*  the copyright holders and contributing author(s) make no
-*  representations or warranties, express or implied, including
-*  but not limited to, warranties of merchantability or fitness
-*  for any particular purpose or that the use of the software or
-*  documentation will not infringe any third party patents,
-*  copyrights, trademarks or other rights. 
-*
-*  The copyright holders and contributing author(s) will not be
-*  liable for any direct, indirect, special or consequential damages
-*  arising out of any use of the software or documentation, even if
-*  advised of the possibility of such damage.
-*
-*  Permission is hereby granted to use, copy, modify, and distribute
-*  this source code, or portions hereof, documentation and executables,
-*  for any purpose, without fee, subject to the following restrictions:
-*
-*  1. The origin of this source code must not be misrepresented.
-*  2. Altered versions must be plainly marked as such and must
-*     not be misrepresented as being the original source.
-*  3. This Copyright notice may not be removed or altered from any
-*     source or altered source distribution.
-* 
-*  The copyright holders and contributing author(s) specifically
-*  permit, without fee, and encourage the use of this source code
-*  as a component for supporting the Hypertext Markup Language in
-*  commercial products. If you use this source code in a product,
-*  acknowledgment is not required but would be appreciated.
-*
-*/
diff --git a/lucene/contrib/ant/lib/jtidy-r938.jar b/lucene/contrib/ant/lib/jtidy-r938.jar
deleted file mode 100644
index 6931b0db378..00000000000
--- a/lucene/contrib/ant/lib/jtidy-r938.jar
+++ /dev/null
@@ -1,2 +0,0 @@
-AnyObjectId[efde902f3e9b180ee7bed0e849b4be8d6c15aaff] was removed in git history.
-Apache SVN contains full history.
\ No newline at end of file
diff --git a/lucene/contrib/ant/src/java/org/apache/lucene/ant/ConfigurableDocumentHandler.java b/lucene/contrib/ant/src/java/org/apache/lucene/ant/ConfigurableDocumentHandler.java
deleted file mode 100644
index 741f61d405c..00000000000
--- a/lucene/contrib/ant/src/java/org/apache/lucene/ant/ConfigurableDocumentHandler.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package org.apache.lucene.ant;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.util.Properties;
-
-public interface ConfigurableDocumentHandler extends DocumentHandler {
-    void configure(Properties props);
-}
diff --git a/lucene/contrib/ant/src/java/org/apache/lucene/ant/DocumentHandler.java b/lucene/contrib/ant/src/java/org/apache/lucene/ant/DocumentHandler.java
deleted file mode 100644
index 6fecde517e7..00000000000
--- a/lucene/contrib/ant/src/java/org/apache/lucene/ant/DocumentHandler.java
+++ /dev/null
@@ -1,40 +0,0 @@
-package org.apache.lucene.ant;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.lucene.document.Document;
-
-import java.io.File;
-
-/**
- *  Allows a class to act as a Lucene document handler
- *
- *@since    October 27, 2001
- */
-public interface DocumentHandler {
-
-    /**
-     *  Gets the document attribute of the DocumentHandler object
-     *
-     *@param  file  Description of Parameter
-     *@return       The document value
-     */
-    Document getDocument(File file)
-            throws DocumentHandlerException;
-}
-
diff --git a/lucene/contrib/ant/src/java/org/apache/lucene/ant/DocumentHandlerException.java b/lucene/contrib/ant/src/java/org/apache/lucene/ant/DocumentHandlerException.java
deleted file mode 100644
index 2973ae24d30..00000000000
--- a/lucene/contrib/ant/src/java/org/apache/lucene/ant/DocumentHandlerException.java
+++ /dev/null
@@ -1,83 +0,0 @@
-package org.apache.lucene.ant;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.PrintStream;
-import java.io.PrintWriter;
-
-
-public class DocumentHandlerException extends Exception {
-    private Throwable cause;
-
-    /**
-     * Default constructor.
-     */
-    public DocumentHandlerException() {
-        super();
-    }
-
-    /**
-     * Constructs with message.
-     */
-    public DocumentHandlerException(String message) {
-        super(message);
-    }
-
-    /**
-     * Constructs with chained exception.
-     */
-    public DocumentHandlerException(Throwable cause) {
-        super(cause.toString());
-        this.cause = cause;
-    }
-
-    /**
-     * Retrieves nested exception.
-     */
-    public Throwable getException() {
-        return cause;
-    }
-
-    @Override
-    public void printStackTrace() {
-        printStackTrace(System.err);
-    }
-
-    @Override
-    public void printStackTrace(PrintStream ps) {
-        synchronized (ps) {
-            super.printStackTrace(ps);
-            if (cause != null) {
-                ps.println("--- Nested Exception ---");
-                cause.printStackTrace(ps);
-            }
-        }
-    }
-
-    @Override
-    public void printStackTrace(PrintWriter pw) {
-        synchronized (pw) {
-            super.printStackTrace(pw);
-            if (cause != null) {
-                pw.println("--- Nested Exception ---");
-                cause.printStackTrace(pw);
-            }
-        }
-    }
-}
-
diff --git a/lucene/contrib/ant/src/java/org/apache/lucene/ant/FileExtensionDocumentHandler.java b/lucene/contrib/ant/src/java/org/apache/lucene/ant/FileExtensionDocumentHandler.java
deleted file mode 100644
index a9e6a791381..00000000000
--- a/lucene/contrib/ant/src/java/org/apache/lucene/ant/FileExtensionDocumentHandler.java
+++ /dev/null
@@ -1,65 +0,0 @@
-package org.apache.lucene.ant;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.lucene.document.Document;
-
-import java.io.File;
-
-/**
- *  A DocumentHandler implementation to delegate responsibility to
- *  based on a files extension.  Currently only .html and .txt
- *  files are handled, other extensions ignored.
- *
- *@since      October 28, 2001
- *TODO: Implement dynamic document type lookup
- */
-public class FileExtensionDocumentHandler
-        implements DocumentHandler {
-    /**
-     *  Gets the document attribute of the
-     *  FileExtensionDocumentHandler object
-     *
-     *@param  file                          Description of
-     *      Parameter
-     *@return                               The document value
-     *@exception  DocumentHandlerException  Description of
-     *      Exception
-     */
-    public Document getDocument(File file)
-            throws DocumentHandlerException {
-        Document doc = null;
-
-        String name = file.getName();
-
-        try {
-            if (name.endsWith(".txt")) {
-                doc = TextDocument.Document(file);
-            }
-
-            if (name.endsWith(".html")) {
-                doc = HtmlDocument.Document(file);
-            }
-        } catch (java.io.IOException e) {
-            throw new DocumentHandlerException(e);
-        }
-
-        return doc;
-    }
-}
-
diff --git a/lucene/contrib/ant/src/java/org/apache/lucene/ant/HtmlDocument.java b/lucene/contrib/ant/src/java/org/apache/lucene/ant/HtmlDocument.java
deleted file mode 100644
index 003e0ca6264..00000000000
--- a/lucene/contrib/ant/src/java/org/apache/lucene/ant/HtmlDocument.java
+++ /dev/null
@@ -1,294 +0,0 @@
-package org.apache.lucene.ant;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.lucene.document.Field;
-import org.w3c.dom.Element;
-import org.w3c.dom.Node;
-import org.w3c.dom.NodeList;
-import org.w3c.dom.Text;
-import org.w3c.tidy.Tidy;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.StringWriter;
-
-/**
- *  The HtmlDocument class creates a Lucene {@link
- *  org.apache.lucene.document.Document} from an HTML document.
- * - * It does this by using JTidy package. It can take input input - * from {@link java.io.File} or {@link java.io.InputStream}. - * - */ -public class HtmlDocument { - private Element rawDoc; - - - //------------------------------------------------------------- - // Constructors - //------------------------------------------------------------- - - /** - * Constructs an HtmlDocument from a {@link - * java.io.File}. - * - *@param file the File containing the - * HTML to parse - *@exception IOException if an I/O exception occurs - */ - public HtmlDocument(File file) throws IOException { - Tidy tidy = new Tidy(); - tidy.setQuiet(true); - tidy.setShowWarnings(false); - org.w3c.dom.Document root = null; - InputStream is = new FileInputStream(file); - try { - root = tidy.parseDOM(is, null); - } finally { - is.close(); - } - rawDoc = root.getDocumentElement(); - } - - - /** - * Constructs an HtmlDocument from an {@link - * java.io.InputStream}. - * - *@param is the InputStream - * containing the HTML - */ - public HtmlDocument(InputStream is) { - Tidy tidy = new Tidy(); - tidy.setQuiet(true); - tidy.setShowWarnings(false); - org.w3c.dom.Document root = tidy.parseDOM(is, null); - rawDoc = root.getDocumentElement(); - } - - - /** - * Constructs an HtmlDocument from a - * {@link java.io.File}. - * @param file the File containing the - * HTML to parse - * @param tidyConfigFile the String - * containing the full path to the Tidy config file - * @exception IOException if an I/O exception occurs */ - public HtmlDocument(File file, String tidyConfigFile) throws IOException { - Tidy tidy = new Tidy(); - tidy.setConfigurationFromFile(tidyConfigFile); - tidy.setQuiet(true); - tidy.setShowWarnings(false); - org.w3c.dom.Document root = - tidy.parseDOM(new FileInputStream(file), null); - rawDoc = root.getDocumentElement(); - } - - /** - * Creates a Lucene Document from a - * {@link java.io.File}. - * @param file - * @param tidyConfigFile the full path to the Tidy - * config file - * @exception IOException */ - public static org.apache.lucene.document.Document - Document(File file, String tidyConfigFile) throws IOException { - - HtmlDocument htmlDoc = new HtmlDocument(file, tidyConfigFile); - - org.apache.lucene.document.Document luceneDoc = new org.apache.lucene.document.Document(); - - luceneDoc.add(new Field("title", htmlDoc.getTitle(), Field.Store.YES, Field.Index.ANALYZED)); - luceneDoc.add(new Field("contents", htmlDoc.getBody(), Field.Store.YES, Field.Index.ANALYZED)); - - String contents = null; - BufferedReader br = - new BufferedReader(new FileReader(file)); - StringWriter sw = new StringWriter(); - String line = br.readLine(); - while (line != null) { - sw.write(line); - line = br.readLine(); - } - br.close(); - contents = sw.toString(); - sw.close(); - - luceneDoc.add(new Field("rawcontents", contents, Field.Store.YES, Field.Index.NO)); - - return luceneDoc; - } - - /** - * Creates a Lucene Document from an {@link - * java.io.InputStream}. 
- * - *@param is - */ - public static org.apache.lucene.document.Document - getDocument(InputStream is) { - HtmlDocument htmlDoc = new HtmlDocument(is); - org.apache.lucene.document.Document luceneDoc = - new org.apache.lucene.document.Document(); - - luceneDoc.add(new Field("title", htmlDoc.getTitle(), Field.Store.YES, Field.Index.ANALYZED)); - luceneDoc.add(new Field("contents", htmlDoc.getBody(), Field.Store.YES, Field.Index.ANALYZED)); - - return luceneDoc; - } - - - //------------------------------------------------------------- - // Public methods - //------------------------------------------------------------- - - /** - * Creates a Lucene Document from a {@link - * java.io.File}. - * - *@param file - *@exception IOException - */ - public static org.apache.lucene.document.Document - Document(File file) throws IOException { - HtmlDocument htmlDoc = new HtmlDocument(file); - org.apache.lucene.document.Document luceneDoc = - new org.apache.lucene.document.Document(); - - luceneDoc.add(new Field("title", htmlDoc.getTitle(), Field.Store.YES, Field.Index.ANALYZED)); - luceneDoc.add(new Field("contents", htmlDoc.getBody(), Field.Store.YES, Field.Index.ANALYZED)); - - String contents = null; - BufferedReader br = - new BufferedReader(new FileReader(file)); - StringWriter sw = new StringWriter(); - String line = br.readLine(); - while (line != null) { - sw.write(line); - line = br.readLine(); - } - br.close(); - contents = sw.toString(); - sw.close(); - - luceneDoc.add(new Field("rawcontents", contents, Field.Store.YES, Field.Index.NO)); - - return luceneDoc; - } - - - //------------------------------------------------------------- - // Private methods - //------------------------------------------------------------- - - /** - * Runs HtmlDocument on the files specified on - * the command line. - * - *@param args Command line arguments - *@exception Exception Description of Exception - */ - public static void main(String args[]) throws Exception { -// HtmlDocument doc = new HtmlDocument(new File(args[0])); -// System.out.println("Title = " + doc.getTitle()); -// System.out.println("Body = " + doc.getBody()); - - HtmlDocument doc = - new HtmlDocument(new FileInputStream(new File(args[0]))); - System.out.println("Title = " + doc.getTitle()); - System.out.println("Body = " + doc.getBody()); - } - - - /** - * Gets the title attribute of the HtmlDocument - * object. - * - *@return the title value - */ - public String getTitle() { - if (rawDoc == null) { - return null; - } - - String title = ""; - - NodeList nl = rawDoc.getElementsByTagName("title"); - if (nl.getLength() > 0) { - Element titleElement = ((Element) nl.item(0)); - Text text = (Text) titleElement.getFirstChild(); - if (text != null) { - title = text.getData(); - } - } - return title; - } - - - /** - * Gets the bodyText attribute of the - * HtmlDocument object. - * - *@return the bodyText value - */ - public String getBody() { - if (rawDoc == null) { - return null; - } - - String body = ""; - NodeList nl = rawDoc.getElementsByTagName("body"); - if (nl.getLength() > 0) { - body = getBodyText(nl.item(0)); - } - return body; - } - - - /** - * Gets the bodyText attribute of the - * HtmlDocument object. 
- * - *@param node a DOM Node - *@return The bodyText value - */ - private String getBodyText(Node node) { - NodeList nl = node.getChildNodes(); - StringBuilder buffer = new StringBuilder(); - for (int i = 0; i < nl.getLength(); i++) { - Node child = nl.item(i); - switch (child.getNodeType()) { - case Node.ELEMENT_NODE: - buffer.append(getBodyText(child)); - buffer.append(" "); - break; - case Node.TEXT_NODE: - buffer.append(((Text) child).getData()); - break; - } - } - return buffer.toString(); - } -} - diff --git a/lucene/contrib/ant/src/java/org/apache/lucene/ant/IndexTask.java b/lucene/contrib/ant/src/java/org/apache/lucene/ant/IndexTask.java deleted file mode 100644 index 9e1c7480df5..00000000000 --- a/lucene/contrib/ant/src/java/org/apache/lucene/ant/IndexTask.java +++ /dev/null @@ -1,444 +0,0 @@ -package org.apache.lucene.ant; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import java.io.File; -import java.io.IOException; -import java.text.ParseException; -import java.util.Date; -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; -import java.util.Properties; -import java.util.Set; -import java.util.Vector; -import java.lang.reflect.Constructor; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.core.SimpleAnalyzer; -import org.apache.lucene.analysis.core.StopAnalyzer; -import org.apache.lucene.analysis.core.WhitespaceAnalyzer; -import org.apache.lucene.analysis.standard.StandardAnalyzer; -import org.apache.lucene.document.DateTools; -import org.apache.lucene.document.Document; -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.TieredMergePolicy; -import org.apache.lucene.index.Term; -import org.apache.lucene.index.IndexWriterConfig.OpenMode; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.ScoreDoc; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.store.FSDirectory; -import org.apache.lucene.util.Version; -import org.apache.tools.ant.BuildException; -import org.apache.tools.ant.DynamicConfigurator; -import org.apache.tools.ant.Project; -import org.apache.tools.ant.Task; -import org.apache.tools.ant.types.EnumeratedAttribute; -import org.apache.tools.ant.types.FileSet; -import org.apache.tools.ant.types.Resource; -import org.apache.tools.ant.types.ResourceCollection; -import org.apache.tools.ant.types.resources.FileResource; - -/** - * Ant task to index files with Lucene - * - */ -public class IndexTask extends Task { - /** - * resources - */ - protected Vector rcs = new Vector(); - - /** - * overwrite index? 
- */ - private boolean overwrite = false; - - /** - * index path - */ - private File indexDir; - - /** - * document handler classname - */ - private String handlerClassName = - FileExtensionDocumentHandler.class.getName(); - - /** - * document handler instance - */ - private DocumentHandler handler; - - - /** - * - */ - private String analyzerClassName = - StandardAnalyzer.class.getName(); - - /** - * analyzer instance - */ - private Analyzer analyzer; - - /** - * Lucene merge factor - */ - private int mergeFactor = 20; - - private HandlerConfig handlerConfig; - - private boolean useCompoundIndex = true; - - - /** - * Creates new instance - */ - public IndexTask() { - } - - - /** - * Specifies the directory where the index will be stored - */ - public void setIndex(File indexDir) { - this.indexDir = indexDir; - } - - - /** - * Sets the mergeFactor attribute of the IndexTask object - * - *@param mergeFactor The new mergeFactor value - */ - public void setMergeFactor(int mergeFactor) { - this.mergeFactor = mergeFactor; - } - - - /** - * Sets the overwrite attribute of the IndexTask object - * - *@param overwrite The new overwrite value - */ - public void setOverwrite(boolean overwrite) { - this.overwrite = overwrite; - } - - - /** - * If creating a new index and this is set to true, the - * index will be created in compound format. - */ - public void setUseCompoundIndex(boolean useCompoundIndex) { - this.useCompoundIndex = useCompoundIndex; - } - - /** - * Sets the documentHandler attribute of the IndexTask object - * - *@param classname The new documentHandler value - */ - public void setDocumentHandler(String classname) { - handlerClassName = classname; - } - - /** - * Sets the analyzer based on the builtin Lucene analyzer types. - * - * TODO: Enforce analyzer and analyzerClassName to be mutually exclusive - */ - public void setAnalyzer(AnalyzerType type) { - analyzerClassName = type.getClassname(); - } - - public void setAnalyzerClassName(String classname) { - analyzerClassName = classname; - } - - /** - * Adds a set of files (nested fileset attribute). - * - *@param set FileSet to be added - */ - public void addFileset(FileSet set) { - add(set); - } - - /** - * Add a collection of files to copy. - * @param res a resource collection to copy. - * @since Ant 1.7 - */ - public void add(ResourceCollection res) { - rcs.add(res); - } - - /** - * Sets custom properties for a configurable document handler. 
- */ - public void addConfig(HandlerConfig config) throws BuildException { - if (handlerConfig != null) { - throw new BuildException("Only one config element allowed"); - } - - handlerConfig = config; - } - - private static final Analyzer createAnalyzer(String className) throws Exception{ - final Class clazz = Class.forName(className).asSubclass(Analyzer.class); - try { - // first try to use a ctor with version parameter (needed for many new Analyzers that have no default one anymore - Constructor cnstr = clazz.getConstructor(Version.class); - return cnstr.newInstance(Version.LUCENE_CURRENT); - } catch (NoSuchMethodException nsme) { - // otherwise use default ctor - return clazz.newInstance(); - } - } - - /** - * Begins the indexing - * - *@exception BuildException If an error occurs indexing the - * fileset - */ - @Override - public void execute() throws BuildException { - - // construct handler and analyzer dynamically - try { - handler = Class.forName(handlerClassName).asSubclass(DocumentHandler.class).newInstance(); - - analyzer = IndexTask.createAnalyzer(analyzerClassName); - } catch (Exception e) { - throw new BuildException(e); - } - - log("Document handler = " + handler.getClass(), Project.MSG_VERBOSE); - log("Analyzer = " + analyzer.getClass(), Project.MSG_VERBOSE); - - if (handler instanceof ConfigurableDocumentHandler) { - ((ConfigurableDocumentHandler) handler).configure(handlerConfig.getProperties()); - } - - try { - indexDocs(); - } catch (IOException e) { - throw new BuildException(e); - } - } - - - /** - * Index the fileset. - * - *@exception IOException if Lucene I/O exception - *TODO: refactor!!!!! - */ - private void indexDocs() throws IOException { - Date start = new Date(); - - boolean create = overwrite; - // If the index directory doesn't exist, - // create it and force create mode - if (indexDir.mkdirs() && !overwrite) { - create = true; - } - - FSDirectory dir = FSDirectory.open(indexDir); - try { - IndexSearcher searcher = null; - boolean checkLastModified = false; - if (!create) { - try { - searcher = new IndexSearcher(dir, true); - checkLastModified = true; - } catch (IOException ioe) { - log("IOException: " + ioe.getMessage()); - // Empty - ignore, which indicates to index all - // documents - } - } - - log("checkLastModified = " + checkLastModified, Project.MSG_VERBOSE); - - IndexWriterConfig conf = new IndexWriterConfig( - Version.LUCENE_CURRENT, analyzer).setOpenMode( - create ? 
OpenMode.CREATE : OpenMode.APPEND); - TieredMergePolicy tmp = (TieredMergePolicy) conf.getMergePolicy(); - tmp.setUseCompoundFile(useCompoundIndex); - tmp.setMaxMergeAtOnce(mergeFactor); - IndexWriter writer = new IndexWriter(dir, conf); - int totalFiles = 0; - int totalIndexed = 0; - int totalIgnored = 0; - try { - - for (int i = 0; i < rcs.size(); i++) { - ResourceCollection rc = rcs.elementAt(i); - if (rc.isFilesystemOnly()) { - Iterator resources = rc.iterator(); - while (resources.hasNext()) { - Resource r = (Resource) resources.next(); - if (!r.isExists() || !(r instanceof FileResource)) { - continue; - } - - totalFiles++; - - File file = ((FileResource) r).getFile(); - - if (!file.exists() || !file.canRead()) { - throw new BuildException("File \"" + - file.getAbsolutePath() - + "\" does not exist or is not readable."); - } - - boolean indexIt = true; - - if (checkLastModified) { - Term pathTerm = - new Term("path", file.getPath()); - TermQuery query = - new TermQuery(pathTerm); - ScoreDoc[] hits = searcher.search(query, null, 1).scoreDocs; - - // if document is found, compare the - // indexed last modified time with the - // current file - // - don't index if up to date - if (hits.length > 0) { - Document doc = searcher.doc(hits[0].doc); - String indexModified = - doc.get("modified").trim(); - if (indexModified != null) { - long lastModified = 0; - try { - lastModified = DateTools.stringToTime(indexModified); - } catch (ParseException e) { - // if modified time is not parsable, skip - } - if (lastModified == file.lastModified()) { - // TODO: remove existing document - indexIt = false; - } - } - } - } - - if (indexIt) { - try { - log("Indexing " + file.getPath(), - Project.MSG_VERBOSE); - Document doc = - handler.getDocument(file); - - if (doc == null) { - totalIgnored++; - } else { - // Add the path of the file as a field named "path". Use a Keyword field, so - // that the index stores the path, and so that the path is searchable - doc.add(new Field("path", file.getPath(), Field.Store.YES, Field.Index.NOT_ANALYZED)); - - // Add the last modified date of the file a field named "modified". Use a - // Keyword field, so that it's searchable, but so that no attempt is made - // to tokenize the field into words. - doc.add(new Field("modified", DateTools.timeToString(file.lastModified(), DateTools.Resolution.MILLISECOND), Field.Store.YES, Field.Index.NOT_ANALYZED)); - - writer.addDocument(doc); - totalIndexed++; - } - } catch (DocumentHandlerException e) { - throw new BuildException(e); - } - } - } - // for j - } - // if (fs != null) - } - // for i - - writer.optimize(); - } - //try - finally { - // always make sure everything gets closed, - // no matter how we exit. 
- writer.close(); - if (searcher != null) { - searcher.close(); - } - } - - Date end = new Date(); - - log(totalIndexed + " out of " + totalFiles + " indexed (" + - totalIgnored + " ignored) in " + (end.getTime() - start.getTime()) + - " milliseconds"); - } finally { - dir.close(); - } - } - - public static class HandlerConfig implements DynamicConfigurator { - Properties props = new Properties(); - - public void setDynamicAttribute(String attributeName, String value) throws BuildException { - props.setProperty(attributeName, value); - } - - public Object createDynamicElement(String elementName) throws BuildException { - throw new BuildException("Sub elements not supported"); - } - - public Properties getProperties() { - return props; - } - } - - public static class AnalyzerType extends EnumeratedAttribute { - private static Map analyzerLookup = new HashMap(); - - static { - analyzerLookup.put("simple", SimpleAnalyzer.class.getName()); - analyzerLookup.put("standard", StandardAnalyzer.class.getName()); - analyzerLookup.put("stop", StopAnalyzer.class.getName()); - analyzerLookup.put("whitespace", WhitespaceAnalyzer.class.getName()); - } - - /** - * @see EnumeratedAttribute#getValues - */ - @Override - public String[] getValues() { - Set keys = analyzerLookup.keySet(); - return keys.toArray(new String[0]); - } - - public String getClassname() { - return analyzerLookup.get(getValue()); - } - } -} - diff --git a/lucene/contrib/ant/src/java/org/apache/lucene/ant/TextDocument.java b/lucene/contrib/ant/src/java/org/apache/lucene/ant/TextDocument.java deleted file mode 100644 index 26146adaec1..00000000000 --- a/lucene/contrib/ant/src/java/org/apache/lucene/ant/TextDocument.java +++ /dev/null @@ -1,98 +0,0 @@ -package org.apache.lucene.ant; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import org.apache.lucene.document.Document; -import org.apache.lucene.document.Field; - -import java.io.BufferedReader; -import java.io.File; -import java.io.FileReader; -import java.io.IOException; -import java.io.StringWriter; - -/** - * A utility for making Lucene Documents from a File. - * - *@since December 6, 2001 - *TODO: Fix JavaDoc comments here - */ - -public class TextDocument { - private String contents; - - - /** - * Constructor for the TextDocument object - * - *@param file Description of Parameter - *@exception IOException Description of Exception - */ - public TextDocument(File file) throws IOException { - BufferedReader br = - new BufferedReader(new FileReader(file)); - StringWriter sw = new StringWriter(); - - String line = br.readLine(); - while (line != null) { - sw.write(line); - line = br.readLine(); - } - br.close(); - - contents = sw.toString(); - sw.close(); - } - - - /** - * Makes a document for a File.

- *
- *  The document has a single field:
- *
  • contents--containing the full contents - * of the file, as a Text field; - * - *@param f Description of Parameter - *@return Description of the Returned Value - *@exception IOException Description of Exception - */ - public static Document Document(File f) throws IOException { - - TextDocument textDoc = new TextDocument(f); - // make a new, empty document - Document doc = new Document(); - - doc.add(new Field("title", f.getName(), Field.Store.YES, Field.Index.ANALYZED)); - doc.add(new Field("contents", textDoc.getContents(), Field.Store.YES, Field.Index.ANALYZED)); - doc.add(new Field("rawcontents", textDoc.getContents(), Field.Store.YES, Field.Index.NO)); - - // return the document - return doc; - } - - - /** - *@return The contents value - *TODO: finish this method - */ - public String getContents() { - return contents; - } -} - diff --git a/lucene/contrib/ant/src/java/org/apache/lucene/ant/package.html b/lucene/contrib/ant/src/java/org/apache/lucene/ant/package.html deleted file mode 100644 index f076cfcaf4e..00000000000 --- a/lucene/contrib/ant/src/java/org/apache/lucene/ant/package.html +++ /dev/null @@ -1,22 +0,0 @@ - - - - -Ant task to create Lucene indexes. - - diff --git a/lucene/contrib/ant/src/java/overview.html b/lucene/contrib/ant/src/java/overview.html deleted file mode 100644 index d81244ebe8f..00000000000 --- a/lucene/contrib/ant/src/java/overview.html +++ /dev/null @@ -1,26 +0,0 @@ - - - - - Apache Lucene Ant task to create Lucene indexes. - - - - Ant task to create Lucene indexes. - - \ No newline at end of file diff --git a/lucene/contrib/ant/src/resources/org/apache/lucene/ant/antlib.xml b/lucene/contrib/ant/src/resources/org/apache/lucene/ant/antlib.xml deleted file mode 100644 index 89315608c19..00000000000 --- a/lucene/contrib/ant/src/resources/org/apache/lucene/ant/antlib.xml +++ /dev/null @@ -1,21 +0,0 @@ - - - - - - diff --git a/lucene/contrib/ant/src/test/org/apache/lucene/ant/DocumentTestCase.java b/lucene/contrib/ant/src/test/org/apache/lucene/ant/DocumentTestCase.java deleted file mode 100644 index e0c11eee695..00000000000 --- a/lucene/contrib/ant/src/test/org/apache/lucene/ant/DocumentTestCase.java +++ /dev/null @@ -1,36 +0,0 @@ -package org.apache.lucene.ant; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import java.io.File; -import java.io.IOException; -import java.net.URLDecoder; - -import org.apache.lucene.util.LuceneTestCase; - -public abstract class DocumentTestCase extends LuceneTestCase -{ - protected File getFile(String filename) throws IOException { - String fullname = - this.getClass().getResource(filename).getFile(); - - File file = new File(URLDecoder.decode(fullname, "UTF-8")); - - return file; - } -} diff --git a/lucene/contrib/ant/src/test/org/apache/lucene/ant/HtmlDocumentTest.java b/lucene/contrib/ant/src/test/org/apache/lucene/ant/HtmlDocumentTest.java deleted file mode 100644 index 149cc0d9d7e..00000000000 --- a/lucene/contrib/ant/src/test/org/apache/lucene/ant/HtmlDocumentTest.java +++ /dev/null @@ -1,44 +0,0 @@ -package org.apache.lucene.ant; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import org.apache.lucene.ant.DocumentTestCase; -import org.apache.lucene.ant.HtmlDocument; - -public class HtmlDocumentTest extends DocumentTestCase -{ - HtmlDocument doc; - - @Override - public void setUp() throws Exception { - super.setUp(); - doc = new HtmlDocument(getFile("test.html")); - } - - public void testDoc() { - assertEquals("Title", "Test Title", doc.getTitle()); - assertTrue("Body", doc.getBody().startsWith("This is some test")); - } - - @Override - public void tearDown() throws Exception { - doc = null; - super.tearDown(); - } -} - diff --git a/lucene/contrib/ant/src/test/org/apache/lucene/ant/IndexTaskTest.java b/lucene/contrib/ant/src/test/org/apache/lucene/ant/IndexTaskTest.java deleted file mode 100644 index ffe205f50f1..00000000000 --- a/lucene/contrib/ant/src/test/org/apache/lucene/ant/IndexTaskTest.java +++ /dev/null @@ -1,95 +0,0 @@ -package org.apache.lucene.ant; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import java.io.File; -import java.io.IOException; // javadoc - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.core.StopAnalyzer; -import org.apache.lucene.queryParser.QueryParser; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.store.Directory; -import org.apache.tools.ant.Project; -import org.apache.tools.ant.types.FileSet; -import org.apache.lucene.util.LuceneTestCase; - -/** - * Test cases for index task - * - */ -public class IndexTaskTest extends LuceneTestCase { - private final static String docHandler = - "org.apache.lucene.ant.FileExtensionDocumentHandler"; - - private IndexSearcher searcher; - private Analyzer analyzer; - private Directory dir; - - - /** - * The JUnit setup method - * - *@exception IOException Description of Exception - */ - @Override - public void setUp() throws Exception { - super.setUp(); - // slightly hackish way to get the src/test dir - String docsDir = getDataFile("test.txt").getParent(); - File indexDir = TEMP_DIR; - Project project = new Project(); - - IndexTask task = new IndexTask(); - FileSet fs = new FileSet(); - fs.setProject(project); - fs.setDir(new File(docsDir)); - task.addFileset(fs); - task.setOverwrite(true); - task.setDocumentHandler(docHandler); - task.setIndex(indexDir); - task.setProject(project); - task.execute(); - - dir = newFSDirectory(indexDir); - searcher = new IndexSearcher(dir, true); - analyzer = new StopAnalyzer(TEST_VERSION_CURRENT); - } - - - public void testSearch() throws Exception { - Query query = new QueryParser(TEST_VERSION_CURRENT, "contents",analyzer).parse("test"); - - int numHits = searcher.search(query, null, 1000).totalHits; - - assertEquals("Find document(s)", 2, numHits); - } - - /** - * The teardown method for JUnit - * TODO: remove indexDir? - */ - @Override - public void tearDown() throws Exception { - searcher.close(); - dir.close(); - super.tearDown(); - } -} - diff --git a/lucene/contrib/ant/src/test/org/apache/lucene/ant/TextDocumentTest.java b/lucene/contrib/ant/src/test/org/apache/lucene/ant/TextDocumentTest.java deleted file mode 100644 index b3f6b9db850..00000000000 --- a/lucene/contrib/ant/src/test/org/apache/lucene/ant/TextDocumentTest.java +++ /dev/null @@ -1,43 +0,0 @@ -package org.apache.lucene.ant; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import org.apache.lucene.ant.DocumentTestCase; -import org.apache.lucene.ant.TextDocument; - -public class TextDocumentTest extends DocumentTestCase -{ - TextDocument doc; - - @Override - public void setUp() throws Exception { - super.setUp(); - doc = new TextDocument(getFile("test.txt")); - } - - public void testDoc() { - assertEquals("Contents", "Test Contents", doc.getContents()); - } - - @Override - public void tearDown() throws Exception { - doc = null; - super.tearDown(); - } -} - diff --git a/lucene/contrib/ant/src/test/org/apache/lucene/ant/test.html b/lucene/contrib/ant/src/test/org/apache/lucene/ant/test.html deleted file mode 100644 index b743a12449c..00000000000 --- a/lucene/contrib/ant/src/test/org/apache/lucene/ant/test.html +++ /dev/null @@ -1,7 +0,0 @@ - - - Test Title - - - This is sometest - diff --git a/lucene/contrib/ant/src/test/org/apache/lucene/ant/test.txt b/lucene/contrib/ant/src/test/org/apache/lucene/ant/test.txt deleted file mode 100644 index bd76ed9bbf0..00000000000 --- a/lucene/contrib/ant/src/test/org/apache/lucene/ant/test.txt +++ /dev/null @@ -1 +0,0 @@ -Test Contents diff --git a/lucene/contrib/db/bdb-je/build.xml b/lucene/contrib/db/bdb-je/build.xml deleted file mode 100644 index cc8c1c8d96b..00000000000 --- a/lucene/contrib/db/bdb-je/build.xml +++ /dev/null @@ -1,50 +0,0 @@ - - - - - - Lucene Berkeley DB Java Edition integration - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lucene/contrib/db/bdb-je/lib/je-3.3.93.jar b/lucene/contrib/db/bdb-je/lib/je-3.3.93.jar deleted file mode 100644 index 4ceafc9209a..00000000000 --- a/lucene/contrib/db/bdb-je/lib/je-3.3.93.jar +++ /dev/null @@ -1,2 +0,0 @@ -AnyObjectId[9a9ff077cdd36a96e7e0506986edd4e52b90a22f] was removed in git history. -Apache SVN contains full history. \ No newline at end of file diff --git a/lucene/contrib/db/bdb-je/lib/je-LICENSE-FAKE.txt b/lucene/contrib/db/bdb-je/lib/je-LICENSE-FAKE.txt deleted file mode 100644 index a1defaa3da4..00000000000 --- a/lucene/contrib/db/bdb-je/lib/je-LICENSE-FAKE.txt +++ /dev/null @@ -1 +0,0 @@ -No bdb jars are shipped with lucene. This is a fake license to work around the automated license checking. diff --git a/lucene/contrib/db/bdb-je/lib/je-NOTICE-FAKE.txt b/lucene/contrib/db/bdb-je/lib/je-NOTICE-FAKE.txt deleted file mode 100644 index a1defaa3da4..00000000000 --- a/lucene/contrib/db/bdb-je/lib/je-NOTICE-FAKE.txt +++ /dev/null @@ -1 +0,0 @@ -No bdb jars are shipped with lucene. This is a fake license to work around the automated license checking. diff --git a/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/Block.java b/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/Block.java deleted file mode 100644 index 143c9f86496..00000000000 --- a/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/Block.java +++ /dev/null @@ -1,83 +0,0 @@ -package org.apache.lucene.store.je; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import java.io.IOException; - -import com.sleepycat.je.DatabaseEntry; -import com.sleepycat.je.DatabaseException; - -/** - * Port of Andi Vajda's DbDirectory to Java Edition of Berkeley Database - * - */ - -public class Block extends Object { - protected DatabaseEntry key, data; - - protected Block(File file) throws IOException { - byte[] fileKey = file.getKey(); - - key = new DatabaseEntry(new byte[fileKey.length + 8]); - data = new DatabaseEntry(new byte[JEIndexOutput.BLOCK_LEN]); - - System.arraycopy(fileKey, 0, key.getData(), 0, fileKey.length); - seek(0L); - } - - protected byte[] getKey() { - return key.getData(); - } - - protected byte[] getData() { - return data.getData(); - } - - protected void seek(long position) throws IOException { - byte[] data = key.getData(); - int index = data.length - 8; - - position >>>= JEIndexOutput.BLOCK_SHIFT; - - data[index + 0] = (byte) (0xff & (position >>> 56)); - data[index + 1] = (byte) (0xff & (position >>> 48)); - data[index + 2] = (byte) (0xff & (position >>> 40)); - data[index + 3] = (byte) (0xff & (position >>> 32)); - data[index + 4] = (byte) (0xff & (position >>> 24)); - data[index + 5] = (byte) (0xff & (position >>> 16)); - data[index + 6] = (byte) (0xff & (position >>> 8)); - data[index + 7] = (byte) (0xff & (position >>> 0)); - } - - protected void get(JEDirectory directory) throws IOException { - try { - // TODO check LockMode - directory.blocks.get(directory.txn, key, data, null); - } catch (DatabaseException e) { - throw new IOException(e.getMessage()); - } - } - - protected void put(JEDirectory directory) throws IOException { - try { - directory.blocks.put(directory.txn, key, data); - } catch (DatabaseException e) { - throw new IOException(e.getMessage()); - } - } -} diff --git a/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/File.java b/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/File.java deleted file mode 100644 index b34e3579326..00000000000 --- a/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/File.java +++ /dev/null @@ -1,237 +0,0 @@ -package org.apache.lucene.store.je; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.util.Random; - -import com.sleepycat.je.Cursor; -import com.sleepycat.je.Database; -import com.sleepycat.je.DatabaseEntry; -import com.sleepycat.je.DatabaseException; -import com.sleepycat.je.OperationStatus; -import com.sleepycat.je.Transaction; - -/** - * Port of Andi Vajda's DbDirectory to Java Edition of Berkeley Database - * - */ - -public class File extends Object { - - static protected Random random = new Random(); - - protected DatabaseEntry key, data; - - protected long length, timeModified; - - protected String name; - - protected byte[] uuid; - - protected File(String name) throws IOException { - setName(name); - - data = new DatabaseEntry(new byte[32]); - } - - protected File(JEDirectory directory, String name, boolean create) - throws IOException { - this(name); - - if (!exists(directory)) { - if (!create) - throw new IOException("File does not exist: " + name); - else { - DatabaseEntry key = new DatabaseEntry(new byte[24]); - DatabaseEntry data = new DatabaseEntry(null); - Database blocks = directory.blocks; - Transaction txn = directory.txn; - - data.setPartial(true); - - uuid = new byte[16]; - - try { - do { - /* generate a v.4 random-uuid unique to this db */ - random.nextBytes(uuid); - uuid[6] = (byte) ((byte) 0x40 | (uuid[6] & (byte) 0x0f)); - uuid[8] = (byte) ((byte) 0x80 | (uuid[8] & (byte) 0x3f)); - System.arraycopy(uuid, 0, key.getData(), 0, 16); - // TODO check LockMode - } while (blocks.get(txn, key, data, null) != OperationStatus.NOTFOUND); - } catch (DatabaseException e) { - throw new IOException(e.getMessage()); - } - } - } else if (create) - length = 0L; - } - - protected String getName() { - return name; - } - - private void setName(String name) throws IOException { - ByteArrayOutputStream buffer = new ByteArrayOutputStream(128); - DataOutputStream out = new DataOutputStream(buffer); - - out.writeUTF(name); - out.close(); - - key = new DatabaseEntry(buffer.toByteArray()); - this.name = name; - } - - protected byte[] getKey() throws IOException { - if (uuid == null) - throw new IOException("Uninitialized file"); - - return uuid; - } - - protected long getLength() { - return length; - } - - protected long getTimeModified() { - return timeModified; - } - - protected boolean exists(JEDirectory directory) throws IOException { - Database files = directory.files; - Transaction txn = directory.txn; - try { - // TODO check LockMode - if (files.get(txn, key, data, null) == OperationStatus.NOTFOUND) - return false; - } catch (DatabaseException e) { - throw new IOException(e.getMessage()); - } - - byte[] bytes = data.getData(); - ByteArrayInputStream buffer = new ByteArrayInputStream(bytes); - DataInputStream in = new DataInputStream(buffer); - - length = in.readLong(); - timeModified = in.readLong(); - in.close(); - - uuid = new byte[16]; - System.arraycopy(bytes, 16, uuid, 0, 16); - - return true; - } - - protected void modify(JEDirectory directory, long length, long timeModified) - throws IOException { - ByteArrayOutputStream buffer = new ByteArrayOutputStream(32); - DataOutputStream out = new DataOutputStream(buffer); - Database files = directory.files; - Transaction txn = directory.txn; - - out.writeLong(length); - out.writeLong(timeModified); - out.write(getKey()); - out.close(); - - System.arraycopy(buffer.toByteArray(), 0, data.getData(), 0, 32); - - try { - files.put(txn, 
key, data); - } catch (DatabaseException e) { - throw new IOException(e.getMessage()); - } - - this.length = length; - this.timeModified = timeModified; - } - - protected void delete(JEDirectory directory) throws IOException { - if (!exists(directory)) - throw new IOException("File does not exist: " + getName()); - - Cursor cursor = null; - - try { - try { - byte[] bytes = getKey(); - int ulen = bytes.length + 8; - byte[] cursorBytes = new byte[ulen]; - DatabaseEntry cursorKey = new DatabaseEntry(cursorBytes); - DatabaseEntry cursorData = new DatabaseEntry(null); - Database files = directory.files; - Database blocks = directory.blocks; - Transaction txn = directory.txn; - - System.arraycopy(bytes, 0, cursorBytes, 0, bytes.length); - - cursorData.setPartial(true); - - cursor = blocks.openCursor(txn, null); - - if (cursor.getSearchKey(cursorKey, cursorData, null) != OperationStatus.NOTFOUND) { - cursor.delete(); - advance: while (cursor.getNext(cursorKey, cursorData, null) != OperationStatus.NOTFOUND) { - byte[] temp = cursorKey.getData(); - for (int i = 0; i < bytes.length; i++) - if (bytes[i] != temp[i]) { - break advance; - } - cursor.delete(); - } - } - - files.delete(txn, key); - } finally { - if (cursor != null) - cursor.close(); - } - } catch (DatabaseException e) { - throw new IOException(e.getMessage()); - } - - } - - protected void rename(JEDirectory directory, String name) - throws IOException { - if (!exists(directory)) - throw new IOException("File does not exist: " + getName()); - - File newFile = new File(name); - - if (newFile.exists(directory)) - newFile.delete(directory); - - try { - Database files = directory.files; - Transaction txn = directory.txn; - - files.delete(txn, key); - setName(name); - files.put(txn, key, data); - } catch (DatabaseException e) { - throw new IOException(e.getMessage()); - } - } -} diff --git a/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/JEDirectory.java b/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/JEDirectory.java deleted file mode 100644 index 0bbeda51f8e..00000000000 --- a/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/JEDirectory.java +++ /dev/null @@ -1,213 +0,0 @@ -package org.apache.lucene.store.je; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import java.io.ByteArrayInputStream; -import java.io.DataInputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.util.*; - -import org.apache.lucene.store.Directory; -import org.apache.lucene.store.IndexInput; -import org.apache.lucene.store.IndexOutput; -import org.apache.lucene.store.Lock; - -import com.sleepycat.je.Cursor; -import com.sleepycat.je.Database; -import com.sleepycat.je.DatabaseEntry; -import com.sleepycat.je.DatabaseException; -import com.sleepycat.je.OperationStatus; -import com.sleepycat.je.Transaction; - -/** - * Port of Andi Vajda's DbDirectory to to Java Edition of Berkeley Database - * - * A JEDirectory is a Berkeley DB JE based implementation of - * {@link org.apache.lucene.store.Directory Directory}. It uses two - * {@link com.sleepycat.je.Database Db} database handles, one for storing file - * records and another for storing file data blocks. - * - */ - -public class JEDirectory extends Directory { - - protected Set openFiles = Collections.synchronizedSet(new HashSet()); - - protected Database files, blocks; - - protected Transaction txn; - - protected int flags; - - /** - * Instantiate a DbDirectory. The same threading rules that apply to - * Berkeley DB handles apply to instances of DbDirectory. - * - * @param txn - * a transaction handle that is going to be used for all db - * operations done by this instance. This parameter may be - * null. - * @param files - * a db handle to store file records. - * @param blocks - * a db handle to store file data blocks. - * @param flags - * flags used for db read operations. - */ - - public JEDirectory(Transaction txn, Database files, Database blocks, - int flags) { - super(); - - this.txn = txn; - this.files = files; - this.blocks = blocks; - this.flags = flags; - } - - public JEDirectory(Transaction txn, Database files, Database blocks) { - this(txn, files, blocks, 0); - } - - @Override - public void close() throws IOException { - flush(); - } - - /** - * Flush the currently open files. After they have been flushed it is safe - * to commit the transaction without closing this DbDirectory instance - * first. 
- * - * @see #setTransaction - */ - public void flush() throws IOException { - Iterator iterator = openFiles.iterator(); - - while (iterator.hasNext()) { - System.out - .println(iterator.next().file.getName()); - // ((IndexOutput) iterator.next()).flush(); - } - } - - @Override - public IndexOutput createOutput(String name) throws IOException { - return new JEIndexOutput(this, name, true); - } - - @Override - public void deleteFile(String name) throws IOException { - new File(name).delete(this); - } - - @Override - public boolean fileExists(String name) throws IOException { - return new File(name).exists(this); - } - - @Override - public long fileLength(String name) throws IOException { - File file = new File(name); - - if (file.exists(this)) - return file.getLength(); - - throw new FileNotFoundException(name); - } - - @Override - public long fileModified(String name) throws IOException { - File file = new File(name); - - if (file.exists(this)) - return file.getTimeModified(); - - throw new IOException("File does not exist: " + name); - } - - @Override - public String[] listAll() throws IOException { - Cursor cursor = null; - List list = new ArrayList(); - - try { - try { - DatabaseEntry key = new DatabaseEntry(new byte[0]); - DatabaseEntry data = new DatabaseEntry(null); - - data.setPartial(true); - // TODO see if cursor needs configuration - cursor = files.openCursor(txn, null); - // TODO see if LockMode should be set - if (cursor.getNext(key, data, null) != OperationStatus.NOTFOUND) { - ByteArrayInputStream buffer = new ByteArrayInputStream(key - .getData()); - DataInputStream in = new DataInputStream(buffer); - String name = in.readUTF(); - - in.close(); - list.add(name); - - while (cursor.getNext(key, data, null) != OperationStatus.NOTFOUND) { - buffer = new ByteArrayInputStream(key.getData()); - in = new DataInputStream(buffer); - name = in.readUTF(); - in.close(); - - list.add(name); - } - } - } finally { - if (cursor != null) - cursor.close(); - } - } catch (DatabaseException e) { - throw new IOException(e.getMessage()); - } - - return list.toArray(new String[list.size()]); - } - - @Override - public IndexInput openInput(String name) throws IOException { - return new JEIndexInput(this, name); - } - - @Override - public void sync(Collection names) throws IOException { - } - - @Override - public Lock makeLock(String name) { - return new JELock(); - } - - /** - * Once a transaction handle was committed it is no longer valid. In order - * to continue using this JEDirectory instance after a commit, the - * transaction handle has to be replaced. - * - * @param txn - * the new transaction handle to use - */ - public void setTransaction(Transaction txn) { - this.txn = txn; - } -} diff --git a/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/JEIndexInput.java b/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/JEIndexInput.java deleted file mode 100644 index ae6c3a90cec..00000000000 --- a/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/JEIndexInput.java +++ /dev/null @@ -1,141 +0,0 @@ -package org.apache.lucene.store.je; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import java.io.IOException; -import org.apache.lucene.store.IndexInput; - -/** - * Port of Andi Vajda's DbDirectory to Java Edition of Berkeley Database - * - */ - -public class JEIndexInput extends IndexInput { - - protected long position = 0L, length = 0L; - - protected JEDirectory directory; - - protected Block block; - - protected File file; - - protected JEIndexInput(JEDirectory directory, String name) - throws IOException { - super(); - - this.directory = directory; - - this.file = new File(name); - if (!file.exists(directory)) - throw new IOException("File does not exist: " + name); - - length = file.getLength(); - - block = new Block(file); - block.get(directory); - } - - @Override - public Object clone() { - try { - JEIndexInput clone = (JEIndexInput) super.clone(); - - clone.block = new Block(file); - clone.block.seek(position); - clone.block.get(directory); - - return clone; - } catch (IOException e) { - throw new RuntimeException(e.getMessage()); - } - } - - @Override - public void close() throws IOException { - } - - @Override - public long length() { - return length; - } - - @Override - public byte readByte() throws IOException { - if (position + 1 > length) - throw new IOException(file.getName() + ": Reading past end of file"); - - int blockPos = (int) (position++ & JEIndexOutput.BLOCK_MASK); - byte b = block.getData()[blockPos]; - - if (blockPos + 1 == JEIndexOutput.BLOCK_LEN) { - block.seek(position); - block.get(directory); - } - - return b; - } - - @Override - public void readBytes(byte[] b, int offset, int len) throws IOException { - if (position + len > length) - throw new IOException("Reading past end of file"); - else { - int blockPos = (int) (position & JEIndexOutput.BLOCK_MASK); - - while (blockPos + len >= JEIndexOutput.BLOCK_LEN) { - int blockLen = JEIndexOutput.BLOCK_LEN - blockPos; - - System - .arraycopy(block.getData(), blockPos, b, offset, - blockLen); - - len -= blockLen; - offset += blockLen; - position += blockLen; - - block.seek(position); - block.get(directory); - blockPos = 0; - } - - if (len > 0) { - System.arraycopy(block.getData(), blockPos, b, offset, len); - position += len; - } - } - } - - @Override - public void seek(long pos) throws IOException { - if (pos > length) - throw new IOException("seeking past end of file"); - - if ((pos >>> JEIndexOutput.BLOCK_SHIFT) != (position >>> JEIndexOutput.BLOCK_SHIFT)) { - block.seek(pos); - block.get(directory); - } - - position = pos; - } - - @Override - public long getFilePointer() { - return position; - } -} diff --git a/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/JEIndexOutput.java b/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/JEIndexOutput.java deleted file mode 100644 index 3c68f4617e0..00000000000 --- a/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/JEIndexOutput.java +++ /dev/null @@ -1,147 +0,0 @@ -package org.apache.lucene.store.je; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import java.io.IOException; - -import org.apache.lucene.store.IndexOutput; - -/** - * Port of Andi Vajda's DbDirectory to Java Edition of Berkeley Database - * - */ - -public class JEIndexOutput extends IndexOutput { - - /** - * The size of data blocks, currently 16k (2^14), is determined by this - * constant. - */ - static public final int BLOCK_SHIFT = 14; - - static public final int BLOCK_LEN = 1 << BLOCK_SHIFT; - - static public final int BLOCK_MASK = BLOCK_LEN - 1; - - protected long position = 0L, length = 0L; - - protected JEDirectory directory; - - protected Block block; - - protected File file; - - protected JEIndexOutput(JEDirectory directory, String name, boolean create) - throws IOException { - super(); - - this.directory = directory; - - file = new File(directory, name, create); - block = new Block(file); - length = file.getLength(); - - seek(length); - block.get(directory); - - directory.openFiles.add(this); - } - - @Override - public void close() throws IOException { - flush(); - file.modify(directory, length, System.currentTimeMillis()); - - directory.openFiles.remove(this); - } - - @Override - public void flush() throws IOException { - if (length > 0) - block.put(directory); - } - - @Override - public void writeByte(byte b) throws IOException { - int blockPos = (int) (position++ & BLOCK_MASK); - - block.getData()[blockPos] = b; - - if (blockPos + 1 == BLOCK_LEN) { - block.put(directory); - block.seek(position); - block.get(directory); - } - - if (position > length) - length = position; - } - - @Override - public void writeBytes(byte[] b, int offset, int len) throws IOException { - int blockPos = (int) (position & BLOCK_MASK); - - while (blockPos + len >= BLOCK_LEN) { - int blockLen = BLOCK_LEN - blockPos; - - System.arraycopy(b, offset, block.getData(), blockPos, blockLen); - block.put(directory); - - len -= blockLen; - offset += blockLen; - position += blockLen; - - block.seek(position); - block.get(directory); - blockPos = 0; - } - - if (len > 0) { - System.arraycopy(b, offset, block.getData(), blockPos, len); - position += len; - } - - if (position > length) - length = position; - } - - @Override - public long length() throws IOException { - return length; - } - - @Override - public void seek(long pos) throws IOException { - if (pos > length) - throw new IOException("seeking past end of file"); - - if ((pos >>> BLOCK_SHIFT) == (position >>> BLOCK_SHIFT)) - position = pos; - else { - block.put(directory); - block.seek(pos); - block.get(directory); - position = pos; - } - } - - @Override - public long getFilePointer() { - return position; - } -} diff --git a/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/JELock.java b/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/JELock.java deleted file mode 100644 index 24a61054bc8..00000000000 --- 
a/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/JELock.java +++ /dev/null @@ -1,53 +0,0 @@ -package org.apache.lucene.store.je; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import org.apache.lucene.store.Lock; - -/** - * Port of Andi Vajda's DbDirectory to Java Edition of Berkeley Database - * - */ - -public class JELock extends Lock { - - boolean isLocked = false; - - public JELock() - { - } - - @Override - public boolean obtain() - { - return (isLocked = true); - } - - @Override - public void release() - { - isLocked = false; - } - - @Override - public boolean isLocked() - { - return isLocked; - } -} - diff --git a/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/package.html b/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/package.html deleted file mode 100644 index 21d0a39cbe2..00000000000 --- a/lucene/contrib/db/bdb-je/src/java/org/apache/lucene/store/je/package.html +++ /dev/null @@ -1,22 +0,0 @@ - - - - -Berkeley DB Java Edition based implementation of {@link org.apache.lucene.store.Directory Directory}. - - diff --git a/lucene/contrib/db/bdb-je/src/java/overview.html b/lucene/contrib/db/bdb-je/src/java/overview.html deleted file mode 100644 index a74a2002e62..00000000000 --- a/lucene/contrib/db/bdb-je/src/java/overview.html +++ /dev/null @@ -1,26 +0,0 @@ - - - - - bdb-je - - - - bdb-je - - \ No newline at end of file diff --git a/lucene/contrib/db/bdb-je/src/test/org/apache/lucene/store/je/JEStoreTest.java b/lucene/contrib/db/bdb-je/src/test/org/apache/lucene/store/je/JEStoreTest.java deleted file mode 100644 index 827c5e34411..00000000000 --- a/lucene/contrib/db/bdb-je/src/test/org/apache/lucene/store/je/JEStoreTest.java +++ /dev/null @@ -1,636 +0,0 @@ -package org.apache.lucene.store.je; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import java.io.File; -import java.io.IOException; -import java.util.Arrays; -import java.util.Date; -import java.util.Random; - -import org.apache.lucene.store.Directory; -import org.apache.lucene.store.IndexInput; -import org.apache.lucene.store.IndexOutput; -import org.apache.lucene.util.LuceneTestCase; - -import com.sleepycat.je.Cursor; -import com.sleepycat.je.Database; -import com.sleepycat.je.DatabaseConfig; -import com.sleepycat.je.DatabaseEntry; -import com.sleepycat.je.DatabaseException; -import com.sleepycat.je.Environment; -import com.sleepycat.je.EnvironmentConfig; -import com.sleepycat.je.LockMode; -import com.sleepycat.je.OperationStatus; -import com.sleepycat.je.Transaction; - -/** - * Tests {@link JEDirectory}. - * - * Adapted from Andi Vajda's org.apache.lucene.db.DbStoreTest. - * - */ -public class JEStoreTest extends LuceneTestCase { - protected File dbHome = new File(TEMP_DIR,"index"); - - protected Environment env; - - protected Database index, blocks; - - @Override - public void setUp() throws Exception { - super.setUp(); - - if (!dbHome.exists()) - dbHome.mkdir(); - else { - File[] files = dbHome.listFiles(); - - for (int i = 0; i < files.length; i++) { - String name = files[i].getName(); - if (name.endsWith("jdb") || name.equals("je.lck")) - files[i].delete(); - } - } - - EnvironmentConfig envConfig = new EnvironmentConfig(); - DatabaseConfig dbConfig = new DatabaseConfig(); - - envConfig.setTransactional(true); - envConfig.setAllowCreate(true); - dbConfig.setAllowCreate(true); - dbConfig.setTransactional(true); - - env = new Environment(dbHome, envConfig); - - Transaction txn = null; - - try { - txn = env.beginTransaction(null, null); - index = env.openDatabase(txn, "__index__", dbConfig); - blocks = env.openDatabase(txn, "__blocks__", dbConfig); - } catch (DatabaseException e) { - if (txn != null) { - txn.abort(); - txn = null; - } - index = null; - blocks = null; - throw e; - } finally { - if (txn != null) - txn.commit(); - txn = null; - } - } - - @Override - public void tearDown() throws Exception { - - if (index != null) - index.close(); - if (blocks != null) - blocks.close(); - if (env != null) - env.close(); - super.tearDown(); - } - - public void testBytes() throws Exception { - final int count = 250; - final int LENGTH_MASK = 0xffff; - - Random r = random; - final long seed = r.nextLong(); - Random gen = new Random(seed); - int totalLength = 0; - int duration; - Date end; - - Date veryStart = new Date(); - Date start = new Date(); - Transaction txn = env.beginTransaction(null, null); - Directory store = null; - - if (VERBOSE) System.out.println("Writing files byte by byte"); - - try { - store = new JEDirectory(txn, index, blocks); - - for (int i = 0; i < count; i++) { - String name = i + ".dat"; - int length = gen.nextInt() & LENGTH_MASK; - IndexOutput file = store.createOutput(name); - - totalLength += length; - - for (int j = 0; j < length; j++) { - byte b = (byte) (gen.nextInt() & 0x7F); - file.writeByte(b); - } - - file.close(); - } - } catch (IOException e) { - txn.abort(); - txn = null; - throw e; - } finally { - if (txn != null) - txn.commit(); - - store.close(); - } - - end = new Date(); - - if (VERBOSE) { - duration = (int) (end.getTime() - start.getTime()); - System.out.print(duration); - System.out.print(" total milliseconds to create, "); - System.out.print(totalLength / duration); - System.out.println(" kb/s"); - } - - try { - txn = env.beginTransaction(null, null); - store = new JEDirectory(txn, index, blocks); - - gen = new 
Random(seed); - start = new Date(); - - for (int i = 0; i < count; i++) { - String name = i + ".dat"; - int length = gen.nextInt() & LENGTH_MASK; - IndexInput file = store.openInput(name); - - if (file.length() != length) - throw new Exception("length incorrect"); - - for (int j = 0; j < length; j++) { - byte b = (byte) (gen.nextInt() & 0x7F); - - if (file.readByte() != b) - throw new Exception("contents incorrect"); - } - - file.close(); - } - } catch (IOException e) { - txn.abort(); - txn = null; - throw e; - } catch (DatabaseException e) { - if (txn != null) { - txn.abort(); - txn = null; - } - throw e; - } finally { - if (txn != null) - txn.commit(); - - store.close(); - } - - end = new Date(); - - if (VERBOSE) { - duration = (int) (end.getTime() - start.getTime()); - System.out.print(duration); - System.out.print(" total milliseconds to read, "); - System.out.print(totalLength / duration); - System.out.println(" kb/s"); - } - - try { - txn = env.beginTransaction(null, null); - store = new JEDirectory(txn, index, blocks); - - gen = new Random(seed); - start = new Date(); - - for (int i = 0; i < count; i++) { - String name = i + ".dat"; - store.deleteFile(name); - } - } catch (IOException e) { - txn.abort(); - txn = null; - throw e; - } catch (DatabaseException e) { - if (txn != null) { - txn.abort(); - txn = null; - } - throw e; - } finally { - if (txn != null) - txn.commit(); - - store.close(); - } - - end = new Date(); - - if (VERBOSE) { - System.out.print(end.getTime() - start.getTime()); - System.out.println(" total milliseconds to delete"); - - System.out.print(end.getTime() - veryStart.getTime()); - System.out.println(" total milliseconds"); - } - } - - public void testDelete() throws Exception { - final int count = 250; - final int LENGTH_MASK = 0xffff; - - Random r = random; - final long seed = r.nextLong(); - Random gen = new Random(seed); - int totalLength = 0; - int duration; - Date end; - - Date veryStart = new Date(); - Date start = new Date(); - Transaction txn = env.beginTransaction(null, null); - Directory store = null; - - if (VERBOSE) System.out.println("Writing files byte by byte"); - - try { - store = new JEDirectory(txn, index, blocks); - - for (int i = 0; i < count; i++) { - String name = i + ".dat"; - int length = gen.nextInt() & LENGTH_MASK; - IndexOutput file = store.createOutput(name); - - totalLength += length; - - for (int j = 0; j < length; j++) { - byte b = (byte) (gen.nextInt() & 0x7F); - file.writeByte(b); - } - - file.close(); - } - } catch (IOException e) { - txn.abort(); - txn = null; - throw e; - } finally { - if (txn != null) - txn.commit(); - - store.close(); - } - - end = new Date(); - - if (VERBOSE) { - duration = (int) (end.getTime() - start.getTime()); - System.out.print(duration); - System.out.print(" total milliseconds to read, "); - System.out.print(totalLength / duration); - System.out.println(" kb/s"); - } - - try { - txn = env.beginTransaction(null, null); - store = new JEDirectory(txn, index, blocks); - - gen = new Random(seed); - start = new Date(); - - for (int i = 0; i < count; i++) { - if (i % 2 == 0) { - String name = i + ".dat"; - store.deleteFile(name); - } - } - } catch (IOException e) { - txn.abort(); - txn = null; - throw e; - } catch (DatabaseException e) { - if (txn != null) { - txn.abort(); - txn = null; - } - throw e; - } finally { - if (txn != null) - txn.commit(); - - store.close(); - } - - end = new Date(); - - if (VERBOSE) { - System.out.print(end.getTime() - start.getTime()); - System.out.println(" total milliseconds 
to delete even files"); - - duration = (int) (end.getTime() - start.getTime()); - System.out.print(duration); - System.out.print(" total milliseconds to create, "); - System.out.print(totalLength / duration); - System.out.println(" kb/s"); - } - - try { - txn = env.beginTransaction(null, null); - store = new JEDirectory(txn, index, blocks); - - gen = new Random(seed); - start = new Date(); - - for (int i = 0; i < count; i++) { - int length = gen.nextInt() & LENGTH_MASK; - - if (i % 2 != 0) { - String name = i + ".dat"; - IndexInput file = store.openInput(name); - if (file.length() != length) - throw new Exception("length incorrect"); - - for (int j = 0; j < length; j++) { - byte b = (byte) (gen.nextInt() & 0x7F); - - if (file.readByte() != b) - throw new Exception("contents incorrect"); - } - - file.close(); - } else { - for (int j = 0; j < length; j++) { - gen.nextInt(); - } - } - } - } catch (IOException e) { - txn.abort(); - txn = null; - throw e; - } catch (DatabaseException e) { - if (txn != null) { - txn.abort(); - txn = null; - } - throw e; - } finally { - if (txn != null) - txn.commit(); - - store.close(); - } - - end = new Date(); - - if (VERBOSE) { - duration = (int) (end.getTime() - start.getTime()); - System.out.print(duration); - System.out.print(" total milliseconds to read, "); - System.out.print(totalLength / duration); - System.out.println(" kb/s"); - } - - try { - txn = env.beginTransaction(null, null); - store = new JEDirectory(txn, index, blocks); - - gen = new Random(seed); - start = new Date(); - - for (int i = 0; i < count; i++) { - if (i % 2 != 0) { - String name = i + ".dat"; - store.deleteFile(name); - } - } - - } catch (IOException e) { - txn.abort(); - txn = null; - throw e; - } catch (DatabaseException e) { - if (txn != null) { - txn.abort(); - txn = null; - } - throw e; - } finally { - if (txn != null) - txn.commit(); - - store.close(); - } - - end = new Date(); - - if (VERBOSE) { - System.out.print(end.getTime() - start.getTime()); - System.out.println(" total milliseconds to delete"); - - System.out.print(end.getTime() - veryStart.getTime()); - System.out.println(" total milliseconds"); - } - - Cursor cursor = null; - try { - cursor = index.openCursor(null, null); - - DatabaseEntry foundKey = new DatabaseEntry(); - DatabaseEntry foundData = new DatabaseEntry(); - - if (cursor.getNext(foundKey, foundData, LockMode.DEFAULT) == OperationStatus.SUCCESS) { - fail("index database is not empty"); - } - } catch (DatabaseException e) { - throw e; - } finally { - if (cursor != null) - cursor.close(); - } - - cursor = null; - try { - cursor = blocks.openCursor(null, null); - - DatabaseEntry foundKey = new DatabaseEntry(); - DatabaseEntry foundData = new DatabaseEntry(); - - if (cursor.getNext(foundKey, foundData, LockMode.DEFAULT) == OperationStatus.SUCCESS) { - fail("blocks database is not empty"); - } - } catch (DatabaseException e) { - throw e; - } finally { - if (cursor != null) - cursor.close(); - } - } - - public void testArrays() throws Exception { - final int count = 250; - final int LENGTH_MASK = 0xffff; - - Random r = random; - final long seed = r.nextLong(); - Random gen = new Random(seed); - int totalLength = 0; - int duration; - Date end; - - Date veryStart = new Date(); - Date start = new Date(); - Transaction txn = env.beginTransaction(null, null); - Directory store = null; - - if (VERBOSE) System.out.println("Writing files as one byte array"); - - try { - store = new JEDirectory(txn, index, blocks); - - for (int i = 0; i < count; i++) { - String name = 
i + ".dat"; - int length = gen.nextInt() & LENGTH_MASK; - IndexOutput file = store.createOutput(name); - byte[] data = new byte[length]; - - totalLength += length; - gen.nextBytes(data); - file.writeBytes(data, length); - - file.close(); - } - } catch (IOException e) { - txn.abort(); - txn = null; - throw e; - } finally { - if (txn != null) - txn.commit(); - - store.close(); - } - - end = new Date(); - - if (VERBOSE) { - duration = (int) (end.getTime() - start.getTime()); - System.out.print(duration); - System.out.print(" total milliseconds to create, "); - System.out.print(totalLength / duration); - System.out.println(" kb/s"); - } - - try { - txn = env.beginTransaction(null, null); - store = new JEDirectory(txn, index, blocks); - - gen = new Random(seed); - start = new Date(); - - for (int i = 0; i < count; i++) { - String name = i + ".dat"; - int length = gen.nextInt() & LENGTH_MASK; - IndexInput file = store.openInput(name); - - if (file.length() != length) - throw new Exception("length incorrect"); - - byte[] data = new byte[length]; - byte[] read = new byte[length]; - gen.nextBytes(data); - file.readBytes(read, 0, length); - - if (!Arrays.equals(data, read)) - throw new Exception("contents incorrect"); - - file.close(); - } - } catch (IOException e) { - txn.abort(); - txn = null; - throw e; - } catch (DatabaseException e) { - if (txn != null) { - txn.abort(); - txn = null; - } - throw e; - } finally { - if (txn != null) - txn.commit(); - - store.close(); - } - - end = new Date(); - - if (VERBOSE) { - duration = (int) (end.getTime() - start.getTime()); - System.out.print(duration); - System.out.print(" total milliseconds to read, "); - System.out.print(totalLength / duration); - System.out.println(" kb/s"); - } - - try { - txn = env.beginTransaction(null, null); - store = new JEDirectory(txn, index, blocks); - - gen = new Random(seed); - start = new Date(); - for (int i = 0; i < count; i++) { - String name = i + ".dat"; - store.deleteFile(name); - } - - } catch (IOException e) { - txn.abort(); - txn = null; - throw e; - } catch (DatabaseException e) { - if (txn != null) { - txn.abort(); - txn = null; - } - throw e; - } finally { - if (txn != null) - txn.commit(); - - store.close(); - } - - end = new Date(); - - if (VERBOSE) { - System.out.print(end.getTime() - start.getTime()); - System.out.println(" total milliseconds to delete"); - - System.out.print(end.getTime() - veryStart.getTime()); - System.out.println(" total milliseconds"); - } - } -} diff --git a/lucene/contrib/db/bdb/build.xml b/lucene/contrib/db/bdb/build.xml deleted file mode 100644 index 4f7c74207f0..00000000000 --- a/lucene/contrib/db/bdb/build.xml +++ /dev/null @@ -1,77 +0,0 @@ - - - - - - Lucene Berkeley DB integration - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Unit Tests Skipped: Could not sanity check Native Library - - - - - - - - - diff --git a/lucene/contrib/db/bdb/lib/db--NOTICE-FAKE.txt b/lucene/contrib/db/bdb/lib/db--NOTICE-FAKE.txt deleted file mode 100644 index a1defaa3da4..00000000000 --- a/lucene/contrib/db/bdb/lib/db--NOTICE-FAKE.txt +++ /dev/null @@ -1 +0,0 @@ -No bdb jars are shipped with lucene. This is a fake license to work around the automated license checking. 
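For orientation between the two removed modules: both the bdb-je classes deleted above and the bdb classes deleted below store each index file as a sequence of 16 KB data blocks (BLOCK_SHIFT = 14). A file record maps the file name to a random 16-byte v4 UUID together with the file's length and modification time, and each data block is keyed by that UUID followed by the 8-byte big-endian block index. A minimal sketch of that key arithmetic, assuming only what is visible in the removed Block/JEIndexOutput/DbIndexOutput sources (the class and method names below are illustrative, not part of the removed code):

import java.nio.ByteBuffer;

public final class BlockKeyLayout {
    // Mirrors the constants in the removed JEIndexOutput/DbIndexOutput: 2^14 = 16 KB blocks.
    static final int BLOCK_SHIFT = 14;
    static final int BLOCK_LEN = 1 << BLOCK_SHIFT;
    static final int BLOCK_MASK = BLOCK_LEN - 1;

    /** Key of the block holding a given file position: file UUID followed by the big-endian block index. */
    static byte[] blockKey(byte[] fileUuid, long position) {
        return ByteBuffer.allocate(fileUuid.length + 8)
                .put(fileUuid)
                .putLong(position >>> BLOCK_SHIFT) // ByteBuffer is big-endian by default, matching Block.seek()
                .array();
    }

    /** Offset of a file position inside its block. */
    static int blockOffset(long position) {
        return (int) (position & BLOCK_MASK);
    }
}

In the removed readers, readByte() returns block.getData()[blockOffset(position)] and fetches the next block from the database once the offset wraps past the end of the current 16 KB block.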
diff --git a/lucene/contrib/db/bdb/lib/db-4.7.25.jar b/lucene/contrib/db/bdb/lib/db-4.7.25.jar deleted file mode 100644 index fedd3e2adf2..00000000000 --- a/lucene/contrib/db/bdb/lib/db-4.7.25.jar +++ /dev/null @@ -1,2 +0,0 @@ -AnyObjectId[99baf20bacd712cae91dd6e4e1f46224cafa1a37] was removed in git history. -Apache SVN contains full history. \ No newline at end of file diff --git a/lucene/contrib/db/bdb/lib/db-LICENSE-FAKE.txt b/lucene/contrib/db/bdb/lib/db-LICENSE-FAKE.txt deleted file mode 100644 index a1defaa3da4..00000000000 --- a/lucene/contrib/db/bdb/lib/db-LICENSE-FAKE.txt +++ /dev/null @@ -1 +0,0 @@ -No bdb jars are shipped with lucene. This is a fake license to work around the automated license checking. diff --git a/lucene/contrib/db/bdb/src/java/com/sleepycat/db/DbHandleExtractor.java b/lucene/contrib/db/bdb/src/java/com/sleepycat/db/DbHandleExtractor.java deleted file mode 100644 index b86900c17b5..00000000000 --- a/lucene/contrib/db/bdb/src/java/com/sleepycat/db/DbHandleExtractor.java +++ /dev/null @@ -1,50 +0,0 @@ -package com.sleepycat.db; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import com.sleepycat.db.internal.Db; -import com.sleepycat.db.internal.DbTxn; - - -/** - * This class is a hack to workaround the need to rewrite the entire - * org.apache.lucene.store.db package after Sleepycat radically changed its - * Java API from version 4.2.52 to version 4.3.21. - * - * The code below extracts the package-accessible internal handle instances - * that were the entrypoint objects in the pre-4.3 Java API and that wrap the - * actual Berkeley DB C objects via SWIG. - * - */ - -public class DbHandleExtractor { - - private DbHandleExtractor() - { - } - - static public Db getDb(Database database) - { - return database.db; - } - - static public DbTxn getDbTxn(Transaction transaction) - { - return transaction.txn; - } -} diff --git a/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/Block.java b/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/Block.java deleted file mode 100644 index bd04512adf6..00000000000 --- a/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/Block.java +++ /dev/null @@ -1,92 +0,0 @@ -package org.apache.lucene.store.db; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import java.io.IOException; - -import com.sleepycat.db.DatabaseEntry; -import com.sleepycat.db.DatabaseException; - - - -public class Block extends Object { - protected DatabaseEntry key, data; - - protected Block(File file) - throws IOException - { - byte[] fileKey = file.getKey(); - - key = new DatabaseEntry(new byte[fileKey.length + 8]); - key.setUserBuffer(fileKey.length + 8, true); - - data = new DatabaseEntry(new byte[DbIndexOutput.BLOCK_LEN]); - data.setUserBuffer(data.getSize(), true); - - System.arraycopy(fileKey, 0, key.getData(), 0, fileKey.length); - seek(0L); - } - - protected byte[] getKey() - { - return key.getData(); - } - - protected byte[] getData() - { - return data.getData(); - } - - protected void seek(long position) - throws IOException - { - byte[] data = key.getData(); - int index = data.length - 8; - - position >>>= DbIndexOutput.BLOCK_SHIFT; - - data[index + 0] = (byte) (0xff & (position >>> 56)); - data[index + 1] = (byte) (0xff & (position >>> 48)); - data[index + 2] = (byte) (0xff & (position >>> 40)); - data[index + 3] = (byte) (0xff & (position >>> 32)); - data[index + 4] = (byte) (0xff & (position >>> 24)); - data[index + 5] = (byte) (0xff & (position >>> 16)); - data[index + 6] = (byte) (0xff & (position >>> 8)); - data[index + 7] = (byte) (0xff & (position >>> 0)); - } - - protected void get(DbDirectory directory) - throws IOException - { - try { - directory.blocks.get(directory.txn, key, data, directory.flags); - } catch (DatabaseException e) { - throw new IOException(e.getMessage()); - } - } - - protected void put(DbDirectory directory) - throws IOException - { - try { - directory.blocks.put(directory.txn, key, data, 0); - } catch (DatabaseException e) { - throw new IOException(e.getMessage()); - } - } -} diff --git a/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/DbDirectory.java b/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/DbDirectory.java deleted file mode 100644 index 2e84262855e..00000000000 --- a/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/DbDirectory.java +++ /dev/null @@ -1,246 +0,0 @@ -package org.apache.lucene.store.db; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.ByteArrayInputStream; -import java.io.DataInputStream; -import java.util.*; - -import org.apache.lucene.store.Directory; -import org.apache.lucene.store.Lock; -import org.apache.lucene.store.IndexOutput; -import org.apache.lucene.store.IndexInput; - -import com.sleepycat.db.internal.Db; -import com.sleepycat.db.internal.DbConstants; -import com.sleepycat.db.DatabaseEntry; -import com.sleepycat.db.internal.Dbc; -import com.sleepycat.db.internal.DbTxn; -import com.sleepycat.db.DatabaseException; - -import com.sleepycat.db.Database; -import com.sleepycat.db.Transaction; -import com.sleepycat.db.DbHandleExtractor; - -/** - * A DbDirectory is a Berkeley DB 4.3 based implementation of - * {@link org.apache.lucene.store.Directory Directory}. It uses two - * {@link com.sleepycat.db.internal.Db Db} database handles, one for storing file - * records and another for storing file data blocks. - * - */ - -public class DbDirectory extends Directory { - - protected Set openFiles = Collections.synchronizedSet(new HashSet()); - protected Db files, blocks; - protected DbTxn txn; - protected int flags; - - /** - * Instantiate a DbDirectory. The same threading rules that apply to - * Berkeley DB handles apply to instances of DbDirectory. - * - * @param txn a transaction handle that is going to be used for all db - * operations done by this instance. This parameter may be - * null. - * @param files a db handle to store file records. - * @param blocks a db handle to store file data blocks. - * @param flags flags used for db read operations. - */ - - public DbDirectory(DbTxn txn, Db files, Db blocks, int flags) - { - super(); - - this.txn = txn; - this.files = files; - this.blocks = blocks; - this.flags = flags; - } - - public DbDirectory(Transaction txn, Database files, Database blocks, - int flags) - { - super(); - - this.txn = txn != null ? DbHandleExtractor.getDbTxn(txn) : null; - this.files = DbHandleExtractor.getDb(files); - this.blocks = DbHandleExtractor.getDb(blocks); - this.flags = flags; - } - - public DbDirectory(Transaction txn, Database files, Database blocks) - { - this(txn, files, blocks, 0); - } - - @Override - public void close() - throws IOException - { - flush(); - } - - /** - * Flush the currently open files. After they have been flushed it is - * safe to commit the transaction without closing this DbDirectory - * instance first. 
- * @see #setTransaction - */ - public void flush() - throws IOException - { - Iterator iterator = openFiles.iterator(); - - while (iterator.hasNext()) - iterator.next().flush(); - } - - @Override - public IndexOutput createOutput(String name) - throws IOException - { - return new DbIndexOutput(this, name, true); - } - - @Override - public void deleteFile(String name) - throws IOException - { - new File(name).delete(this); - } - - @Override - public boolean fileExists(String name) - throws IOException - { - return new File(name).exists(this); - } - - @Override - public long fileLength(String name) throws IOException { - File file = new File(name); - - if (file.exists(this)) - return file.getLength(); - - throw new FileNotFoundException(name); - } - - @Override - public long fileModified(String name) - throws IOException - { - File file = new File(name); - - if (file.exists(this)) - return file.getTimeModified(); - - throw new IOException("File does not exist: " + name); - } - - @Override - public String[] listAll() - throws IOException - { - Dbc cursor = null; - List list = new ArrayList(); - - try { - try { - DatabaseEntry key = new DatabaseEntry(new byte[0]); - DatabaseEntry data = new DatabaseEntry((byte[]) null); - - data.setPartial(true); - - cursor = files.cursor(txn, flags); - - if (cursor.get(key, data, - DbConstants.DB_SET_RANGE | flags) != DbConstants.DB_NOTFOUND) - { - ByteArrayInputStream buffer = - new ByteArrayInputStream(key.getData()); - DataInputStream in = new DataInputStream(buffer); - String name = in.readUTF(); - - in.close(); - list.add(name); - - while (cursor.get(key, data, - DbConstants.DB_NEXT | flags) != DbConstants.DB_NOTFOUND) { - buffer = new ByteArrayInputStream(key.getData()); - in = new DataInputStream(buffer); - name = in.readUTF(); - in.close(); - - list.add(name); - } - } - } finally { - if (cursor != null) - cursor.close(); - } - } catch (DatabaseException e) { - throw new IOException(e.getMessage()); - } - - return list.toArray(new String[list.size()]); - } - - @Override - public void sync(Collection names) throws IOException { - } - - @Override - public IndexInput openInput(String name) - throws IOException - { - return new DbIndexInput(this, name); - } - - @Override - public Lock makeLock(String name) - { - return new DbLock(); - } - - /** - * Once a transaction handle was committed it is no longer valid. In - * order to continue using this DbDirectory instance after a commit, the - * transaction handle has to be replaced. - * @param txn the new transaction handle to use - */ - public void setTransaction(Transaction txn) - { - setTransaction(txn != null ? DbHandleExtractor.getDbTxn(txn) : null); - } - - /** - * Once a transaction handle was committed it is no longer valid. In - * order to continue using this DbDirectory instance after a commit, the - * transaction handle has to be replaced. - * @param txn the new transaction handle to use - */ - public void setTransaction(DbTxn txn) - { - this.txn = txn; - } -} diff --git a/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/DbIndexInput.java b/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/DbIndexInput.java deleted file mode 100644 index b41203ea0c0..00000000000 --- a/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/DbIndexInput.java +++ /dev/null @@ -1,152 +0,0 @@ -package org.apache.lucene.store.db; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import java.io.IOException; -import org.apache.lucene.store.IndexInput; - - - - -public class DbIndexInput extends IndexInput { - - protected long position = 0L, length = 0L; - protected DbDirectory directory; - protected Block block; - protected File file; - - protected DbIndexInput(DbDirectory directory, String name) - throws IOException - { - super(); - - this.directory = directory; - - this.file = new File(name); - if (!file.exists(directory)) - throw new IOException("File does not exist: " + name); - - length = file.getLength(); - - block = new Block(file); - block.get(directory); - } - - @Override - public Object clone() - { - try { - DbIndexInput clone = (DbIndexInput) super.clone(); - - clone.block = new Block(file); - clone.block.seek(position); - clone.block.get(directory); - - return clone; - } catch (IOException e) { - throw new RuntimeException(e.getMessage()); - } - } - - @Override - public void close() - throws IOException - { - } - - @Override - public long length() - { - return length; - } - - @Override - public byte readByte() - throws IOException - { - if (position + 1 > length) - throw new IOException("Reading past end of file"); - - int blockPos = (int) (position++ & DbIndexOutput.BLOCK_MASK); - byte b = block.getData()[blockPos]; - - if (blockPos + 1 == DbIndexOutput.BLOCK_LEN) - { - block.seek(position); - block.get(directory); - } - - return b; - } - - @Override - public void readBytes(byte[] b, int offset, int len) - throws IOException - { - if (position + len > length) - throw new IOException("Reading past end of file"); - else - { - int blockPos = (int) (position & DbIndexOutput.BLOCK_MASK); - - while (blockPos + len >= DbIndexOutput.BLOCK_LEN) { - int blockLen = DbIndexOutput.BLOCK_LEN - blockPos; - - System.arraycopy(block.getData(), blockPos, - b, offset, blockLen); - - len -= blockLen; - offset += blockLen; - position += blockLen; - - block.seek(position); - block.get(directory); - blockPos = 0; - } - - if (len > 0) - { - System.arraycopy(block.getData(), blockPos, b, offset, len); - position += len; - } - } - } - - @Override - public void seek(long pos) - throws IOException - { - if (pos > length) - throw new IOException("seeking past end of file"); - - if ((pos >>> DbIndexOutput.BLOCK_SHIFT) != - (position >>> DbIndexOutput.BLOCK_SHIFT)) - { - block.seek(pos); - block.get(directory); - } - - position = pos; - } - - @Override - public long getFilePointer() - { - return position; - } -} diff --git a/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/DbIndexOutput.java b/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/DbIndexOutput.java deleted file mode 100644 index 5e3a9e10a56..00000000000 --- a/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/DbIndexOutput.java +++ /dev/null @@ -1,156 +0,0 @@ -package org.apache.lucene.store.db; - -/** - * 
Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import java.io.IOException; -import org.apache.lucene.store.IndexOutput; - - - - -public class DbIndexOutput extends IndexOutput { - - /** - * The size of data blocks, currently 16k (2^14), is determined by this - * constant. - */ - static public final int BLOCK_SHIFT = 14; - static public final int BLOCK_LEN = 1 << BLOCK_SHIFT; - static public final int BLOCK_MASK = BLOCK_LEN - 1; - - protected long position = 0L, length = 0L; - protected DbDirectory directory; - protected Block block; - protected File file; - - protected DbIndexOutput(DbDirectory directory, String name, boolean create) - throws IOException - { - super(); - - this.directory = directory; - - file = new File(directory, name, create); - block = new Block(file); - length = file.getLength(); - - seek(length); - block.get(directory); - - directory.openFiles.add(this); - } - - @Override - public void close() - throws IOException - { - flush(); - file.modify(directory, length, System.currentTimeMillis()); - - directory.openFiles.remove(this); - } - - @Override - public void flush() - throws IOException - { - if (length > 0) - block.put(directory); - } - - @Override - public void writeByte(byte b) - throws IOException - { - int blockPos = (int) (position++ & BLOCK_MASK); - - block.getData()[blockPos] = b; - - if (blockPos + 1 == BLOCK_LEN) - { - block.put(directory); - block.seek(position); - block.get(directory); - } - - if (position > length) - length = position; - } - - @Override - public void writeBytes(byte[] b, int offset, int len) - throws IOException - { - int blockPos = (int) (position & BLOCK_MASK); - - while (blockPos + len >= BLOCK_LEN) { - int blockLen = BLOCK_LEN - blockPos; - - System.arraycopy(b, offset, block.getData(), blockPos, blockLen); - block.put(directory); - - len -= blockLen; - offset += blockLen; - position += blockLen; - - block.seek(position); - block.get(directory); - blockPos = 0; - } - - if (len > 0) - { - System.arraycopy(b, offset, block.getData(), blockPos, len); - position += len; - } - - if (position > length) - length = position; - } - - @Override - public long length() - throws IOException - { - return length; - } - - @Override - public void seek(long pos) - throws IOException - { - if (pos > length) - throw new IOException("seeking past end of file"); - - if ((pos >>> BLOCK_SHIFT) == (position >>> BLOCK_SHIFT)) - position = pos; - else - { - block.put(directory); - block.seek(pos); - block.get(directory); - position = pos; - } - } - - @Override - public long getFilePointer() - { - return position; - } -} diff --git a/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/DbLock.java b/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/DbLock.java deleted file mode 100644 index 
7c6c2c1b6cb..00000000000 --- a/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/DbLock.java +++ /dev/null @@ -1,55 +0,0 @@ -package org.apache.lucene.store.db; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import org.apache.lucene.store.Lock; - -/** - * This implementation of {@link org.apache.lucene.store.Lock Lock} is - * trivial as {@link DbDirectory} operations are managed by the Berkeley DB - * locking system. - * - */ - -public class DbLock extends Lock { - - boolean isLocked = false; - - public DbLock() - { - } - - @Override - public boolean obtain() - { - return (isLocked = true); - } - - @Override - public void release() - { - isLocked = false; - } - - @Override - public boolean isLocked() - { - return isLocked; - } -} - diff --git a/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/File.java b/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/File.java deleted file mode 100644 index 15fa90cb3d9..00000000000 --- a/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/File.java +++ /dev/null @@ -1,263 +0,0 @@ -package org.apache.lucene.store.db; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import java.io.IOException; -import java.io.ByteArrayOutputStream; -import java.io.DataOutputStream; -import java.io.ByteArrayInputStream; -import java.io.DataInputStream; -import java.util.Random; - -import com.sleepycat.db.DatabaseEntry; -import com.sleepycat.db.internal.DbConstants; -import com.sleepycat.db.internal.Dbc; -import com.sleepycat.db.internal.Db; -import com.sleepycat.db.internal.DbTxn; -import com.sleepycat.db.DatabaseException; - - - -public class File extends Object { - - static protected Random random = new Random(); - - protected DatabaseEntry key, data; - protected long length, timeModified; - protected String name; - protected byte[] uuid; - - protected File(String name) - throws IOException - { - setName(name); - - data = new DatabaseEntry(new byte[32]); - data.setUserBuffer(data.getSize(), true); - } - - protected File(DbDirectory directory, String name, boolean create) - throws IOException - { - this(name); - - if (!exists(directory)) - { - if (!create) - throw new IOException("File does not exist: " + name); - else - { - DatabaseEntry key = new DatabaseEntry(new byte[24]); - DatabaseEntry data = new DatabaseEntry((byte[]) null); - Db blocks = directory.blocks; - DbTxn txn = directory.txn; - int flags = directory.flags; - - key.setUserBuffer(24, true); - data.setPartial(true); - - uuid = new byte[16]; - - try { - do { - /* generate a v.4 random-uuid unique to this db */ - random.nextBytes(uuid); - uuid[6] = (byte) ((byte) 0x40 | - (uuid[6] & (byte) 0x0f)); - uuid[8] = (byte) ((byte) 0x80 | - (uuid[8] & (byte) 0x3f)); - System.arraycopy(uuid, 0, key.getData(), 0, 16); - } while (blocks.get(txn, key, data, - flags) != DbConstants.DB_NOTFOUND); - } catch (DatabaseException e) { - throw new IOException(e.getMessage()); - } - } - } - else if (create) - length = 0L; - } - - protected String getName() - { - return name; - } - - private void setName(String name) - throws IOException - { - ByteArrayOutputStream buffer = new ByteArrayOutputStream(128); - DataOutputStream out = new DataOutputStream(buffer); - - out.writeUTF(name); - out.close(); - - key = new DatabaseEntry(buffer.toByteArray()); - key.setUserBuffer(key.getSize(), true); - - this.name = name; - } - - protected byte[] getKey() - throws IOException - { - if (uuid == null) - throw new IOException("Uninitialized file"); - - return uuid; - } - - protected long getLength() - { - return length; - } - - protected long getTimeModified() - { - return timeModified; - } - - protected boolean exists(DbDirectory directory) - throws IOException - { - Db files = directory.files; - DbTxn txn = directory.txn; - int flags = directory.flags; - - try { - if (files.get(txn, key, data, flags) == DbConstants.DB_NOTFOUND) - return false; - } catch (DatabaseException e) { - throw new IOException(e.getMessage()); - } - - byte[] bytes = data.getData(); - ByteArrayInputStream buffer = new ByteArrayInputStream(bytes); - DataInputStream in = new DataInputStream(buffer); - - length = in.readLong(); - timeModified = in.readLong(); - in.close(); - - uuid = new byte[16]; - System.arraycopy(bytes, 16, uuid, 0, 16); - - return true; - } - - protected void modify(DbDirectory directory, long length, long timeModified) - throws IOException - { - ByteArrayOutputStream buffer = new ByteArrayOutputStream(32); - DataOutputStream out = new DataOutputStream(buffer); - Db files = directory.files; - DbTxn txn = directory.txn; - - out.writeLong(length); - out.writeLong(timeModified); - out.write(getKey()); - out.close(); - - 
System.arraycopy(buffer.toByteArray(), 0, data.getData(), 0, 32); - - try { - files.put(txn, key, data, 0); - } catch (DatabaseException e) { - throw new IOException(e.getMessage()); - } - - this.length = length; - this.timeModified = timeModified; - } - - protected void delete(DbDirectory directory) - throws IOException - { - if (!exists(directory)) - throw new IOException("File does not exist: " + getName()); - - Dbc cursor = null; - - try { - try { - byte[] bytes = getKey(); - int ulen = bytes.length + 8; - byte[] cursorBytes = new byte[ulen]; - DatabaseEntry cursorKey = new DatabaseEntry(cursorBytes); - DatabaseEntry cursorData = new DatabaseEntry((byte[]) null); - Db files = directory.files; - Db blocks = directory.blocks; - DbTxn txn = directory.txn; - int flags = directory.flags; - - System.arraycopy(bytes, 0, cursorBytes, 0, bytes.length); - cursorKey.setUserBuffer(ulen, true); - cursorData.setPartial(true); - - cursor = blocks.cursor(txn, flags); - - if (cursor.get(cursorKey, cursorData, - DbConstants.DB_SET_RANGE | flags) != DbConstants.DB_NOTFOUND) - { - cursor.del(0); - - outer: - while (cursor.get(cursorKey, cursorData, - DbConstants.DB_NEXT | flags) != DbConstants.DB_NOTFOUND) - { - for (int i = 0; i < bytes.length; i++) - if (bytes[i] != cursorBytes[i]) - break outer; - - cursor.del(0); - } - } - - files.del(txn, key, 0); - } finally { - if (cursor != null) - cursor.close(); - } - } catch (DatabaseException e) { - throw new IOException(e.getMessage()); - } - } - - protected void rename(DbDirectory directory, String name) - throws IOException - { - if (!exists(directory)) - throw new IOException("File does not exist: " + getName()); - - File newFile = new File(name); - - if (newFile.exists(directory)) - newFile.delete(directory); - - try { - Db files = directory.files; - DbTxn txn = directory.txn; - - files.del(txn, key, 0); - setName(name); - files.put(txn, key, data, 0); - } catch (DatabaseException e) { - throw new IOException(e.getMessage()); - } - } -} diff --git a/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/package.html b/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/package.html deleted file mode 100644 index 2d499a4e6f9..00000000000 --- a/lucene/contrib/db/bdb/src/java/org/apache/lucene/store/db/package.html +++ /dev/null @@ -1,22 +0,0 @@ - - - - -Berkeley DB 4.3 based implementation of {@link org.apache.lucene.store.Directory Directory}. - - diff --git a/lucene/contrib/db/bdb/src/java/overview.html b/lucene/contrib/db/bdb/src/java/overview.html deleted file mode 100644 index cdf0ea43327..00000000000 --- a/lucene/contrib/db/bdb/src/java/overview.html +++ /dev/null @@ -1,26 +0,0 @@ - - - - - bdb - - - - bdb - - \ No newline at end of file diff --git a/lucene/contrib/db/bdb/src/test/org/apache/lucene/store/db/DbStoreTest.java b/lucene/contrib/db/bdb/src/test/org/apache/lucene/store/db/DbStoreTest.java deleted file mode 100644 index 4ca79ef95ee..00000000000 --- a/lucene/contrib/db/bdb/src/test/org/apache/lucene/store/db/DbStoreTest.java +++ /dev/null @@ -1,413 +0,0 @@ -package org.apache.lucene.store.db; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import java.util.Date; -import java.util.Random; -import java.util.Arrays; - -import java.io.File; -import java.io.IOException; - -import com.sleepycat.db.EnvironmentConfig; -import com.sleepycat.db.Environment; -import com.sleepycat.db.Transaction; -import com.sleepycat.db.Database; -import com.sleepycat.db.DatabaseConfig; -import com.sleepycat.db.DatabaseType; -import com.sleepycat.db.DatabaseException; - -import org.apache.lucene.store.Directory; -import org.apache.lucene.store.IndexInput; -import org.apache.lucene.store.IndexOutput; -import org.apache.lucene.util.LuceneTestCase; - -/** - * Tests {@link DbDirectory}. - * - * Adapted from org.apache.lucene.StoreTest with larger files and random bytes. - */ -public class DbStoreTest extends LuceneTestCase { - protected File dbHome = new File(TEMP_DIR,"index"); - protected Environment env; - protected Database index, blocks; - - @Override - public void setUp() throws Exception { - super.setUp(); - if (!dbHome.exists()) - dbHome.mkdir(); - else - { - File[] files = dbHome.listFiles(); - - for (int i = 0; i < files.length; i++) { - String name = files[i].getName(); - if (name.startsWith("__") || name.startsWith("log.")) - files[i].delete(); - } - } - - EnvironmentConfig envConfig = new EnvironmentConfig(); - DatabaseConfig dbConfig = new DatabaseConfig(); - - envConfig.setTransactional(true); - envConfig.setInitializeCache(true); - envConfig.setInitializeLocking(true); - envConfig.setInitializeLogging(true); - envConfig.setAllowCreate(true); - envConfig.setThreaded(true); - dbConfig.setAllowCreate(true); - dbConfig.setType(DatabaseType.BTREE); - - env = new Environment(dbHome, envConfig); - - Transaction txn = null; - - try { - txn = env.beginTransaction(null, null); - index = env.openDatabase(txn, "__index__", null, dbConfig); - blocks = env.openDatabase(txn, "__blocks__", null, dbConfig); - } catch (DatabaseException e) { - if (txn != null) - { - txn.abort(); - txn = null; - } - index = null; - blocks = null; - throw e; - } finally { - if (txn != null) - txn.commit(); - txn = null; - } - } - - @Override - public void tearDown() throws Exception { - if (index != null) - index.close(); - if (blocks != null) - blocks.close(); - if (env != null) - env.close(); - super.tearDown(); - } - - public void testBytes() - throws Exception - { - final int count = 250; - final int LENGTH_MASK = 0xffff; - - Random r = random; - final long seed = r.nextLong(); - - Random gen = new Random(seed); - int totalLength = 0; - int duration; - Date end; - - Date veryStart = new Date(); - Date start = new Date(); - Transaction txn = env.beginTransaction(null, null); - Directory store = null; - - if (VERBOSE) System.out.println("Writing files byte by byte"); - - try { - store = new DbDirectory(txn, index, blocks); - - for (int i = 0; i < count; i++) { - String name = i + ".dat"; - int length = gen.nextInt() & LENGTH_MASK; - IndexOutput file = store.createOutput(name); - - totalLength += length; - - for (int j = 0; j < length; j++) { - byte b = (byte)(gen.nextInt() & 0x7F); - file.writeByte(b); - } - - file.close(); - } - } catch (IOException e) { 
- txn.abort(); - txn = null; - throw e; - } finally { - if (txn != null) - txn.commit(); - - store.close(); - } - - end = new Date(); - - if (VERBOSE) { - duration = (int) (end.getTime() - start.getTime()); - System.out.print(duration); - System.out.print(" total milliseconds to create, "); - System.out.print(totalLength / duration); - System.out.println(" kb/s"); - } - - try { - txn = env.beginTransaction(null, null); - store = new DbDirectory(txn, index, blocks); - - gen = new Random(seed); - start = new Date(); - - for (int i = 0; i < count; i++) { - String name = i + ".dat"; - int length = gen.nextInt() & LENGTH_MASK; - IndexInput file = store.openInput(name); - - if (file.length() != length) - throw new Exception("length incorrect"); - - for (int j = 0; j < length; j++) { - byte b = (byte)(gen.nextInt() & 0x7F); - - if (file.readByte() != b) - throw new Exception("contents incorrect"); - } - - file.close(); - } - } catch (IOException e) { - txn.abort(); - txn = null; - throw e; - } catch (DatabaseException e) { - if (txn != null) - { - txn.abort(); - txn = null; - } - throw e; - } finally { - if (txn != null) - txn.commit(); - - store.close(); - } - - end = new Date(); - - if (VERBOSE) { - duration = (int) (end.getTime() - start.getTime()); - System.out.print(duration); - System.out.print(" total milliseconds to read, "); - System.out.print(totalLength / duration); - System.out.println(" kb/s"); - } - - try { - txn = env.beginTransaction(null, null); - store = new DbDirectory(txn, index, blocks); - - gen = new Random(seed); - start = new Date(); - - for (int i = 0; i < count; i++) { - String name = i + ".dat"; - store.deleteFile(name); - } - } catch (IOException e) { - txn.abort(); - txn = null; - throw e; - } catch (DatabaseException e) { - if (txn != null) - { - txn.abort(); - txn = null; - } - throw e; - } finally { - if (txn != null) - txn.commit(); - - store.close(); - } - - end = new Date(); - - if (VERBOSE) { - System.out.print(end.getTime() - start.getTime()); - System.out.println(" total milliseconds to delete"); - - System.out.print(end.getTime() - veryStart.getTime()); - System.out.println(" total milliseconds"); - } - } - - public void testArrays() - throws Exception - { - final int count = 250; - final int LENGTH_MASK = 0xffff; - - Random r = random; - final long seed = r.nextLong(); - - Random gen = new Random(seed); - int totalLength = 0; - int duration; - Date end; - - Date veryStart = new Date(); - Date start = new Date(); - Transaction txn = env.beginTransaction(null, null); - Directory store = null; - - if (VERBOSE) System.out.println("Writing files as one byte array"); - - try { - store = new DbDirectory(txn, index, blocks); - - for (int i = 0; i < count; i++) { - String name = i + ".dat"; - int length = gen.nextInt() & LENGTH_MASK; - IndexOutput file = store.createOutput(name); - byte[] data = new byte[length]; - - totalLength += length; - gen.nextBytes(data); - file.writeBytes(data, length); - - file.close(); - } - } catch (IOException e) { - txn.abort(); - txn = null; - throw e; - } finally { - if (txn != null) - txn.commit(); - - store.close(); - } - - end = new Date(); - - if (VERBOSE) { - duration = (int) (end.getTime() - start.getTime()); - System.out.print(duration); - System.out.print(" total milliseconds to create, "); - System.out.print(totalLength / duration); - System.out.println(" kb/s"); - } - - try { - txn = env.beginTransaction(null, null); - store = new DbDirectory(txn, index, blocks); - - gen = new Random(seed); - start = new Date(); - - for (int 
i = 0; i < count; i++) { - String name = i + ".dat"; - int length = gen.nextInt() & LENGTH_MASK; - IndexInput file = store.openInput(name); - - if (file.length() != length) - throw new Exception("length incorrect"); - - byte[] data = new byte[length]; - byte[] read = new byte[length]; - gen.nextBytes(data); - file.readBytes(read, 0, length); - - if (!Arrays.equals(data, read)) - throw new Exception("contents incorrect"); - - file.close(); - } - } catch (IOException e) { - txn.abort(); - txn = null; - throw e; - } catch (DatabaseException e) { - if (txn != null) - { - txn.abort(); - txn = null; - } - throw e; - } finally { - if (txn != null) - txn.commit(); - - store.close(); - } - - end = new Date(); - - if (VERBOSE) { - duration = (int) (end.getTime() - start.getTime()); - System.out.print(duration); - System.out.print(" total milliseconds to read, "); - System.out.print(totalLength / duration); - System.out.println(" kb/s"); - } - - try { - txn = env.beginTransaction(null, null); - store = new DbDirectory(txn, index, blocks); - - gen = new Random(seed); - start = new Date(); - - for (int i = 0; i < count; i++) { - String name = i + ".dat"; - store.deleteFile(name); - } - } catch (IOException e) { - txn.abort(); - txn = null; - throw e; - } catch (DatabaseException e) { - if (txn != null) - { - txn.abort(); - txn = null; - } - throw e; - } finally { - if (txn != null) - txn.commit(); - - store.close(); - } - - end = new Date(); - - if (VERBOSE) { - System.out.print(end.getTime() - start.getTime()); - System.out.println(" total milliseconds to delete"); - - System.out.print(end.getTime() - veryStart.getTime()); - System.out.println(" total milliseconds"); - } - } -} diff --git a/lucene/contrib/db/bdb/src/test/org/apache/lucene/store/db/SanityLoadLibrary.java b/lucene/contrib/db/bdb/src/test/org/apache/lucene/store/db/SanityLoadLibrary.java deleted file mode 100644 index e2956c8cae2..00000000000 --- a/lucene/contrib/db/bdb/src/test/org/apache/lucene/store/db/SanityLoadLibrary.java +++ /dev/null @@ -1,36 +0,0 @@ -package org.apache.lucene.store.db; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import com.sleepycat.db.EnvironmentConfig; -import com.sleepycat.db.Environment; - -/** - * Simple sanity testing application to verify that the underlying - * native library can be loaded cleanly. - * - * For use in the build.xml of this contrib, to determine if tests - * should be skipped. 
- */ -public class SanityLoadLibrary { - public static void main(String[] ignored) throws Exception { - EnvironmentConfig envConfig = EnvironmentConfig.DEFAULT; - envConfig.setAllowCreate(false); - new Environment(null, envConfig); - } -} diff --git a/lucene/contrib/db/build.xml b/lucene/contrib/db/build.xml deleted file mode 100644 index 6680863b73f..00000000000 --- a/lucene/contrib/db/build.xml +++ /dev/null @@ -1,74 +0,0 @@ - - - - - - - - Lucene DB integration - - bdb: using the Java interface of C Berkeley DB - - bdb-je: using Berkeley DB Java Edition - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lucene/contrib/lucli/README b/lucene/contrib/lucli/README deleted file mode 100644 index 38f8d6592d1..00000000000 --- a/lucene/contrib/lucli/README +++ /dev/null @@ -1,24 +0,0 @@ -lucli (pronounced Luckily) is the Lucene Command Line Interface. - -INSTALLATION - -Call "ant", then call the run.sh shell script. If it doesn't work right away: - Edit JAVA_HOME to point to your java directory. - Edit LUCLI to point to where you installed lucli. - Edit LUCLI_MEMORY and set it to the maximum amount of memory you want to allocate to lucli - You can also replace the Lucene jar file that came with lucli with your own. - - -ENABLING READLINE - -Readline support should automatically work thanks to JLine, see http://jline.sourceforge.net/ - - -Documentation - -There is none :-). Type help at the command line or read the code. - -Enjoy - -Dror Matalon -dror@zapatec.com. diff --git a/lucene/contrib/lucli/build.xml b/lucene/contrib/lucli/build.xml deleted file mode 100644 index 51fdc04cee3..00000000000 --- a/lucene/contrib/lucli/build.xml +++ /dev/null @@ -1,71 +0,0 @@ - - - - - - - - Lucene Command Line Interface - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lucene/contrib/lucli/lib/jline-LICENSE-BSD_LIKE.txt b/lucene/contrib/lucli/lib/jline-LICENSE-BSD_LIKE.txt deleted file mode 100644 index 7e7f94f893e..00000000000 --- a/lucene/contrib/lucli/lib/jline-LICENSE-BSD_LIKE.txt +++ /dev/null @@ -1,33 +0,0 @@ -Copyright (c) 2002, 2003, 2004, 2005, Marc Prud'hommeaux -All rights reserved. - -Redistribution and use in source and binary forms, with or -without modification, are permitted provided that the following -conditions are met: - -Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - -Redistributions in binary form must reproduce the above copyright -notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with -the distribution. - -Neither the name of JLine nor the names of its contributors -may be used to endorse or promote products derived from this -software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, -BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY -AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO -EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, -OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED -AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING -IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED -OF THE POSSIBILITY OF SUCH DAMAGE. - diff --git a/lucene/contrib/lucli/lib/jline-NOTICE.txt b/lucene/contrib/lucli/lib/jline-NOTICE.txt deleted file mode 100644 index 006d6b0663f..00000000000 --- a/lucene/contrib/lucli/lib/jline-NOTICE.txt +++ /dev/null @@ -1,2 +0,0 @@ -JLine (under contrib/lucli/lib/jline.jar) is licensed under the BSD License. -See http://jline.sourceforge.net/ \ No newline at end of file diff --git a/lucene/contrib/lucli/lib/jline.jar b/lucene/contrib/lucli/lib/jline.jar deleted file mode 100644 index a966c41df55..00000000000 --- a/lucene/contrib/lucli/lib/jline.jar +++ /dev/null @@ -1,2 +0,0 @@ -AnyObjectId[b841cf167aa3ca6c82f2e40ba2fd8732e829525f] was removed in git history. -Apache SVN contains full history. \ No newline at end of file diff --git a/lucene/contrib/lucli/run.sh b/lucene/contrib/lucli/run.sh deleted file mode 100755 index 60fe360f72c..00000000000 --- a/lucene/contrib/lucli/run.sh +++ /dev/null @@ -1,22 +0,0 @@ - -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -LUCLI=. -LUCLI_MEMORY=128M -#JAVA_HOME=/home/dror/j2sdk1.4.1_03/ -CLASSPATH=${CLASSPATH}:$LUCLI/lib/jline.jar:$LUCLI/lib/lucene.jar:$LUCLI/dist/lucli-dev.jar -export CLASSPATH -$JAVA_HOME/bin/java -Xmx${LUCLI_MEMORY} lucli.Lucli diff --git a/lucene/contrib/lucli/src/java/lucli/LuceneMethods.java b/lucene/contrib/lucli/src/java/lucli/LuceneMethods.java deleted file mode 100644 index 8d5c1e5d45c..00000000000 --- a/lucene/contrib/lucli/src/java/lucli/LuceneMethods.java +++ /dev/null @@ -1,406 +0,0 @@ -package lucli; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import java.io.File; -import java.io.IOException; -import java.io.Reader; -import java.io.StringReader; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Comparator; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.TreeMap; - -import jline.ConsoleReader; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.standard.StandardAnalyzer; -import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; -import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; -import org.apache.lucene.document.Document; -import org.apache.lucene.document.Fieldable; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexReader.AtomicReaderContext; -import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.Fields; -import org.apache.lucene.index.FieldsEnum; -import org.apache.lucene.index.TermsEnum; -import org.apache.lucene.index.MultiFields; -import org.apache.lucene.index.IndexReader.FieldOption; -import org.apache.lucene.index.IndexWriterConfig.OpenMode; -import org.apache.lucene.queryParser.MultiFieldQueryParser; -import org.apache.lucene.queryParser.ParseException; -import org.apache.lucene.search.Collector; -import org.apache.lucene.search.Explanation; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.ScoreDoc; -import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.store.FSDirectory; -import org.apache.lucene.util.Version; -import org.apache.lucene.util.BytesRef; - -/** - * Various methods that interact with Lucene and provide info about the - * index, search, etc. Parts adapted from Lucene demo. - */ -class LuceneMethods { - - private int numDocs; - private final FSDirectory indexName; //directory of this index - private List fields; //Fields as a vector - private List indexedFields; //Fields as a vector - private String fieldsArray[]; //Fields as an array - private IndexSearcher searcher; - private Query query; //current query string - private String analyzerClassFQN = null; // Analyzer class, if NULL, use default Analyzer - - public LuceneMethods(String index) throws IOException { - indexName = FSDirectory.open(new File(index)); - message("Lucene CLI. Using directory '" + indexName + "'. 
Type 'help' for instructions."); - } - - private Analyzer createAnalyzer() { - if (analyzerClassFQN == null) return new StandardAnalyzer(Version.LUCENE_CURRENT); - try { - return Class.forName(analyzerClassFQN).asSubclass(Analyzer.class).newInstance(); - } catch (ClassCastException cce) { - message("Given class is not an Analyzer: " + analyzerClassFQN); - return new StandardAnalyzer(Version.LUCENE_CURRENT); - } catch (Exception e) { - message("Unable to use Analyzer " + analyzerClassFQN); - return new StandardAnalyzer(Version.LUCENE_CURRENT); - } - } - - - public void info() throws java.io.IOException { - IndexReader indexReader = IndexReader.open(indexName, true); - - - getFieldInfo(); - numDocs = indexReader.numDocs(); - message("Index has " + numDocs + " documents "); - message("All Fields:" + fields.toString()); - message("Indexed Fields:" + indexedFields.toString()); - - if (IndexWriter.isLocked(indexName)) { - message("Index is locked"); - } - //IndexReader.getCurrentVersion(indexName); - //System.out.println("Version:" + version); - - indexReader.close(); - } - - - public void search(String queryString, boolean explain, boolean showTokens, ConsoleReader cr) - throws java.io.IOException, org.apache.lucene.queryParser.ParseException { - initSearch(queryString); - int numHits = computeCount(query); - message(numHits + " total matching documents"); - if (explain) { - query = explainQuery(queryString); - } - - final int HITS_PER_PAGE = 10; - message("--------------------------------------"); - for (int start = 0; start < numHits; start += HITS_PER_PAGE) { - int end = Math.min(numHits, start + HITS_PER_PAGE); - ScoreDoc[] hits = search(query, end); - for (int ii = start; ii < end; ii++) { - Document doc = searcher.doc(hits[ii].doc); - message("---------------- " + (ii + 1) + " score:" + hits[ii].score + "---------------------"); - printHit(doc); - if (showTokens) { - invertDocument(doc); - } - if (explain) { - Explanation exp = searcher.explain(query, hits[ii].doc); - message("Explanation:" + exp.toString()); - } - } - message("#################################################"); - - if (numHits > end) { - // TODO: don't let the input end up in the command line history - queryString = cr.readLine("more (y/n) ? "); - if (queryString.length() == 0 || queryString.charAt(0) == 'n') - break; - } - } - searcher.close(); - } - - /** - * TODO: Allow user to specify what field(s) to display - */ - private void printHit(Document doc) { - for (int ii = 0; ii < fieldsArray.length; ii++) { - String currField = fieldsArray[ii]; - String[] result = doc.getValues(currField); - if (result != null) { - for (int i = 0; i < result.length; i++) { - message(currField + ":" + result[i]); - } - } else { - message(currField + ": "); - } - } - //another option is to just do message(doc); - } - - public void optimize() throws IOException { - //open the index writer. False: don't create a new one - IndexWriter indexWriter = new IndexWriter(indexName, new IndexWriterConfig( - Version.LUCENE_CURRENT, createAnalyzer()).setOpenMode( - OpenMode.APPEND)); - message("Starting to optimize index."); - long start = System.currentTimeMillis(); - indexWriter.optimize(); - message("Done optimizing index. 
Took " + (System.currentTimeMillis() - start) + " msecs"); - indexWriter.close(); - } - - - private Query explainQuery(String queryString) throws IOException, ParseException { - - searcher = new IndexSearcher(indexName, true); - Analyzer analyzer = createAnalyzer(); - getFieldInfo(); - - int arraySize = indexedFields.size(); - String indexedArray[] = new String[arraySize]; - for (int ii = 0; ii < arraySize; ii++) { - indexedArray[ii] = indexedFields.get(ii); - } - MultiFieldQueryParser parser = new MultiFieldQueryParser(Version.LUCENE_CURRENT, indexedArray, analyzer); - query = parser.parse(queryString); - message("Searching for: " + query.toString()); - return (query); - - } - - /** - * TODO: Allow user to specify analyzer - */ - private void initSearch(String queryString) throws IOException, ParseException { - - searcher = new IndexSearcher(indexName, true); - Analyzer analyzer = createAnalyzer(); - getFieldInfo(); - - int arraySize = fields.size(); - fieldsArray = new String[arraySize]; - for (int ii = 0; ii < arraySize; ii++) { - fieldsArray[ii] = fields.get(ii); - } - MultiFieldQueryParser parser = new MultiFieldQueryParser(Version.LUCENE_CURRENT, fieldsArray, analyzer); - query = parser.parse(queryString); - System.out.println("Searching for: " + query.toString()); - } - - final static class CountingCollector extends Collector { - public int numHits = 0; - - @Override - public void setScorer(Scorer scorer) throws IOException {} - @Override - public void collect(int doc) throws IOException { - numHits++; - } - - @Override - public void setNextReader(AtomicReaderContext context) {} - @Override - public boolean acceptsDocsOutOfOrder() { - return true; - } - } - - private int computeCount(Query q) throws IOException { - CountingCollector countingCollector = new CountingCollector(); - - searcher.search(q, countingCollector); - return countingCollector.numHits; - } - - public void count(String queryString) throws java.io.IOException, ParseException { - initSearch(queryString); - message(computeCount(query) + " total documents"); - searcher.close(); - } - - private ScoreDoc[] search(Query q, int numHits) throws IOException { - return searcher.search(query, numHits).scoreDocs; - } - - static public void message(String s) { - System.out.println(s); - } - - private void getFieldInfo() throws IOException { - IndexReader indexReader = IndexReader.open(indexName, true); - fields = new ArrayList(); - indexedFields = new ArrayList(); - - //get the list of all field names - for(String field : indexReader.getFieldNames(FieldOption.ALL)) { - if (field != null && !field.equals("")) - fields.add(field.toString()); - } - // - //get the list of indexed field names - for(String field : indexReader.getFieldNames(FieldOption.INDEXED)) { - if (field != null && !field.equals("")) - indexedFields.add(field.toString()); - } - indexReader.close(); - } - - - // Copied from DocumentWriter - // Tokenizes the fields of a document into Postings. 
- private void invertDocument(Document doc) - throws IOException { - - Map tokenMap = new HashMap(); - final int maxFieldLength = 10000; - - Analyzer analyzer = createAnalyzer(); - for (Fieldable field : doc.getFields()) { - String fieldName = field.name(); - if (field.isIndexed()) { - if (field.isTokenized()) { // un-tokenized field - Reader reader; // find or make Reader - if (field.readerValue() != null) - reader = field.readerValue(); - else if (field.stringValue() != null) - reader = new StringReader(field.stringValue()); - else - throw new IllegalArgumentException - ("field must have either String or Reader value"); - - int position = 0; - // Tokenize field and add to postingTable - TokenStream stream = analyzer.reusableTokenStream(fieldName, reader); - CharTermAttribute termAtt = stream.addAttribute(CharTermAttribute.class); - PositionIncrementAttribute posIncrAtt = stream.addAttribute(PositionIncrementAttribute.class); - - try { - stream.reset(); - while (stream.incrementToken()) { - position += (posIncrAtt.getPositionIncrement() - 1); - position++; - String name = termAtt.toString(); - Integer Count = tokenMap.get(name); - if (Count == null) { // not in there yet - tokenMap.put(name, Integer.valueOf(1)); //first one - } else { - int count = Count.intValue(); - tokenMap.put(name, Integer.valueOf(count + 1)); - } - if (position > maxFieldLength) break; - } - stream.end(); - } finally { - stream.close(); - } - } - - } - } - Map.Entry[] sortedHash = getSortedMapEntries(tokenMap); - for (int ii = 0; ii < sortedHash.length && ii < 10; ii++) { - Map.Entry currentEntry = sortedHash[ii]; - message((ii + 1) + ":" + currentEntry.getKey() + " " + currentEntry.getValue()); - } - } - - - /** Provides a list of the top terms of the index. - * - * @param field - the name of the command or null for all of them. - */ - public void terms(String field) throws IOException { - TreeMap termMap = new TreeMap(); - IndexReader indexReader = IndexReader.open(indexName, true); - Fields fields = MultiFields.getFields(indexReader); - if (fields != null) { - FieldsEnum fieldsEnum = fields.iterator(); - String curField; - while((curField = fieldsEnum.next()) != null) { - TermsEnum terms = fieldsEnum.terms(); - BytesRef text; - while ((text = terms.next()) != null) { - //message(term.field() + ":" + term.text() + " freq:" + terms.docFreq()); - //if we're either not looking by field or we're matching the specific field - if ((field == null) || field.equals(curField)) { - termMap.put(curField + ":" + text.utf8ToString(), Integer.valueOf((terms.docFreq()))); - } - } - } - } - - Iterator termIterator = termMap.keySet().iterator(); - for (int ii = 0; termIterator.hasNext() && ii < 100; ii++) { - String termDetails = termIterator.next(); - Integer termFreq = termMap.get(termDetails); - message(termDetails + ": " + termFreq); - } - indexReader.close(); - } - - /** Sort Map values - * @param m the map we're sorting - * from http://developer.java.sun.com/developer/qow/archive/170/index.jsp - */ - @SuppressWarnings("unchecked") - public static > Map.Entry[] - getSortedMapEntries(Map m) { - Set> set = m.entrySet(); - Map.Entry[] entries = - set.toArray(new Map.Entry[set.size()]); - Arrays.sort(entries, new Comparator>() { - public int compare(Map.Entry o1, Map.Entry o2) { - V v1 = o1.getValue(); - V v2 = o2.getValue(); - return v2.compareTo(v1); //descending order - } - }); - return entries; - } - - public void analyzer(String word) { - if ("current".equals(word)) { - String current = analyzerClassFQN == null ? 
"StandardAnalyzer" : analyzerClassFQN; - message("The currently used Analyzer class is: " + current); - return; - } - analyzerClassFQN = word; - message("Switched to Analyzer class " + analyzerClassFQN); - } -} - diff --git a/lucene/contrib/lucli/src/java/lucli/Lucli.java b/lucene/contrib/lucli/src/java/lucli/Lucli.java deleted file mode 100644 index e51d789417a..00000000000 --- a/lucene/contrib/lucli/src/java/lucli/Lucli.java +++ /dev/null @@ -1,321 +0,0 @@ -package lucli; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import java.io.File; -import java.io.IOException; -import java.io.UnsupportedEncodingException; -import java.util.Iterator; -import java.util.Set; -import java.util.StringTokenizer; -import java.util.TreeMap; - -import jline.ArgumentCompletor; -import jline.Completor; -import jline.ConsoleReader; -import jline.FileNameCompletor; -import jline.History; -import jline.SimpleCompletor; - -import org.apache.lucene.queryParser.ParseException; - -/** - * Main class for lucli: the Lucene Command Line Interface. - * This class handles mostly the actual CLI part, command names, help, etc. - */ -public class Lucli { - - final static String DEFAULT_INDEX = "index"; //directory "index" under the current directory - final static String HISTORYFILE = ".lucli"; //history file in user's home directory - public final static int MAX_TERMS = 100; //Maximum number of terms we're going to show - - // List of commands - // To add another command, add it in here, in the list of addcomand(), and in the switch statement - final static int NOCOMMAND = -2; - final static int UNKOWN = -1; - final static int INFO = 0; - final static int SEARCH = 1; - final static int OPTIMIZE = 2; - final static int QUIT = 3; - final static int HELP = 4; - final static int COUNT = 5; - final static int TERMS = 6; - final static int INDEX = 7; - final static int TOKENS = 8; - final static int EXPLAIN = 9; - final static int ANALYZER = 10; - - String historyFile; - TreeMap commandMap = new TreeMap(); - LuceneMethods luceneMethods; //current cli class we're using - boolean enableReadline; //false: use plain java. True: shared library readline - - /** - Main entry point. The first argument can be a filename with an - application initialization file. - */ - - public Lucli(String[] args) throws IOException { - String line; - - historyFile = System.getProperty("user.home") + File.separator + HISTORYFILE; - - /* - * Initialize the list of commands - */ - addCommand("info", INFO, "Display info about the current Lucene index. Example: info"); - addCommand("search", SEARCH, "Search the current index. Example: search foo", 1); - addCommand("count", COUNT, "Return the number of hits for a search. 
Example: count foo", 1); - addCommand("optimize", OPTIMIZE, "Optimize the current index"); - addCommand("quit", QUIT, "Quit/exit the program"); - addCommand("help", HELP, "Display help about commands"); - addCommand("terms", TERMS, "Show the first " + MAX_TERMS + " terms in this index. Supply a field name to only show terms in a specific field. Example: terms"); - addCommand("index", INDEX, "Choose a different lucene index. Example index my_index", 1); - addCommand("tokens", TOKENS, "Does a search and shows the top 10 tokens for each document. Verbose! Example: tokens foo", 1); - addCommand("explain", EXPLAIN, "Explanation that describes how the document scored against query. Example: explain foo", 1); - addCommand("analyzer", ANALYZER, "Specifies the Analyzer class to be used. Example: analyzer org.apache.lucene.analysis.SimpleAnalyzer", 1); - - //parse command line arguments - parseArgs(args); - - ConsoleReader cr = new ConsoleReader(); - //Readline.readHistoryFile(fullPath); - cr.setHistory(new History(new File(historyFile))); - - // set completer with list of words - Completor[] comp = new Completor[]{ - new SimpleCompletor(getCommandsAsArray()), - new FileNameCompletor() - }; - cr.addCompletor (new ArgumentCompletor(comp)); - - // main input loop - luceneMethods = new LuceneMethods(DEFAULT_INDEX); - while (true) { - try { - line = cr.readLine("lucli> "); - if (line != null) { - handleCommand(line, cr); - } - } catch (java.io.EOFException eof) { - System.out.println("");//new line - exit(); - } catch (UnsupportedEncodingException enc) { - enc.printStackTrace(System.err); - } catch (ParseException pe) { - pe.printStackTrace(System.err); - } catch (IOException ioe) { - ioe.printStackTrace(System.err); - } - } - } - - private String[] getCommandsAsArray() { - Set commandSet = commandMap.keySet(); - String[] commands = new String[commandMap.size()]; - int i = 0; - for (Iterator iter = commandSet.iterator(); iter.hasNext();) { - String cmd = iter.next(); - commands[i++] = cmd; - } - return commands; - } - - public static void main(String[] args) throws IOException { - new Lucli(args); - } - - - private void handleCommand(String line, ConsoleReader cr) throws IOException, ParseException { - String [] words = tokenizeCommand(line); - if (words.length == 0) - return; //white space - String query = ""; - if (line.trim().startsWith("#")) // # = comment - return; - //Command name and number of arguments - switch (getCommandId(words[0], words.length - 1)) { - case INFO: - luceneMethods.info(); - break; - case SEARCH: - for (int ii = 1; ii < words.length; ii++) { - query += words[ii] + " "; - } - luceneMethods.search(query, false, false, cr); - break; - case COUNT: - for (int ii = 1; ii < words.length; ii++) { - query += words[ii] + " "; - } - luceneMethods.count(query); - break; - case QUIT: - exit(); - break; - case TERMS: - if(words.length > 1) - luceneMethods.terms(words[1]); - else - luceneMethods.terms(null); - break; - case INDEX: - LuceneMethods newLm = new LuceneMethods(words[1]); - try { - newLm.info(); //will fail if can't open the index - luceneMethods = newLm; //OK, so we'll use the new one - } catch (IOException ioe) { - //problem we'll keep using the old one - error(ioe.toString()); - } - break; - case OPTIMIZE: - luceneMethods.optimize(); - break; - case TOKENS: - for (int ii = 1; ii < words.length; ii++) { - query += words[ii] + " "; - } - luceneMethods.search(query, false, true, cr); - break; - case EXPLAIN: - for (int ii = 1; ii < words.length; ii++) { - query += words[ii] + " "; 
- } - luceneMethods.search(query, true, false, cr); - break; - case ANALYZER: - luceneMethods.analyzer(words[1]); - break; - case HELP: - help(); - break; - case NOCOMMAND: //do nothing - break; - case UNKOWN: - System.out.println("Unknown command: " + words[0] + ". Type help to get a list of commands."); - break; - } - } - - private String [] tokenizeCommand(String line) { - StringTokenizer tokenizer = new StringTokenizer(line, " \t"); - int size = tokenizer.countTokens(); - String [] tokens = new String[size]; - for (int ii = 0; tokenizer.hasMoreTokens(); ii++) { - tokens[ii] = tokenizer.nextToken(); - } - return tokens; - } - - private void exit() { - System.exit(0); - } - - /** - * Add a command to the list of commands for the interpreter for a - * command that doesn't take any parameters. - * @param name - the name of the command - * @param id - the unique id of the command - * @param help - the help message for this command - */ - private void addCommand(String name, int id, String help) { - addCommand(name, id, help, 0); - } - - /** - * Add a command to the list of commands for the interpreter. - * @param name - the name of the command - * @param id - the unique id of the command - * @param help - the help message for this command - * @param params - the minimum number of required params if any - */ - private void addCommand(String name, int id, String help, int params) { - Command command = new Command(name, id, help, params); - commandMap.put(name, command); - } - - private int getCommandId(String name, int params) { - name = name.toLowerCase(); //treat uppercase and lower case commands the same - Command command = commandMap.get(name); - if (command == null) { - return(UNKOWN); - } - else { - if(command.params > params) { - error(command.name + " needs at least " + command.params + " arguments."); - return (NOCOMMAND); - } - return (command.id); - } - } - - private void help() { - Iterator commands = commandMap.keySet().iterator(); - while (commands.hasNext()) { - Command command = commandMap.get(commands.next()); - System.out.println("\t" + command.name + ": " + command.help); - - } - } - - private void error(String message) { - System.err.println("Error:" + message); - } - - private void message(String text) { - System.out.println(text); - } - - /* - * Parse command line arguments (currently none) - */ - private void parseArgs(String[] args) { - if (args.length > 0) { - usage(); - System.exit(1); - } - } - - private void usage() { - message("Usage: lucli.Lucli"); - message("(currently, no parameters are supported)"); - } - - private class Command { - String name; - int id; - String help; - int params; - - Command(String name, int id, String help, int params) { - this.name = name; - this.id = id; - this.help = help; - this.params = params; - } - - /** - * Prints out a usage message for this command. - */ - public String commandUsage() { - return (name + ":" + help + ". 
Command takes " + params + " params"); - } - - } -} diff --git a/lucene/contrib/lucli/src/java/lucli/package.html b/lucene/contrib/lucli/src/java/lucli/package.html deleted file mode 100644 index c96cd059e78..00000000000 --- a/lucene/contrib/lucli/src/java/lucli/package.html +++ /dev/null @@ -1,22 +0,0 @@ - - - - -Lucene Command Line Interface - - diff --git a/lucene/contrib/lucli/src/java/overview.html b/lucene/contrib/lucli/src/java/overview.html deleted file mode 100644 index 4e95640f6ea..00000000000 --- a/lucene/contrib/lucli/src/java/overview.html +++ /dev/null @@ -1,26 +0,0 @@ - - - - - lucli - - - - lucli - - \ No newline at end of file diff --git a/lucene/contrib/lucli/src/test/add.tests.here b/lucene/contrib/lucli/src/test/add.tests.here deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/lucene/contrib/swing/build.xml b/lucene/contrib/swing/build.xml deleted file mode 100644 index 3dcecc898d4..00000000000 --- a/lucene/contrib/swing/build.xml +++ /dev/null @@ -1,57 +0,0 @@ - - - - - - - - Swing Models - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lucene/contrib/swing/docs/index.html b/lucene/contrib/swing/docs/index.html deleted file mode 100644 index 49389012f34..00000000000 --- a/lucene/contrib/swing/docs/index.html +++ /dev/null @@ -1,97 +0,0 @@ - - - -Lucene Powered Swing Data Models - - - -

    Lucene Powered Swing Data Models

    -

    by Jonathan Simon

    -

     

    -

    What it is.

    -

    This package contains classes that help you easily integrate Lucene based searching into your Swing components. Currently there are classes to index and search JTables and JLists. This is done using model decorators rather than custom models to make it easier to search current models as well as new ones.

    -

    These models do not actually contain any data. Rather, the ListModel decorator (ListSearcher) and the TableModel decorator (TableSearcher) take a model in the constructor and delegate all calls to it (after a little alteration, but we'll get to that). That said, these are not full-fledged models themselves. You still have to have another model to decorate with the searching models. If you are adding searching to a pre-existing model, you can use your pre-existing model directly. Otherwise, you can implement a model from scratch or use a pre-existing one to get started.

    -

    What it isn't.

    -

    A complete component: These are just models. They are not complete components with search fields and buttons laid out like a searchable interface. You still have to build that since the UI changes drastically between applications.

    -

    A complete model: These are just model decorators. You can't just set the model of a JList or JTable to one of these models, and you can't add data directly to these models.

    -

    A front end for a Lucene index: In other words, you can't use these classes to point a JTable directly to a Lucene index. Although that's interesting in its own right, this is not that.

    -

    Usage:

    -

    Coding to both models is nearly identical. They both take the model to decorate at construction time. Here is the code from the demo to decorate a JTable model with the TableSearcher and set it as the table model.

    -
    //make a new JTable
    JTable table = new JTable();
    //make my base model, the model with the data
    BaseTableModel tableModel = new BaseTableModel(DataStore.getRestaurants());
    //decorate the tableModel with the TableSearcher
    TableSearcher searchTableModel = new TableSearcher(tableModel);
    //set the TableModel in the table to the TableSearcher
    table.setModel(searchTableModel);
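
    The JList version is wired the same way. Here is a minimal sketch of the equivalent ListSearcher setup; it assumes the demo's BaseListModel and DataStore classes, and the variable names are only illustrative.

    //make a new JList
    JList list = new JList();
    //make the base model, the model with the data (assumes the demo's BaseListModel)
    BaseListModel listModel = new BaseListModel(DataStore.getRestaurants());
    //decorate the listModel with the ListSearcher
    ListSearcher searchListModel = new ListSearcher(listModel);
    //set the ListModel in the list to the ListSearcher
    list.setModel(searchListModel);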

    Initially, you won't notice a difference. This is because there is no active search, so all data from the underlying model is displayed. You search by calling the search() method, passing a search string. This filters the data set down without changing the underlying data model, which is one of the main reasons for decorating in the first place. Any valid Lucene search string should work (see notes for more info on this). You'll probably have some code somewhere like this in your app to connect a text field and search button to the model.

    -
    //create components
    final JTextField searchField = new JTextField();
    JButton searchButton = new JButton("Go");

    //make an action listener
    ActionListener searchListener = new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        searchTableModel.search(searchField.getText().trim().toLowerCase());
      }
    };

    //register listeners
    searchButton.addActionListener(searchListener);
    searchField.addActionListener(searchListener);

    You also might want to have a clear search button, working the same way. But to keep things simple, if you search with a null String or an empty String, the search clears and you will once again see all of your data.

    -
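
    Here is one possible way to wire such a clear button. This is only a sketch: it reuses the searchField and searchTableModel variables from the snippet above and relies on the empty-string behavior just described.

    //clear button: an empty search string restores all rows from the decorated model
    JButton clearButton = new JButton("Clear");
    clearButton.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        searchField.setText("");
        searchTableModel.search("");
      }
    });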

    Demo notes:

    -

    The list demo does real-time searching. In other words, as you type, searches run and the result set updates. The table demo has a search button, and only searches when the button is clicked. Both work; I just implemented them differently to show the two UI metaphors.

    -
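
    As a rough sketch of how the as-you-type behavior could be wired, a javax.swing.event.DocumentListener can re-run the search on every edit. The searchField and searchListModel names below are assumptions carried over from the earlier snippets, not code copied from the demo.

    //re-run the search whenever the text in the search field changes
    searchField.getDocument().addDocumentListener(new DocumentListener() {
      public void insertUpdate(DocumentEvent e) { runSearch(); }
      public void removeUpdate(DocumentEvent e) { runSearch(); }
      public void changedUpdate(DocumentEvent e) { runSearch(); }
      private void runSearch() {
        searchListModel.search(searchField.getText().trim().toLowerCase());
      }
    });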

    Implementation notes:

    -

    This code started as a proof of concept so it's not a fully featured model. Don't get me wrong, it fully works, but it could use some improvement that it will hopefully get over time. I just wanted to get it out there and get people using it. I'm also trying to keep everything as simple as possible. Here are some of the issues.

    -
    • You can't change the model after the Searcher is constructed.
    • The search model decorators do update when the decorated model is updated, but not in a very efficient way. The whole search model is reindexed when anything changes. This is a definite scaling issue.
    • The indexing and searching logic needs to be generally more configurable to allow custom tailoring of searching and indexing.
    • The TableSearcher uses column names to index column values. This could be an issue with multiple-word column names.
    • The ListSearcher uses MultiFieldQueryParser even though it's not really indexing multiple fields.

     

    -

     

    - - diff --git a/lucene/contrib/swing/src/java/org/apache/lucene/swing/models/ListSearcher.java b/lucene/contrib/swing/src/java/org/apache/lucene/swing/models/ListSearcher.java deleted file mode 100644 index e8d2b7765c0..00000000000 --- a/lucene/contrib/swing/src/java/org/apache/lucene/swing/models/ListSearcher.java +++ /dev/null @@ -1,311 +0,0 @@ -package org.apache.lucene.swing.models; - -/** - * Copyright 2005 The Apache Software Foundation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import java.io.IOException; -import java.util.ArrayList; - -import javax.swing.AbstractListModel; -import javax.swing.ListModel; -import javax.swing.event.ListDataEvent; -import javax.swing.event.ListDataListener; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.core.WhitespaceAnalyzer; -import org.apache.lucene.document.Document; -import org.apache.lucene.document.Field; -import org.apache.lucene.document.Fieldable; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.IndexReader.AtomicReaderContext; -import org.apache.lucene.queryParser.MultiFieldQueryParser; -import org.apache.lucene.search.Collector; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.ScoreDoc; -import org.apache.lucene.search.Scorer; -import org.apache.lucene.store.RAMDirectory; -import org.apache.lucene.util.Version; - -/** - * See table searcher explanation. - * - */ -public class ListSearcher extends AbstractListModel { - private ListModel listModel; - - /** - * The reference links between the decorated ListModel - * and this list model based on search criteria - */ - private ArrayList rowToModelIndex = new ArrayList(); - - /** - * In memory lucene index - */ - private RAMDirectory directory; - - /** - * Cached lucene analyzer - */ - private Analyzer analyzer; - - /** - * Links between this list model and the decorated list model - * are maintained through links based on row number. This is a - * key constant to denote "row number" for indexing - */ - private static final String ROW_NUMBER = "ROW_NUMBER"; - - /** - * Since we only have one field, unlike lists with multiple - * fields -- we are just using a constant to denote field name. - * This is most likely unnecessary and should be removed at - * a later date - */ - private static final String FIELD_NAME = "FIELD_NAME"; - - /** - * Cache the current search String. Also used internally to - * key whether there is an active search running or not. i.e. if - * searchString is null, there is no active search. 
- */ - private String searchString = null; - private ListDataListener listModelListener; - - public ListSearcher(ListModel newModel) { - analyzer = new WhitespaceAnalyzer(Version.LUCENE_CURRENT); - setListModel(newModel); - listModelListener = new ListModelHandler(); - newModel.addListDataListener(listModelListener); - clearSearchingState(); - } - - private void setListModel(ListModel newModel) { - //remove listeners if there... - if (newModel != null) { - newModel.removeListDataListener(listModelListener); - } - - listModel = newModel; - if (listModel != null) { - listModel.addListDataListener(listModelListener); - } - - //recalculate the links between this list model and - //the inner list model since the decorated model just changed - reindex(); - - // let all listeners know the list has changed - fireContentsChanged(this, 0, getSize()); - } - - private void reindex() { - try { - // recreate the RAMDirectory - directory = new RAMDirectory(); - IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(Version.LUCENE_CURRENT, analyzer)); - - // iterate through all rows - for (int row=0; row < listModel.getSize(); row++){ - - //for each row make a new document - Document document = new Document(); - //add the row number of this row in the decorated list model - //this will allow us to retrieve the results later - //and map this list model's row to a row in the decorated - //list model - document.add(new Field(ROW_NUMBER, "" + row, Field.Store.YES, Field.Index.ANALYZED)); - //add the string representation of the row to the index - document.add(new Field(FIELD_NAME, String.valueOf(listModel.getElementAt(row)).toLowerCase(), Field.Store.YES, Field.Index.ANALYZED)); - writer.addDocument(document); - } - writer.optimize(); - writer.close(); - } catch (Exception e){ - e.printStackTrace(); - } - } - - /** - * Run a new search. - * - * @param searchString Any valid lucene search string - */ - public void search(String searchString){ - - //if search string is null or empty, clear the search == search all - if (searchString == null || searchString.equals("")){ - clearSearchingState(); - fireContentsChanged(this, 0, getSize()); - return; - } - - - try { - //cache search String - this.searchString = searchString; - - //make a new index searcher with the in memory (RAM) index. - IndexSearcher is = new IndexSearcher(directory, true); - - //make an array of fields - one for each column - String[] fields = {FIELD_NAME}; - - //build a query based on the fields, searchString and cached analyzer - //NOTE: This is an area for improvement since the MultiFieldQueryParser - // has some weirdness. - MultiFieldQueryParser parser = new MultiFieldQueryParser(Version.LUCENE_CURRENT, fields, analyzer); - Query query =parser.parse(searchString); - //reset this list model with the new results - resetSearchResults(is, query); - } catch (Exception e){ - e.printStackTrace(); - } - - //notify all listeners that the list has been changed - fireContentsChanged(this, 0, getSize()); - } - - final static class CountingCollector extends Collector { - public int numHits = 0; - - @Override - public void setScorer(Scorer scorer) throws IOException {} - @Override - public void collect(int doc) throws IOException { - numHits++; - } - - @Override - public void setNextReader(AtomicReaderContext context) {} - @Override - public boolean acceptsDocsOutOfOrder() { - return true; - } - } - - - /** - * - * @param hits The new result set to set this list to. 
- */ - private void resetSearchResults(IndexSearcher searcher, Query query) { - try { - //clear our index mapping this list model rows to - //the decorated inner list model - rowToModelIndex.clear(); - - CountingCollector countingCollector = new CountingCollector(); - searcher.search(query, countingCollector); - ScoreDoc[] hits = searcher.search(query, countingCollector.numHits).scoreDocs; - - //iterate through the hits - //get the row number stored at the index - //that number is the row number of the decorated - //table model row that we are mapping to - for (int t=0; tThis decorator works by holding a reference to a decorated ot inner - * TableModel. All data is stored within that table model, not this - * table model. Rather, this table model simply manages links to - * data in the inner table model according to the search. All methods on - * TableSearcher forward to the inner table model with subtle filtering - * or alteration according to the search criteria. - * - *

    Using the table model: - * - * Pass the TableModel you want to decorate in at the constructor. When - * the TableModel initializes, it displays all search results. Call - * the search method with any valid Lucene search String and the data - * will be filtered by the search string. Users can always clear the search - * at any time by searching with an empty string. Additionally, you can - * add a button calling the clearSearch() method. - * - */ -public class TableSearcher extends AbstractTableModel { - - /** - * The inner table model we are decorating - */ - protected TableModel tableModel; - - /** - * This listener is used to register this class as a listener to - * the decorated table model for update events - */ - private TableModelListener tableModelListener; - - /** - * these keeps reference to the decorated table model for data - * only rows that match the search criteria are linked - */ - private ArrayList rowToModelIndex = new ArrayList(); - - - //Lucene stuff. - - /** - * In memory lucene index - */ - private RAMDirectory directory; - - /** - * Cached lucene analyzer - */ - private Analyzer analyzer; - - /** - * Links between this table model and the decorated table model - * are maintained through links based on row number. This is a - * key constant to denote "row number" for indexing - */ - private static final String ROW_NUMBER = "ROW_NUMBER"; - - /** - * Cache the current search String. Also used internally to - * key whether there is an active search running or not. i.e. if - * searchString is null, there is no active search. - */ - private String searchString = null; - - /** - * @param tableModel The table model to decorate - */ - public TableSearcher(TableModel tableModel) { - analyzer = new WhitespaceAnalyzer(Version.LUCENE_CURRENT); - tableModelListener = new TableModelHandler(); - setTableModel(tableModel); - tableModel.addTableModelListener(tableModelListener); - clearSearchingState(); - } - - /** - * - * @return The inner table model this table model is decorating - */ - public TableModel getTableModel() { - return tableModel; - } - - /** - * Set the table model used by this table model - * @param tableModel The new table model to decorate - */ - public void setTableModel(TableModel tableModel) { - - //remove listeners if there... - if (this.tableModel != null) { - this.tableModel.removeTableModelListener(tableModelListener); - } - - this.tableModel = tableModel; - if (this.tableModel != null) { - this.tableModel.addTableModelListener(tableModelListener); - } - - //recalculate the links between this table model and - //the inner table model since the decorated model just changed - reindex(); - - // let all listeners know the table has changed - fireTableStructureChanged(); - } - - - /** - * Reset the search results and links to the decorated (inner) table - * model from this table model. 
- */ - private void reindex() { - try { - // recreate the RAMDirectory - directory = new RAMDirectory(); - IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig( - Version.LUCENE_CURRENT, analyzer)); - - // iterate through all rows - for (int row=0; row < tableModel.getRowCount(); row++){ - - //for each row make a new document - Document document = new Document(); - //add the row number of this row in the decorated table model - //this will allow us to retrieve the results later - //and map this table model's row to a row in the decorated - //table model - document.add(new Field(ROW_NUMBER, "" + row, Field.Store.YES, Field.Index.ANALYZED)); - //iterate through all columns - //index the value keyed by the column name - //NOTE: there could be a problem with using column names with spaces - for (int column=0; column < tableModel.getColumnCount(); column++){ - String columnName = tableModel.getColumnName(column); - String columnValue = String.valueOf(tableModel.getValueAt(row, column)).toLowerCase(); - document.add(new Field(columnName, columnValue, Field.Store.YES, Field.Index.ANALYZED)); - } - writer.addDocument(document); - } - writer.optimize(); - writer.close(); - } catch (Exception e){ - e.printStackTrace(); - } - } - - /** - * @return The current lucene analyzer - */ - public Analyzer getAnalyzer() { - return analyzer; - } - - /** - * @param analyzer The new analyzer to use - */ - public void setAnalyzer(Analyzer analyzer) { - this.analyzer = analyzer; - //reindex from the model with the new analyzer - reindex(); - - //rerun the search if there is an active search - if (isSearching()){ - search(searchString); - } - } - - /** - * Run a new search. - * - * @param searchString Any valid lucene search string - */ - public void search(String searchString){ - - //if search string is null or empty, clear the search == search all - if (searchString == null || searchString.equals("")){ - clearSearchingState(); - fireTableDataChanged(); - return; - } - - - try { - //cache search String - this.searchString = searchString; - - //make a new index searcher with the in memory (RAM) index. - IndexSearcher is = new IndexSearcher(directory, true); - - //make an array of fields - one for each column - String[] fields = new String[tableModel.getColumnCount()]; - for (int t=0; t getColumnClass(int column) { - return tableModel.getColumnClass(column); - } - - @Override - public boolean isCellEditable(int row, int column) { - return tableModel.isCellEditable(getModelRow(row), column); - } - - public Object getValueAt(int row, int column) { - return tableModel.getValueAt(getModelRow(row), column); - } - - @Override - public void setValueAt(Object aValue, int row, int column) { - tableModel.setValueAt(aValue, getModelRow(row), column); - } - - private boolean isSearching() { - return searchString != null; - } - - private class TableModelHandler implements TableModelListener { - public void tableChanged(TableModelEvent e) { - // If we're not searching, just pass the event along. - if (!isSearching()) { - clearSearchingState(); - reindex(); - fireTableChanged(e); - return; - } - - // Something has happened to the data that may have invalidated the search. 
- reindex(); - search(searchString); - fireTableDataChanged(); - return; - } - - } - -} diff --git a/lucene/contrib/swing/src/java/org/apache/lucene/swing/models/package.html b/lucene/contrib/swing/src/java/org/apache/lucene/swing/models/package.html deleted file mode 100644 index 5958657fde9..00000000000 --- a/lucene/contrib/swing/src/java/org/apache/lucene/swing/models/package.html +++ /dev/null @@ -1,22 +0,0 @@ - - - - -Decorators for JTable TableModel and JList ListModel encapsulating Lucene indexing and searching functionality. - - diff --git a/lucene/contrib/swing/src/java/overview.html b/lucene/contrib/swing/src/java/overview.html deleted file mode 100644 index 2331985df89..00000000000 --- a/lucene/contrib/swing/src/java/overview.html +++ /dev/null @@ -1,23 +0,0 @@ - - - - Apache Lucene Swing Component Models - - - - \ No newline at end of file diff --git a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/BaseListModel.java b/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/BaseListModel.java deleted file mode 100644 index e2b40f3d3e4..00000000000 --- a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/BaseListModel.java +++ /dev/null @@ -1,55 +0,0 @@ -package org.apache.lucene.swing.models; - -/** - * Copyright 2005 The Apache Software Foundation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; - -import javax.swing.AbstractListModel; - - -public class BaseListModel extends AbstractListModel { - private List data = new ArrayList(); - - public BaseListModel(Iterator iterator) { - while (iterator.hasNext()) { - data.add(iterator.next()); - } - } - - public int getSize() { - return data.size(); - } - - public Object getElementAt(int index) { - return data.get(index); - } - - public void addRow(Object toAdd) { - data.add(toAdd); - fireContentsChanged(this, 0, getSize()); - } - - public void removeRow(Object toRemove) { - data.remove(toRemove); - fireContentsChanged(this, 0, getSize()); - } - - - -} diff --git a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/BaseTableModel.java b/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/BaseTableModel.java deleted file mode 100644 index cd8f4929458..00000000000 --- a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/BaseTableModel.java +++ /dev/null @@ -1,104 +0,0 @@ -package org.apache.lucene.swing.models; - -/** - * Copyright 2005 The Apache Software Foundation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; - -import javax.swing.table.AbstractTableModel; - - -public class BaseTableModel extends AbstractTableModel { - private List columnNames = new ArrayList(); - private List rows = new ArrayList(); - - public BaseTableModel(Iterator data) { - columnNames.add("Name"); - columnNames.add("Type"); - columnNames.add("Phone"); - columnNames.add("Street"); - columnNames.add("City"); - columnNames.add("State"); - columnNames.add("Zip"); - - while (data.hasNext()) { - Object nextRow = data.next(); - rows.add(nextRow); - } - } - - public int getColumnCount() { - return columnNames.size(); - } - - public int getRowCount() { - return rows.size(); - } - - public void addRow(RestaurantInfo info){ - rows.add(info); - fireTableDataChanged(); - } - - public void removeRow(RestaurantInfo info){ - rows.remove(info); - fireTableDataChanged(); - } - - @Override - public boolean isCellEditable(int rowIndex, int columnIndex) { - return false; - } - - @Override - public Class getColumnClass(int columnIndex) { - return String.class; - } - - public Object getValueAt(int rowIndex, int columnIndex) { - RestaurantInfo restaurantInfo = (RestaurantInfo) rows.get(rowIndex); - if (columnIndex == 0){ // name - return restaurantInfo.getName(); - } else if (columnIndex == 1){ // category - return restaurantInfo.getType(); - } else if (columnIndex == 2){ // phone - return restaurantInfo.getPhone(); - } else if (columnIndex == 3){ // street - return restaurantInfo.getStreet(); - } else if (columnIndex == 4){ // city - return restaurantInfo.getCity(); - } else if (columnIndex == 5){ // state - return restaurantInfo.getState(); - } else if (columnIndex == 6){ // zip - return restaurantInfo.getZip(); - } else { - return ""; - } - } - - @Override - public void setValueAt(Object aValue, int rowIndex, int columnIndex) { - //no op - } - - @Override - public String getColumnName(int columnIndex) { - return columnNames.get(columnIndex).toString(); - } - -} diff --git a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/DataStore.java b/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/DataStore.java deleted file mode 100644 index 5313e0cc90e..00000000000 --- a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/DataStore.java +++ /dev/null @@ -1,202 +0,0 @@ -package org.apache.lucene.swing.models; - - -/** - * Copyright 2005 The Apache Software Foundation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Iterator; - - -public class DataStore { - - private static final String ITALIAN_CATEGORY = "Italian"; - private static final String CUBAN_CATEGORY = "Cuban"; - private static final String STEAK_CATEGORY = "Steak"; - private static int id = 0; - - static Collection restaurants = new ArrayList(); - static RestaurantInfo pinos = new RestaurantInfo(); - static RestaurantInfo canolis = new RestaurantInfo(); - static RestaurantInfo picadillo = new RestaurantInfo(); - static RestaurantInfo versailles = new RestaurantInfo(); - static RestaurantInfo laCaretta = new RestaurantInfo(); - static RestaurantInfo laCaretta2 = new RestaurantInfo(); - static RestaurantInfo laCaretta3 = new RestaurantInfo(); - static RestaurantInfo ranchaLuna = new RestaurantInfo(); - static RestaurantInfo leMerais = new RestaurantInfo(); - static RestaurantInfo chris = new RestaurantInfo(); - static RestaurantInfo outback = new RestaurantInfo(); - static RestaurantInfo outback2 = new RestaurantInfo(); - static RestaurantInfo outback3 = new RestaurantInfo(); - static RestaurantInfo outback4 = new RestaurantInfo(); - - - public static Iterator getRestaurants(){ - return restaurants.iterator(); - } - - static { - pinos.setId(getNextId()); - pinos.setType(ITALIAN_CATEGORY); - pinos.setName("Pino's"); - pinos.setPhone("(305) 111-2222"); - pinos.setStreet("12115 105th Street "); - pinos.setCity("Miami"); - pinos.setState("FL"); - pinos.setZip("33176"); - restaurants.add(pinos); - - canolis.setId(getNextId()); - canolis.setType(ITALIAN_CATEGORY); - canolis.setName("Canoli's"); - canolis.setPhone("(305) 234-5543"); - canolis.setStreet("12123 85th Street "); - canolis.setCity("Miami"); - canolis.setState("FL"); - canolis.setZip("33176"); - restaurants.add(canolis); - - picadillo.setId(getNextId()); - picadillo.setType(CUBAN_CATEGORY); - picadillo.setName("Picadillo"); - picadillo.setPhone("(305) 746-7865"); - picadillo.setStreet("109 12th Street "); - picadillo.setCity("Miami"); - picadillo.setState("FL"); - picadillo.setZip("33176"); - restaurants.add(picadillo); - - versailles.setId(getNextId()); - versailles.setType(CUBAN_CATEGORY); - versailles.setName("Cafe Versailles"); - versailles.setPhone("(305) 201-5438"); - versailles.setStreet("312 8th Street "); - versailles.setCity("Miami"); - versailles.setState("FL"); - versailles.setZip("33176"); - restaurants.add(versailles); - - laCaretta.setId(getNextId()); - laCaretta.setType(CUBAN_CATEGORY); - laCaretta.setName("La Carretta"); - laCaretta.setPhone("(305) 342-9876"); - laCaretta.setStreet("348 8th Street "); - laCaretta.setCity("Miami"); - laCaretta.setState("FL"); - laCaretta.setZip("33176"); - restaurants.add(laCaretta); - - laCaretta2.setId(getNextId()); - laCaretta2.setType(CUBAN_CATEGORY); - laCaretta2.setName("La Carretta"); - laCaretta2.setPhone("(305) 556-9876"); - laCaretta2.setStreet("31224 23rd Street "); - laCaretta2.setCity("Miami"); - laCaretta2.setState("FL"); - laCaretta2.setZip("33176"); - restaurants.add(laCaretta2); - - laCaretta3.setId(getNextId()); - laCaretta3.setType(CUBAN_CATEGORY); - laCaretta3.setName("La Carretta"); - laCaretta3.setPhone("(305) 682-9876"); - laCaretta3.setStreet("23543 107th Street "); - laCaretta3.setCity("Miami"); - laCaretta3.setState("FL"); - laCaretta3.setZip("33176"); - restaurants.add(laCaretta3); - - ranchaLuna.setId(getNextId()); - ranchaLuna.setType(CUBAN_CATEGORY); - ranchaLuna.setName("Rancha Luna"); - ranchaLuna.setPhone("(305) 
777-4384"); - ranchaLuna.setStreet("110 23rd Street "); - ranchaLuna.setCity("Miami"); - ranchaLuna.setState("FL"); - ranchaLuna.setZip("33176"); - restaurants.add(ranchaLuna); - - leMerais.setId(getNextId()); - leMerais.setType(STEAK_CATEGORY); - leMerais.setName("Le Merais"); - leMerais.setPhone("(212) 654-9187"); - leMerais.setStreet("11 West 46th Street"); - leMerais.setCity("New York"); - leMerais.setState("NY"); - leMerais.setZip("10018"); - restaurants.add(leMerais); - - chris.setId(getNextId()); - chris.setType(STEAK_CATEGORY); - chris.setName("Ruth's Chris Seakhouse"); - chris.setPhone("(305) 354-8885"); - chris.setStreet("12365 203rd Street "); - chris.setCity("Miami"); - chris.setState("FL"); - chris.setZip("33176"); - restaurants.add(chris); - - outback.setId(getNextId()); - outback.setType(STEAK_CATEGORY); - outback.setName("Outback"); - outback.setPhone("(305) 244-7623"); - outback.setStreet("348 136th Street "); - outback.setCity("Miami"); - outback.setState("FL"); - outback.setZip("33176"); - restaurants.add(outback); - - outback2.setId(getNextId()); - outback2.setType(STEAK_CATEGORY); - outback2.setName("Outback"); - outback2.setPhone("(305) 533-6522"); - outback2.setStreet("21 207th Street "); - outback2.setCity("Miami"); - outback2.setState("FL"); - outback2.setZip("33176"); - restaurants.add(outback2); - - outback3.setId(getNextId()); - outback3.setType(STEAK_CATEGORY); - outback3.setName("Outback"); - outback3.setPhone("(305) 244-7623"); - outback3.setStreet("10117 107th Street "); - outback3.setCity("Miami"); - outback3.setState("FL"); - outback3.setZip("33176"); - restaurants.add(outback3); - - outback4.setId(getNextId()); - outback4.setType(STEAK_CATEGORY); - outback4.setName("Outback"); - outback4.setPhone("(954) 221-3312"); - outback4.setStreet("10 11th Street "); - outback4.setCity("Aventura"); - outback4.setState("FL"); - outback4.setZip("32154"); - restaurants.add(outback4); - - } - - private static int getNextId(){ - id++; - return id; - } - -} diff --git a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/ListSearcherSimulator.java b/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/ListSearcherSimulator.java deleted file mode 100644 index 25531ee7dc1..00000000000 --- a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/ListSearcherSimulator.java +++ /dev/null @@ -1,88 +0,0 @@ -package org.apache.lucene.swing.models; - -/** - * Copyright 2005 The Apache Software Foundation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import java.awt.BorderLayout; - -import javax.swing.JFrame; -import javax.swing.JLabel; -import javax.swing.JList; -import javax.swing.JPanel; -import javax.swing.JScrollPane; -import javax.swing.JTextField; -import javax.swing.event.DocumentEvent; -import javax.swing.event.DocumentListener; - - -public class ListSearcherSimulator { - - public ListSearcherSimulator() { - JFrame frame = new JFrame(); - frame.setBounds(200,200, 400,250); - - JList list = new JList(); - JScrollPane scrollPane = new JScrollPane(list); - - final BaseListModel listModel = new BaseListModel(DataStore.getRestaurants()); - final ListSearcher listSearcher = new ListSearcher(listModel); - - list.setModel(listSearcher); - - final JTextField searchField = new JTextField(); - searchField.getDocument().addDocumentListener( - new DocumentListener(){ - public void changedUpdate(DocumentEvent e) { - listSearcher.search(searchField.getText().trim().toLowerCase()); - } - - public void insertUpdate(DocumentEvent e) { - listSearcher.search(searchField.getText().trim().toLowerCase()); - } - - public void removeUpdate(DocumentEvent e) { - listSearcher.search(searchField.getText().trim().toLowerCase()); - } - } - ); - - frame.getContentPane().setLayout(new BorderLayout()); - frame.getContentPane().add(scrollPane, BorderLayout.CENTER); - - JPanel searchPanel = new JPanel(); - searchPanel.setLayout(new BorderLayout(10,10)); - searchPanel.add(searchField, BorderLayout.CENTER); - searchPanel.add(new JLabel("Search: "), BorderLayout.WEST); - - JPanel topPanel = new JPanel(new BorderLayout()); - topPanel.add(searchPanel, BorderLayout.CENTER); - topPanel.add(new JPanel(), BorderLayout.EAST); - topPanel.add(new JPanel(), BorderLayout.WEST); - topPanel.add(new JPanel(), BorderLayout.NORTH); - topPanel.add(new JPanel(), BorderLayout.SOUTH); - - frame.getContentPane().add(topPanel, BorderLayout.NORTH); - - frame.setTitle("Lucene powered table searching"); - frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); - frame.show(); - } - - public static void main(String[] args) { - new ListSearcherSimulator(); - } - -} diff --git a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/RestaurantInfo.java b/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/RestaurantInfo.java deleted file mode 100644 index b932e32b007..00000000000 --- a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/RestaurantInfo.java +++ /dev/null @@ -1,101 +0,0 @@ -package org.apache.lucene.swing.models; - -/** - * Copyright 2005 The Apache Software Foundation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - - -public class RestaurantInfo { - private int id; - private String name; - - private String type; - - private String phone; - private String street; - private String city; - private String state; - private String zip; - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getPhone() { - return phone; - } - - public void setPhone(String phone) { - this.phone = phone; - } - - public String getStreet() { - return street; - } - - public void setStreet(String street) { - this.street = street; - } - - public String getCity() { - return city; - } - - public void setCity(String city) { - this.city = city; - } - - public String getState() { - return state; - } - - public void setState(String state) { - this.state = state; - } - - public String getZip() { - return zip; - } - - public void setZip(String zip) { - this.zip = zip; - } - - public String getType() { - return type; - } - - public void setType(String type) { - this.type = type; - } - - @Override - public String toString() { - return getName() + " - " + getPhone(); - } - -} diff --git a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TableSearcherSimulator.java b/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TableSearcherSimulator.java deleted file mode 100644 index e170a3fb478..00000000000 --- a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TableSearcherSimulator.java +++ /dev/null @@ -1,81 +0,0 @@ -package org.apache.lucene.swing.models; - -/** - * Copyright 2005 The Apache Software Foundation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import javax.swing.*; -import java.awt.*; -import java.awt.event.ActionListener; -import java.awt.event.ActionEvent; - - -public class TableSearcherSimulator { - - public TableSearcherSimulator() { - JFrame frame = new JFrame(); - frame.setBounds(200,200, 400,250); - - JTable table = new JTable(); - final BaseTableModel tableModel = new BaseTableModel(DataStore.getRestaurants()); - final TableSearcher searchTableModel = new TableSearcher(tableModel); - - table.setModel(searchTableModel); - JScrollPane scrollPane = new JScrollPane(table); - - final JTextField searchField = new JTextField(); - JButton searchButton = new JButton("Go"); - - ActionListener searchListener = new ActionListener() { - public void actionPerformed(ActionEvent e) { - searchTableModel.search(searchField.getText().trim().toLowerCase()); - searchField.requestFocus(); - } - }; - - searchButton.addActionListener(searchListener); - searchField.addActionListener(searchListener); - - - - frame.getContentPane().setLayout(new BorderLayout()); - frame.getContentPane().add(scrollPane, BorderLayout.CENTER); - - JPanel searchPanel = new JPanel(); - searchPanel.setLayout(new BorderLayout(10,10)); - searchPanel.add(searchField, BorderLayout.CENTER); - searchPanel.add(searchButton, BorderLayout.EAST); - - JPanel topPanel = new JPanel(new BorderLayout()); - topPanel.add(searchPanel, BorderLayout.CENTER); - topPanel.add(new JPanel(), BorderLayout.EAST); - topPanel.add(new JPanel(), BorderLayout.WEST); - topPanel.add(new JPanel(), BorderLayout.NORTH); - topPanel.add(new JPanel(), BorderLayout.SOUTH); - - frame.getContentPane().add(topPanel, BorderLayout.NORTH); - - frame.setTitle("Lucene powered table searching"); - frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); - frame.show(); - - } - - - public static void main(String[] args) { - new TableSearcherSimulator(); - } - -} diff --git a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestBasicList.java b/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestBasicList.java deleted file mode 100644 index 5aee55b9439..00000000000 --- a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestBasicList.java +++ /dev/null @@ -1,53 +0,0 @@ -package org.apache.lucene.swing.models; - -/** - * Copyright 2005 The Apache Software Foundation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import java.util.ArrayList; -import java.util.List; - -import javax.swing.ListModel; - -import org.apache.lucene.util.LuceneTestCase; - -/** - **/ -public class TestBasicList extends LuceneTestCase { - private ListModel baseListModel; - private ListSearcher listSearcher; - private List list; - - @Override - public void setUp() throws Exception { - super.setUp(); - list = new ArrayList(); - list.add(DataStore.canolis); - list.add(DataStore.chris); - - baseListModel = new BaseListModel(list.iterator()); - listSearcher = new ListSearcher(baseListModel); - } - - public void testRows(){ - assertEquals(list.size(), listSearcher.getSize()); - } - - public void testValueAt(){ - assertEquals(baseListModel.getElementAt(0), listSearcher.getElementAt(0)); - assertNotSame(baseListModel.getElementAt(1), listSearcher.getElementAt(0)); - } - -} diff --git a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestBasicTable.java b/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestBasicTable.java deleted file mode 100644 index 6b6f49c3a48..00000000000 --- a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestBasicTable.java +++ /dev/null @@ -1,60 +0,0 @@ -package org.apache.lucene.swing.models; - -/** - * Copyright 2005 The Apache Software Foundation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import java.util.ArrayList; -import java.util.List; - -import javax.swing.table.TableModel; - -import org.apache.lucene.util.LuceneTestCase; - - -public class TestBasicTable extends LuceneTestCase { - private TableModel baseTableModel; - private TableSearcher tableSearcher; - private List list; - - @Override - public void setUp() throws Exception { - super.setUp(); - list = new ArrayList(); - list.add(DataStore.canolis); - list.add(DataStore.chris); - - baseTableModel = new BaseTableModel(list.iterator()); - tableSearcher = new TableSearcher(baseTableModel); - } - - public void testColumns(){ - - assertEquals(baseTableModel.getColumnCount(), tableSearcher.getColumnCount()); - assertEquals(baseTableModel.getColumnName(0), tableSearcher.getColumnName(0)); - assertNotSame(baseTableModel.getColumnName(0), tableSearcher.getColumnName(1)); - assertEquals(baseTableModel.getColumnClass(0), tableSearcher.getColumnClass(0)); - } - - public void testRows(){ - assertEquals(list.size(), tableSearcher.getRowCount()); - } - - public void testValueAt(){ - assertEquals(baseTableModel.getValueAt(0,0), tableSearcher.getValueAt(0,0)); - assertEquals(baseTableModel.getValueAt(0,3), tableSearcher.getValueAt(0,3)); - } - -} diff --git a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestSearchingList.java b/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestSearchingList.java deleted file mode 100644 index 55566fe768d..00000000000 --- a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestSearchingList.java +++ /dev/null @@ -1,47 +0,0 @@ -package org.apache.lucene.swing.models; - -/** - * Copyright 2005 The Apache Software Foundation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import javax.swing.ListModel; - -import org.apache.lucene.util.LuceneTestCase; - - - -public class TestSearchingList extends LuceneTestCase { - private ListModel baseListModel; - private ListSearcher listSearcher; - - @Override - public void setUp() throws Exception { - super.setUp(); - baseListModel = new BaseListModel(DataStore.getRestaurants()); - listSearcher = new ListSearcher(baseListModel); - } - - public void testSearch(){ - //make sure data is there - assertEquals(baseListModel.getSize(), listSearcher.getSize()); - //search for pino's - listSearcher.search("pino's"); - assertEquals(1, listSearcher.getSize()); - //clear search and check that - listSearcher.search(null); - assertEquals(baseListModel.getSize(), listSearcher.getSize()); - } - -} diff --git a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestSearchingTable.java b/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestSearchingTable.java deleted file mode 100644 index 3715ced8017..00000000000 --- a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestSearchingTable.java +++ /dev/null @@ -1,45 +0,0 @@ -package org.apache.lucene.swing.models; - -/** - * Copyright 2005 The Apache Software Foundation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import javax.swing.table.TableModel; - -import org.apache.lucene.util.LuceneTestCase; - -public class TestSearchingTable extends LuceneTestCase { - private TableModel baseTableModel; - private TableSearcher tableSearcher; - - @Override - public void setUp() throws Exception { - super.setUp(); - baseTableModel = new BaseTableModel(DataStore.getRestaurants()); - tableSearcher = new TableSearcher(baseTableModel); - } - - public void testSearch(){ - //make sure data is there - assertEquals(baseTableModel.getRowCount(), tableSearcher.getRowCount()); - //search for pino's - tableSearcher.search("pino's"); - assertEquals(1, tableSearcher.getRowCount()); - //clear search and check that - tableSearcher.search(null); - assertEquals(baseTableModel.getRowCount(), tableSearcher.getRowCount()); - } - -} diff --git a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestUpdatingList.java b/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestUpdatingList.java deleted file mode 100644 index f51cec31b2a..00000000000 --- a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestUpdatingList.java +++ /dev/null @@ -1,79 +0,0 @@ -package org.apache.lucene.swing.models; - -/** - * Copyright 2005 The Apache Software Foundation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import org.apache.lucene.util.LuceneTestCase; - - -public class TestUpdatingList extends LuceneTestCase { - private BaseListModel baseListModel; - private ListSearcher listSearcher; - - RestaurantInfo infoToAdd1, infoToAdd2; - - @Override - public void setUp() throws Exception { - super.setUp(); - baseListModel = new BaseListModel(DataStore.getRestaurants()); - listSearcher = new ListSearcher(baseListModel); - - infoToAdd1 = new RestaurantInfo(); - infoToAdd1.setName("Pino's"); - - infoToAdd2 = new RestaurantInfo(); - infoToAdd2.setName("Pino's"); - infoToAdd2.setType("Italian"); - } - - public void testAddWithoutSearch(){ - assertEquals(baseListModel.getSize(), listSearcher.getSize()); - int count = listSearcher.getSize(); - baseListModel.addRow(infoToAdd1); - count++; - assertEquals(count, listSearcher.getSize()); - } - - public void testRemoveWithoutSearch(){ - assertEquals(baseListModel.getSize(), listSearcher.getSize()); - baseListModel.addRow(infoToAdd1); - int count = listSearcher.getSize(); - baseListModel.removeRow(infoToAdd1); - count--; - assertEquals(count, listSearcher.getSize()); - } - - public void testAddWithSearch(){ - assertEquals(baseListModel.getSize(), listSearcher.getSize()); - listSearcher.search("pino's"); - int count = listSearcher.getSize(); - baseListModel.addRow(infoToAdd2); - count++; - assertEquals(count, listSearcher.getSize()); - } - - public void testRemoveWithSearch(){ - assertEquals(baseListModel.getSize(), listSearcher.getSize()); - baseListModel.addRow(infoToAdd1); - listSearcher.search("pino's"); - int count = listSearcher.getSize(); - baseListModel.removeRow(infoToAdd1); - count--; - assertEquals(count, listSearcher.getSize()); - } - - -} diff --git a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestUpdatingTable.java b/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestUpdatingTable.java deleted file mode 100644 index 2165bb8f7ea..00000000000 --- a/lucene/contrib/swing/src/test/org/apache/lucene/swing/models/TestUpdatingTable.java +++ /dev/null @@ -1,79 +0,0 @@ -package org.apache.lucene.swing.models; - -/** - * Copyright 2005 The Apache Software Foundation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import org.apache.lucene.util.LuceneTestCase; - - -public class TestUpdatingTable extends LuceneTestCase { - private BaseTableModel baseTableModel; - private TableSearcher tableSearcher; - - RestaurantInfo infoToAdd1, infoToAdd2; - - @Override - public void setUp() throws Exception { - super.setUp(); - baseTableModel = new BaseTableModel(DataStore.getRestaurants()); - tableSearcher = new TableSearcher(baseTableModel); - - infoToAdd1 = new RestaurantInfo(); - infoToAdd1.setName("Pino's"); - infoToAdd1.setType("Italian"); - - infoToAdd2 = new RestaurantInfo(); - infoToAdd2.setName("Pino's"); - infoToAdd2.setType("Italian"); - } - - public void testAddWithoutSearch(){ - assertEquals(baseTableModel.getRowCount(), tableSearcher.getRowCount()); - int count = tableSearcher.getRowCount(); - baseTableModel.addRow(infoToAdd1); - count++; - assertEquals(count, tableSearcher.getRowCount()); - } - - public void testRemoveWithoutSearch(){ - assertEquals(baseTableModel.getRowCount(), tableSearcher.getRowCount()); - int count = tableSearcher.getRowCount(); - baseTableModel.addRow(infoToAdd1); - baseTableModel.removeRow(infoToAdd1); - assertEquals(count, tableSearcher.getRowCount()); - } - - public void testAddWithSearch(){ - assertEquals(baseTableModel.getRowCount(), tableSearcher.getRowCount()); - tableSearcher.search("pino's"); - int count = tableSearcher.getRowCount(); - baseTableModel.addRow(infoToAdd2); - count++; - assertEquals(count, tableSearcher.getRowCount()); - } - - public void testRemoveWithSearch(){ - assertEquals(baseTableModel.getRowCount(), tableSearcher.getRowCount()); - baseTableModel.addRow(infoToAdd1); - tableSearcher.search("pino's"); - int count = tableSearcher.getRowCount(); - baseTableModel.removeRow(infoToAdd1); - count--; - assertEquals(count, tableSearcher.getRowCount()); - } - - -} diff --git a/lucene/src/site/src/documentation/content/xdocs/lucene-contrib/index.xml b/lucene/src/site/src/documentation/content/xdocs/lucene-contrib/index.xml index e8394c7655b..8d156ccee7e 100644 --- a/lucene/src/site/src/documentation/content/xdocs/lucene-contrib/index.xml +++ b/lucene/src/site/src/documentation/content/xdocs/lucene-contrib/index.xml @@ -50,11 +50,6 @@ http://svn.apache.org/repos/asf/lucene/dev/trunk/lucene/contrib/.

    - ant
    -   Ant task to create Lucene indexes.
    -   See ant javadoc
      benchmark
        The benchmark contribution contains tools for benchmarking Lucene using standard, freely available corpora.
        See benchmark javadoc
    @@ -66,11 +61,6 @@
        See demo javadoc
    - db
    -   Provides integration with Berkeley DB.
    -   See db javadoc
      highlighter
        A set of classes for highlighting matching terms in search results.
        See highlighter javadoc
    @@ -80,11 +70,6 @@
        RAM-based index that enables much faster searching than RAMDirectory in certain situations.
        See instantiated javadoc
    - lucli
    -   An application that allows Lucene index manipulation from the command-line.
    -   See lucli javadoc
      memory
        High-performance single-document main memory index.
    @@ -121,11 +106,6 @@
        See spellchecker javadoc
    - swing
    -   Swing components designed to integrate with Lucene.
    -   See swing javadoc
      wordnet
        Tools to help utilize WordNet synonyms with Lucene.
        See wordnet javadoc
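For context, the class javadoc of the TableSearcher removed above describes its intended usage: decorate an existing TableModel, hand the decorator to the JTable, filter rows with search(String) using any valid Lucene query, and clear the filter by searching with an empty (or null) string. A minimal sketch of that usage follows, assuming a Lucene release that still ships contrib/swing; the sample data and the class name TableSearcherUsage are invented for illustration.

import javax.swing.JTable;
import javax.swing.table.DefaultTableModel;

import org.apache.lucene.swing.models.TableSearcher;

public class TableSearcherUsage {
  public static void main(String[] args) {
    // Plain Swing model holding the unfiltered rows (made-up sample data).
    DefaultTableModel base = new DefaultTableModel(
        new Object[][] { { "Pino's", "Italian" }, { "Outback", "Steak" } },
        new Object[] { "Name", "Type" });

    // Decorate the inner model; the JTable only ever sees rows that match the search.
    TableSearcher searcher = new TableSearcher(base);
    JTable table = new JTable(searcher);

    searcher.search("italian"); // any valid Lucene query string filters the rows
    searcher.search("");        // an empty (or null) query clears the filter again
  }
}

Because TableSearcher itself extends AbstractTableModel, the JTable needs no special handling: code written against TableModel keeps working on the filtered view.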

    diff --git a/lucene/src/site/src/documentation/content/xdocs/site.xml b/lucene/src/site/src/documentation/content/xdocs/site.xml index 085b2632ad6..bf4850eee2d 100755 --- a/lucene/src/site/src/documentation/content/xdocs/site.xml +++ b/lucene/src/site/src/documentation/content/xdocs/site.xml @@ -52,14 +52,10 @@ See http://forrest.apache.org/docs/linking.html for more info - - - - @@ -69,8 +65,7 @@ See http://forrest.apache.org/docs/linking.html for more info --> - - + @@ -100,14 +95,10 @@ See http://forrest.apache.org/docs/linking.html for more info - - - - @@ -115,7 +106,6 @@ See http://forrest.apache.org/docs/linking.html for more info - From e2b888e9dec35f55f76109ceaf2ae6986246f55b Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Mon, 23 May 2011 00:36:31 +0000 Subject: [PATCH 25/27] LUCENE-3127: pull CoreReaders out of SegmentReader.java git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1126284 13f79535-47bb-0310-9956-ffa450edef68 --- .../lucene/index/SegmentCoreReaders.java | 191 ++++++++++++++++++ .../apache/lucene/index/SegmentReader.java | 174 +--------------- 2 files changed, 193 insertions(+), 172 deletions(-) create mode 100644 lucene/src/java/org/apache/lucene/index/SegmentCoreReaders.java diff --git a/lucene/src/java/org/apache/lucene/index/SegmentCoreReaders.java b/lucene/src/java/org/apache/lucene/index/SegmentCoreReaders.java new file mode 100644 index 00000000000..af64cae7fb6 --- /dev/null +++ b/lucene/src/java/org/apache/lucene/index/SegmentCoreReaders.java @@ -0,0 +1,191 @@ +package org.apache.lucene.index; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import java.io.IOException; +import java.util.concurrent.atomic.AtomicInteger; + +import org.apache.lucene.index.codecs.FieldsProducer; +import org.apache.lucene.store.Directory; + +/** Holds core readers that are shared (unchanged) when + * SegmentReader is cloned or reopened */ +final class SegmentCoreReaders { + + // Counts how many other reader share the core objects + // (freqStream, proxStream, tis, etc.) of this reader; + // when coreRef drops to 0, these core objects may be + // closed. 
A given instance of SegmentReader may be + // closed, even those it shares core objects with other + // SegmentReaders: + private final AtomicInteger ref = new AtomicInteger(1); + + final String segment; + final FieldInfos fieldInfos; + + final FieldsProducer fields; + + final Directory dir; + final Directory cfsDir; + final int readBufferSize; + final int termsIndexDivisor; + + private final SegmentReader owner; + + FieldsReader fieldsReaderOrig; + TermVectorsReader termVectorsReaderOrig; + CompoundFileReader cfsReader; + CompoundFileReader storeCFSReader; + + SegmentCoreReaders(SegmentReader owner, Directory dir, SegmentInfo si, int readBufferSize, int termsIndexDivisor) throws IOException { + + if (termsIndexDivisor == 0) { + throw new IllegalArgumentException("indexDivisor must be < 0 (don't load terms index) or greater than 0 (got 0)"); + } + + segment = si.name; + final SegmentCodecs segmentCodecs = si.getSegmentCodecs(); + this.readBufferSize = readBufferSize; + this.dir = dir; + + boolean success = false; + + try { + Directory dir0 = dir; + if (si.getUseCompoundFile()) { + cfsReader = new CompoundFileReader(dir, IndexFileNames.segmentFileName(segment, "", IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize); + dir0 = cfsReader; + } + cfsDir = dir0; + si.loadFieldInfos(cfsDir, false); // prevent opening the CFS to load fieldInfos + fieldInfos = si.getFieldInfos(); + + this.termsIndexDivisor = termsIndexDivisor; + + // Ask codec for its Fields + fields = segmentCodecs.codec().fieldsProducer(new SegmentReadState(cfsDir, si, fieldInfos, readBufferSize, termsIndexDivisor)); + assert fields != null; + + success = true; + } finally { + if (!success) { + decRef(); + } + } + + // Must assign this at the end -- if we hit an + // exception above core, we don't want to attempt to + // purge the FieldCache (will hit NPE because core is + // not assigned yet). 
+ this.owner = owner; + } + + synchronized TermVectorsReader getTermVectorsReaderOrig() { + return termVectorsReaderOrig; + } + + synchronized FieldsReader getFieldsReaderOrig() { + return fieldsReaderOrig; + } + + synchronized void incRef() { + ref.incrementAndGet(); + } + + synchronized Directory getCFSReader() { + return cfsReader; + } + + synchronized void decRef() throws IOException { + + if (ref.decrementAndGet() == 0) { + + if (fields != null) { + fields.close(); + } + + if (termVectorsReaderOrig != null) { + termVectorsReaderOrig.close(); + } + + if (fieldsReaderOrig != null) { + fieldsReaderOrig.close(); + } + + if (cfsReader != null) { + cfsReader.close(); + } + + if (storeCFSReader != null) { + storeCFSReader.close(); + } + + // Now, notify any ReaderFinished listeners: + if (owner != null) { + owner.notifyReaderFinishedListeners(); + } + } + } + + synchronized void openDocStores(SegmentInfo si) throws IOException { + + assert si.name.equals(segment); + + if (fieldsReaderOrig == null) { + final Directory storeDir; + if (si.getDocStoreOffset() != -1) { + if (si.getDocStoreIsCompoundFile()) { + assert storeCFSReader == null; + storeCFSReader = new CompoundFileReader(dir, + IndexFileNames.segmentFileName(si.getDocStoreSegment(), "", IndexFileNames.COMPOUND_FILE_STORE_EXTENSION), + readBufferSize); + storeDir = storeCFSReader; + assert storeDir != null; + } else { + storeDir = dir; + assert storeDir != null; + } + } else if (si.getUseCompoundFile()) { + // In some cases, we were originally opened when CFS + // was not used, but then we are asked to open doc + // stores after the segment has switched to CFS + if (cfsReader == null) { + cfsReader = new CompoundFileReader(dir, IndexFileNames.segmentFileName(segment, "", IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize); + } + storeDir = cfsReader; + assert storeDir != null; + } else { + storeDir = dir; + assert storeDir != null; + } + + final String storesSegment = si.getDocStoreSegment(); + fieldsReaderOrig = new FieldsReader(storeDir, storesSegment, fieldInfos, readBufferSize, + si.getDocStoreOffset(), si.docCount); + + // Verify two sources of "maxDoc" agree: + if (si.getDocStoreOffset() == -1 && fieldsReaderOrig.size() != si.docCount) { + throw new CorruptIndexException("doc counts differ for segment " + segment + ": fieldsReader shows " + fieldsReaderOrig.size() + " but segmentInfo shows " + si.docCount); + } + + if (si.getHasVectors()) { // open term vector files only as needed + termVectorsReaderOrig = new TermVectorsReader(storeDir, storesSegment, fieldInfos, readBufferSize, si.getDocStoreOffset(), si.docCount); + } + } + } +} diff --git a/lucene/src/java/org/apache/lucene/index/SegmentReader.java b/lucene/src/java/org/apache/lucene/index/SegmentReader.java index 1bfe13b8a88..47d4cc62c66 100644 --- a/lucene/src/java/org/apache/lucene/index/SegmentReader.java +++ b/lucene/src/java/org/apache/lucene/index/SegmentReader.java @@ -29,7 +29,6 @@ import java.util.concurrent.atomic.AtomicInteger; import org.apache.lucene.document.Document; import org.apache.lucene.document.FieldSelector; -import org.apache.lucene.index.codecs.FieldsProducer; import org.apache.lucene.store.BufferedIndexInput; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IndexInput; @@ -70,174 +69,7 @@ public class SegmentReader extends IndexReader implements Cloneable { IndexInput singleNormStream; AtomicInteger singleNormRef; - CoreReaders core; - - // Holds core readers that are shared (unchanged) when - // SegmentReader is cloned or 
reopened - static final class CoreReaders { - - // Counts how many other reader share the core objects - // (freqStream, proxStream, tis, etc.) of this reader; - // when coreRef drops to 0, these core objects may be - // closed. A given instance of SegmentReader may be - // closed, even those it shares core objects with other - // SegmentReaders: - private final AtomicInteger ref = new AtomicInteger(1); - - final String segment; - final FieldInfos fieldInfos; - - final FieldsProducer fields; - - final Directory dir; - final Directory cfsDir; - final int readBufferSize; - final int termsIndexDivisor; - - private final SegmentReader origInstance; - - FieldsReader fieldsReaderOrig; - TermVectorsReader termVectorsReaderOrig; - CompoundFileReader cfsReader; - CompoundFileReader storeCFSReader; - - CoreReaders(SegmentReader origInstance, Directory dir, SegmentInfo si, int readBufferSize, int termsIndexDivisor) throws IOException { - - if (termsIndexDivisor == 0) { - throw new IllegalArgumentException("indexDivisor must be < 0 (don't load terms index) or greater than 0 (got 0)"); - } - - segment = si.name; - final SegmentCodecs segmentCodecs = si.getSegmentCodecs(); - this.readBufferSize = readBufferSize; - this.dir = dir; - - boolean success = false; - - try { - Directory dir0 = dir; - if (si.getUseCompoundFile()) { - cfsReader = new CompoundFileReader(dir, IndexFileNames.segmentFileName(segment, "", IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize); - dir0 = cfsReader; - } - cfsDir = dir0; - si.loadFieldInfos(cfsDir, false); // prevent opening the CFS to load fieldInfos - fieldInfos = si.getFieldInfos(); - - this.termsIndexDivisor = termsIndexDivisor; - - // Ask codec for its Fields - fields = segmentCodecs.codec().fieldsProducer(new SegmentReadState(cfsDir, si, fieldInfos, readBufferSize, termsIndexDivisor)); - assert fields != null; - - success = true; - } finally { - if (!success) { - decRef(); - } - } - - // Must assign this at the end -- if we hit an - // exception above core, we don't want to attempt to - // purge the FieldCache (will hit NPE because core is - // not assigned yet). 
- this.origInstance = origInstance; - } - - synchronized TermVectorsReader getTermVectorsReaderOrig() { - return termVectorsReaderOrig; - } - - synchronized FieldsReader getFieldsReaderOrig() { - return fieldsReaderOrig; - } - - synchronized void incRef() { - ref.incrementAndGet(); - } - - synchronized Directory getCFSReader() { - return cfsReader; - } - - synchronized void decRef() throws IOException { - - if (ref.decrementAndGet() == 0) { - - if (fields != null) { - fields.close(); - } - - if (termVectorsReaderOrig != null) { - termVectorsReaderOrig.close(); - } - - if (fieldsReaderOrig != null) { - fieldsReaderOrig.close(); - } - - if (cfsReader != null) { - cfsReader.close(); - } - - if (storeCFSReader != null) { - storeCFSReader.close(); - } - - // Now, notify any ReaderFinished listeners: - if (origInstance != null) { - origInstance.notifyReaderFinishedListeners(); - } - } - } - - synchronized void openDocStores(SegmentInfo si) throws IOException { - - assert si.name.equals(segment); - - if (fieldsReaderOrig == null) { - final Directory storeDir; - if (si.getDocStoreOffset() != -1) { - if (si.getDocStoreIsCompoundFile()) { - assert storeCFSReader == null; - storeCFSReader = new CompoundFileReader(dir, - IndexFileNames.segmentFileName(si.getDocStoreSegment(), "", IndexFileNames.COMPOUND_FILE_STORE_EXTENSION), - readBufferSize); - storeDir = storeCFSReader; - assert storeDir != null; - } else { - storeDir = dir; - assert storeDir != null; - } - } else if (si.getUseCompoundFile()) { - // In some cases, we were originally opened when CFS - // was not used, but then we are asked to open doc - // stores after the segment has switched to CFS - if (cfsReader == null) { - cfsReader = new CompoundFileReader(dir, IndexFileNames.segmentFileName(segment, "", IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize); - } - storeDir = cfsReader; - assert storeDir != null; - } else { - storeDir = dir; - assert storeDir != null; - } - - final String storesSegment = si.getDocStoreSegment(); - fieldsReaderOrig = new FieldsReader(storeDir, storesSegment, fieldInfos, readBufferSize, - si.getDocStoreOffset(), si.docCount); - - // Verify two sources of "maxDoc" agree: - if (si.getDocStoreOffset() == -1 && fieldsReaderOrig.size() != si.docCount) { - throw new CorruptIndexException("doc counts differ for segment " + segment + ": fieldsReader shows " + fieldsReaderOrig.size() + " but segmentInfo shows " + si.docCount); - } - - if (si.getHasVectors()) { // open term vector files only as needed - termVectorsReaderOrig = new TermVectorsReader(storeDir, storesSegment, fieldInfos, readBufferSize, si.getDocStoreOffset(), si.docCount); - } - } - } - } + SegmentCoreReaders core; /** * Sets the initial value @@ -248,8 +80,6 @@ public class SegmentReader extends IndexReader implements Cloneable { return (FieldsReader) core.getFieldsReaderOrig().clone(); } } - - Map norms = new HashMap(); @@ -281,7 +111,7 @@ public class SegmentReader extends IndexReader implements Cloneable { boolean success = false; try { - instance.core = new CoreReaders(instance, dir, si, readBufferSize, termInfosIndexDivisor); + instance.core = new SegmentCoreReaders(instance, dir, si, readBufferSize, termInfosIndexDivisor); if (doOpenStores) { instance.core.openDocStores(si); } From b554dd19021a5559081668859e0c6997fa907321 Mon Sep 17 00:00:00 2001 From: Steven Rowe Date: Mon, 23 May 2011 04:30:59 +0000 Subject: [PATCH 26/27] LUCENE-2981: Removed ant, bdb, bdb-je, lucli, and swing contribs from the IntelliJ IDEA configuration. 
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1126307 13f79535-47bb-0310-9956-ffa450edef68 --- dev-tools/idea/.idea/ant.xml | 6 -- dev-tools/idea/.idea/modules.xml | 5 -- dev-tools/idea/.idea/workspace.xml | 90 ++++++------------- dev-tools/idea/lucene/contrib/ant/ant.iml | 31 ------- .../idea/lucene/contrib/db/bdb-je/bdb-je.iml | 27 ------ dev-tools/idea/lucene/contrib/db/bdb/bdb.iml | 27 ------ dev-tools/idea/lucene/contrib/lucli/lucli.iml | 28 ------ dev-tools/idea/lucene/contrib/swing/swing.iml | 19 ---- 8 files changed, 25 insertions(+), 208 deletions(-) delete mode 100644 dev-tools/idea/lucene/contrib/ant/ant.iml delete mode 100644 dev-tools/idea/lucene/contrib/db/bdb-je/bdb-je.iml delete mode 100644 dev-tools/idea/lucene/contrib/db/bdb/bdb.iml delete mode 100644 dev-tools/idea/lucene/contrib/lucli/lucli.iml delete mode 100644 dev-tools/idea/lucene/contrib/swing/swing.iml diff --git a/dev-tools/idea/.idea/ant.xml b/dev-tools/idea/.idea/ant.xml index c50e9505405..13dca4f1131 100644 --- a/dev-tools/idea/.idea/ant.xml +++ b/dev-tools/idea/.idea/ant.xml @@ -3,21 +3,15 @@ - - - - - - diff --git a/dev-tools/idea/.idea/modules.xml b/dev-tools/idea/.idea/modules.xml index 1618e1c9a87..f0f762126ee 100644 --- a/dev-tools/idea/.idea/modules.xml +++ b/dev-tools/idea/.idea/modules.xml @@ -4,20 +4,15 @@ - - - - - diff --git a/dev-tools/idea/.idea/workspace.xml b/dev-tools/idea/.idea/workspace.xml index 28288b3deff..b9fe4ea722a 100644 --- a/dev-tools/idea/.idea/workspace.xml +++ b/dev-tools/idea/.idea/workspace.xml @@ -8,27 +8,6 @@ - - - - - - - - - - - - - - - - - - - - - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dev-tools/idea/lucene/contrib/ant/ant.iml b/dev-tools/idea/lucene/contrib/ant/ant.iml deleted file mode 100644 index 8987c57a65f..00000000000 --- a/dev-tools/idea/lucene/contrib/ant/ant.iml +++ /dev/null @@ -1,31 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/dev-tools/idea/lucene/contrib/db/bdb-je/bdb-je.iml b/dev-tools/idea/lucene/contrib/db/bdb-je/bdb-je.iml deleted file mode 100644 index af2667fdc24..00000000000 --- a/dev-tools/idea/lucene/contrib/db/bdb-je/bdb-je.iml +++ /dev/null @@ -1,27 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/dev-tools/idea/lucene/contrib/db/bdb/bdb.iml b/dev-tools/idea/lucene/contrib/db/bdb/bdb.iml deleted file mode 100644 index af2667fdc24..00000000000 --- a/dev-tools/idea/lucene/contrib/db/bdb/bdb.iml +++ /dev/null @@ -1,27 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/dev-tools/idea/lucene/contrib/lucli/lucli.iml b/dev-tools/idea/lucene/contrib/lucli/lucli.iml deleted file mode 100644 index 6d61e7ec871..00000000000 --- a/dev-tools/idea/lucene/contrib/lucli/lucli.iml +++ /dev/null @@ -1,28 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/dev-tools/idea/lucene/contrib/swing/swing.iml b/dev-tools/idea/lucene/contrib/swing/swing.iml deleted file mode 100644 index a84cc08be50..00000000000 --- a/dev-tools/idea/lucene/contrib/swing/swing.iml +++ /dev/null @@ -1,19 +0,0 @@ - - - - - - - - - - - - - - - - - - - From 00c35bfec344874e67eef72aeef0db224489d07d Mon Sep 17 00:00:00 2001 From: Steven Rowe Date: Mon, 23 May 2011 04:55:03 +0000 Subject: [PATCH 27/27] LUCENE-2981: Removed ant, bdb, bdb-je, lucli, and swing contribs from the Maven configuration. 
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1126310 13f79535-47bb-0310-9956-ffa450edef68 --- .../maven/lucene/contrib/ant/pom.xml.template | 114 ------------------ .../lucene/contrib/db/bdb-je/pom.xml.template | 75 ------------ .../lucene/contrib/db/bdb/pom.xml.template | 86 ------------- .../maven/lucene/contrib/db/pom.xml.template | 49 -------- .../lucene/contrib/lucli/pom.xml.template | 107 ---------------- .../maven/lucene/contrib/pom.xml.template | 4 - .../lucene/contrib/swing/pom.xml.template | 100 --------------- dev-tools/maven/pom.xml.template | 62 ---------- 8 files changed, 597 deletions(-) delete mode 100644 dev-tools/maven/lucene/contrib/ant/pom.xml.template delete mode 100644 dev-tools/maven/lucene/contrib/db/bdb-je/pom.xml.template delete mode 100644 dev-tools/maven/lucene/contrib/db/bdb/pom.xml.template delete mode 100644 dev-tools/maven/lucene/contrib/db/pom.xml.template delete mode 100644 dev-tools/maven/lucene/contrib/lucli/pom.xml.template delete mode 100644 dev-tools/maven/lucene/contrib/swing/pom.xml.template diff --git a/dev-tools/maven/lucene/contrib/ant/pom.xml.template b/dev-tools/maven/lucene/contrib/ant/pom.xml.template deleted file mode 100644 index e300d0f8ada..00000000000 --- a/dev-tools/maven/lucene/contrib/ant/pom.xml.template +++ /dev/null @@ -1,114 +0,0 @@ - - - 4.0.0 - - org.apache.lucene - lucene-parent - @version@ - ../../pom.xml - - org.apache.lucene - lucene-ant - jar - Lucene Contrib Ant - Ant task to create Lucene indexes - - lucene/contrib/ant - ../../build/contrib/ant - - - - ${project.groupId} - lucene-core - ${project.version} - - - ${project.groupId} - lucene-test-framework - ${project.version} - test - - - ${project.groupId} - lucene-analyzers-common - ${project.version} - - - jtidy - jtidy - - - org.apache.ant - ant - - - org.apache.ant - ant-junit - test - - - junit - junit - test - - - - ${build-directory} - ${build-directory}/classes/java - ${build-directory}/classes/test - src/java - src/test - - - src/resources - - - - - ${project.build.testSourceDirectory} - - **/*.java - - - - - - org.codehaus.mojo - appassembler-maven-plugin - - -Xmx128M - flat - - windows - unix - - - - org.apache.lucene.ant.HtmlDocument - HtmlDocument - - - - - - - diff --git a/dev-tools/maven/lucene/contrib/db/bdb-je/pom.xml.template b/dev-tools/maven/lucene/contrib/db/bdb-je/pom.xml.template deleted file mode 100644 index a5d7647b526..00000000000 --- a/dev-tools/maven/lucene/contrib/db/bdb-je/pom.xml.template +++ /dev/null @@ -1,75 +0,0 @@ - - - 4.0.0 - - org.apache.lucene - lucene-parent - @version@ - ../../../pom.xml - - org.apache.lucene - lucene-bdb-je - jar - Lucene Contrib bdb-je - Berkeley DB based Directory implementation - - lucene/contrib/db/bdb-je - ../../../build/contrib/db/bdb-je - - - - ${project.groupId} - lucene-core - ${project.version} - - - ${project.groupId} - lucene-test-framework - ${project.version} - test - - - com.sleepycat - berkeleydb-je - - - junit - junit - test - - - - ${build-directory} - ${build-directory}/classes/java - ${build-directory}/classes/test - src/java - src/test - - - ${project.build.testSourceDirectory} - - **/*.java - - - - - diff --git a/dev-tools/maven/lucene/contrib/db/bdb/pom.xml.template b/dev-tools/maven/lucene/contrib/db/bdb/pom.xml.template deleted file mode 100644 index d5d5123ab5f..00000000000 --- a/dev-tools/maven/lucene/contrib/db/bdb/pom.xml.template +++ /dev/null @@ -1,86 +0,0 @@ - - - 4.0.0 - - org.apache.lucene - lucene-parent - @version@ - ../../../pom.xml - - org.apache.lucene - 
lucene-bdb - jar - Lucene Contrib bdb - Berkeley DB based Directory implementation - - lucene/contrib/db/bdb - ../../../build/contrib/db/bdb - - - - ${project.groupId} - lucene-core - ${project.version} - - - ${project.groupId} - lucene-test-framework - ${project.version} - test - - - com.sleepycat - berkeleydb - - - junit - junit - test - - - - ${build-directory} - ${build-directory}/classes/java - ${build-directory}/classes/test - src/java - src/test - - - ${project.build.testSourceDirectory} - - **/*.java - - - - - - org.apache.maven.plugins - maven-surefire-plugin - - - - true - - - - - diff --git a/dev-tools/maven/lucene/contrib/db/pom.xml.template b/dev-tools/maven/lucene/contrib/db/pom.xml.template deleted file mode 100644 index aa3b0daa77f..00000000000 --- a/dev-tools/maven/lucene/contrib/db/pom.xml.template +++ /dev/null @@ -1,49 +0,0 @@ - - - 4.0.0 - - org.apache.lucene - lucene-parent - @version@ - ../../pom.xml - - org.apache.lucene - lucene-db-aggregator - Lucene Database aggregator POM - pom - - bdb - bdb-je - - - ../../build/contrib/db/lucene-db-aggregator - - - org.apache.maven.plugins - maven-deploy-plugin - - true - - - - - diff --git a/dev-tools/maven/lucene/contrib/lucli/pom.xml.template b/dev-tools/maven/lucene/contrib/lucli/pom.xml.template deleted file mode 100644 index efa4c53a882..00000000000 --- a/dev-tools/maven/lucene/contrib/lucli/pom.xml.template +++ /dev/null @@ -1,107 +0,0 @@ - - - 4.0.0 - - org.apache.lucene - lucene-parent - @version@ - ../../pom.xml - - org.apache.lucene - lucene-lucli - jar - Lucene Lucli - Lucene Command Line Interface - - lucene/contrib/lucli - ../../build/contrib/lucli - - - - ${project.groupId} - lucene-core - ${project.version} - - - ${project.groupId} - lucene-test-framework - ${project.version} - test - - - ${project.groupId} - lucene-analyzers-common - ${project.version} - - - jline - jline - - - junit - junit - test - - - - ${build-directory} - ${build-directory}/classes/java - ${build-directory}/classes/test - src/java - src/test - - - ${project.build.testSourceDirectory} - - **/*.java - - - - - - org.apache.maven.plugins - maven-javadoc-plugin - - true - - - - org.codehaus.mojo - appassembler-maven-plugin - - -Xmx128M - flat - - windows - unix - - - - lucli.Lucli - lucli - - - - - - - diff --git a/dev-tools/maven/lucene/contrib/pom.xml.template b/dev-tools/maven/lucene/contrib/pom.xml.template index f16895b084d..abf7b3eb6fc 100644 --- a/dev-tools/maven/lucene/contrib/pom.xml.template +++ b/dev-tools/maven/lucene/contrib/pom.xml.template @@ -31,19 +31,15 @@ Lucene Contrib aggregator POM pom - ant - db demo highlighter instantiated - lucli memory misc queries queryparser spatial spellchecker - swing wordnet xml-query-parser diff --git a/dev-tools/maven/lucene/contrib/swing/pom.xml.template b/dev-tools/maven/lucene/contrib/swing/pom.xml.template deleted file mode 100644 index bb5178d1b60..00000000000 --- a/dev-tools/maven/lucene/contrib/swing/pom.xml.template +++ /dev/null @@ -1,100 +0,0 @@ - - - 4.0.0 - - org.apache.lucene - lucene-parent - @version@ - ../../pom.xml - - org.apache.lucene - lucene-swing - jar - Lucene Swing - Swing Models - - lucene/contrib/swing - ../../build/contrib/swing - - - - ${project.groupId} - lucene-core - ${project.version} - - - ${project.groupId} - lucene-test-framework - ${project.version} - test - - - ${project.groupId} - lucene-analyzers-common - ${project.version} - - - junit - junit - test - - - - ${build-directory} - ${build-directory}/classes/java - ${build-directory}/classes/test - src/java - 
src/test - - - ${project.build.testSourceDirectory} - - **/*.java - - - - - - org.codehaus.mojo - appassembler-maven-plugin - - -Xmx128M - flat - - windows - unix - - - - org.apache.lucene.swing.models.ListSearcherSimulator - ListSearchSimulator - - - org.apache.lucene.swing.models.TableSearcherSimulator - TableSearchSimulator - - - - - - - diff --git a/dev-tools/maven/pom.xml.template b/dev-tools/maven/pom.xml.template index f2d98038cc3..5c026af66d1 100644 --- a/dev-tools/maven/pom.xml.template +++ b/dev-tools/maven/pom.xml.template @@ -130,16 +130,6 @@ guava r05 - - com.sleepycat - berkeleydb - 4.7.25 - - - com.sleepycat - berkeleydb-je - 3.3.93 - commons-beanutils commons-beanutils @@ -205,11 +195,6 @@ jline 0.9.1 - - jtidy - jtidy - 4aug2000r7-dev - junit junit @@ -583,25 +568,6 @@ bootstrap - - org.apache.maven.plugins - maven-antrun-plugin - - - get-jars-and-poms - install - - run - - - - - - - - - - org.apache.maven.plugins maven-install-plugin @@ -634,34 +600,6 @@ modules/benchmark/lib/xercesImpl-2.9.1-patched-XERCESJ-1257.jar - - install-berkeleydb - install - - install-file - - - com.sleepycat - berkeleydb - 4.7.25 - jar - lucene/contrib/db/bdb/lib/db-4.7.25.jar - - - - install-berkeleydb-je - install - - install-file - - - com.sleepycat - berkeleydb-je - 3.3.93 - jar - lucene/contrib/db/bdb-je/lib/je-3.3.93.jar - - install-solr-commons-csv install