Upgrade to lucene-5.5.0-snapshot-1721183.

Some files that implement or use the Scorer API had to be changed because of
https://issues.apache.org/jira/browse/LUCENE-6919.
This commit is contained in:
Adrien Grand 2015-12-21 17:01:28 +01:00
parent 454f3e8679
commit cf52e96c42
62 changed files with 146 additions and 209 deletions

View File

@ -1,5 +1,5 @@
elasticsearch = 3.0.0-SNAPSHOT
lucene = 5.5.0-snapshot-1719088
lucene = 5.5.0-snapshot-1721183
# optional dependencies
spatial4j = 0.5

View File

@ -284,7 +284,8 @@ public class Lucene {
continue;
}
final Bits liveDocs = context.reader().getLiveDocs();
for (int doc = scorer.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = scorer.nextDoc()) {
final DocIdSetIterator iterator = scorer.iterator();
for (int doc = iterator.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = iterator.nextDoc()) {
if (liveDocs == null || liveDocs.get(doc)) {
return true;
}
@ -667,19 +668,11 @@ public class Lucene {
throw new IllegalStateException(message);
}
@Override
public int advance(int arg0) throws IOException {
throw new IllegalStateException(message);
}
@Override
public long cost() {
throw new IllegalStateException(message);
}
@Override
public int docID() {
throw new IllegalStateException(message);
}
@Override
public int nextDoc() throws IOException {
public DocIdSetIterator iterator() {
throw new IllegalStateException(message);
}
};
@ -757,10 +750,10 @@ public class Lucene {
if (scorer == null) {
return new Bits.MatchNoBits(maxDoc);
}
final TwoPhaseIterator twoPhase = scorer.asTwoPhaseIterator();
final TwoPhaseIterator twoPhase = scorer.twoPhaseIterator();
final DocIdSetIterator iterator;
if (twoPhase == null) {
iterator = scorer;
iterator = scorer.iterator();
} else {
iterator = twoPhase.approximation();
}

View File

@ -29,6 +29,7 @@ import org.apache.lucene.index.TermState;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.CollectionStatistics;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchNoDocsQuery;
@ -120,7 +121,7 @@ public final class AllTermQuery extends Query {
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
AllTermScorer scorer = scorer(context);
if (scorer != null) {
int newDoc = scorer.advance(doc);
int newDoc = scorer.iterator().advance(doc);
if (newDoc == doc) {
float score = scorer.score();
float freq = scorer.freq();
@ -213,18 +214,8 @@ public final class AllTermQuery extends Query {
}
@Override
public int nextDoc() throws IOException {
return postings.nextDoc();
}
@Override
public int advance(int target) throws IOException {
return postings.advance(target);
}
@Override
public long cost() {
return postings.cost();
public DocIdSetIterator iterator() {
return postings;
}
}

View File

@ -28,6 +28,7 @@ import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FilteredDocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.BitSet;
@ -99,11 +100,12 @@ public class FilterableTermsEnum extends TermsEnum {
}
BitSet bits = null;
if (weight != null) {
DocIdSetIterator docs = weight.scorer(context);
if (docs == null) {
Scorer scorer = weight.scorer(context);
if (scorer == null) {
// fully filtered, none matching, no need to iterate on this
continue;
}
DocIdSetIterator docs = scorer.iterator();
// we want to force apply deleted docs
final Bits liveDocs = context.reader().getLiveDocs();

View File

@ -1,67 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.lucene.search;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import java.io.IOException;
/**
*
*/
// A Scorer over the empty doc-id set: iteration terminates immediately and
// score()/freq() are never legally reachable.
// NOTE(review): this commit DELETES this file — under LUCENE-6919 iteration
// moved off Scorer onto Scorer.iterator(), so an "empty scorer" is replaced
// by returning null from Weight.scorer(). Shown here as the pre-change source.
public class EmptyScorer extends Scorer {
// -1 (unpositioned) until the first nextDoc(), then pinned at NO_MORE_DOCS.
private int docId = -1;
public EmptyScorer(Weight weight) {
super(weight);
}
@Override
public float score() throws IOException {
// Unreachable in correct use: there is never a current matching doc.
throw new UnsupportedOperationException("Should never be called");
}
@Override
public int freq() throws IOException {
// Unreachable in correct use: there is never a current matching doc.
throw new UnsupportedOperationException("Should never be called");
}
@Override
public int docID() {
return docId;
}
@Override
public int nextDoc() throws IOException {
// The assert guards against advancing an already-exhausted iterator,
// which the DocIdSetIterator contract forbids.
assert docId != NO_MORE_DOCS;
return docId = NO_MORE_DOCS;
}
@Override
public int advance(int target) throws IOException {
// slowAdvance loops nextDoc(); with an empty set it exhausts in one step.
return slowAdvance(target);
}
@Override
public long cost() {
// No documents will ever be matched.
return 0;
}
}

View File

@ -19,6 +19,7 @@
package org.elasticsearch.common.lucene.search.function;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
@ -27,6 +28,7 @@ import java.io.IOException;
abstract class CustomBoostFactorScorer extends Scorer {
final Scorer scorer;
final DocIdSetIterator iterator;
final float maxBoost;
final CombineFunction scoreCombiner;
@ -42,6 +44,7 @@ abstract class CustomBoostFactorScorer extends Scorer {
nextDoc = new MinScoreNextDoc();
}
this.scorer = scorer;
this.iterator = scorer.iterator();
this.maxBoost = maxBoost;
this.scoreCombiner = scoreCombiner;
this.minScore = minScore;
@ -53,13 +56,25 @@ abstract class CustomBoostFactorScorer extends Scorer {
}
@Override
public int advance(int target) throws IOException {
return nextDoc.advance(target);
}
@Override
public int nextDoc() throws IOException {
return nextDoc.nextDoc();
// Exposes this scorer's iteration under the post-LUCENE-6919 API: callers now
// obtain a DocIdSetIterator from the Scorer instead of iterating the Scorer
// itself. nextDoc/advance route through the NextDoc strategy object (which
// applies min-score filtering when configured), while docID/cost delegate
// straight to the wrapped scorer's underlying iterator.
public DocIdSetIterator iterator() {
return new DocIdSetIterator() {
@Override
public int nextDoc() throws IOException {
// Strategy decides whether low-scoring docs are skipped.
return nextDoc.nextDoc();
}
@Override
public int advance(int target) throws IOException {
return nextDoc.advance(target);
}
@Override
public long cost() {
// Upper bound only; min-score filtering may visit fewer docs.
return iterator.cost();
}
@Override
public int docID() {
return iterator.docID();
}
};
}
public abstract float innerScore() throws IOException;
@ -74,11 +89,6 @@ abstract class CustomBoostFactorScorer extends Scorer {
return scorer.freq();
}
@Override
public long cost() {
return scorer.cost();
}
public interface NextDoc {
public int advance(int target) throws IOException;
@ -94,8 +104,8 @@ abstract class CustomBoostFactorScorer extends Scorer {
public int nextDoc() throws IOException {
int doc;
do {
doc = scorer.nextDoc();
if (doc == NO_MORE_DOCS) {
doc = iterator.nextDoc();
if (doc == DocIdSetIterator.NO_MORE_DOCS) {
return doc;
}
currentScore = innerScore();
@ -110,13 +120,13 @@ abstract class CustomBoostFactorScorer extends Scorer {
@Override
public int advance(int target) throws IOException {
int doc = scorer.advance(target);
if (doc == NO_MORE_DOCS) {
int doc = iterator.advance(target);
if (doc == DocIdSetIterator.NO_MORE_DOCS) {
return doc;
}
currentScore = innerScore();
if (currentScore < minScore) {
return scorer.nextDoc();
return iterator.nextDoc();
}
return doc;
}
@ -126,7 +136,7 @@ abstract class CustomBoostFactorScorer extends Scorer {
@Override
public int nextDoc() throws IOException {
return scorer.nextDoc();
return iterator.nextDoc();
}
@Override
@ -136,7 +146,7 @@ abstract class CustomBoostFactorScorer extends Scorer {
@Override
public int advance(int target) throws IOException {
return scorer.advance(target);
return iterator.advance(target);
}
}
}

View File

@ -231,7 +231,7 @@ public class FiltersFunctionScoreQuery extends Query {
}
FiltersFunctionFactorScorer scorer = (FiltersFunctionFactorScorer)scorer(context);
int actualDoc = scorer.advance(doc);
int actualDoc = scorer.iterator.advance(doc);
assert (actualDoc == doc);
double score = scorer.computeScore(doc, subQueryExpl.getValue());
Explanation factorExplanation = Explanation.match(

View File

@ -20,6 +20,7 @@
package org.elasticsearch.common.lucene.search.function;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.script.ExplainableSearchScript;
@ -57,19 +58,9 @@ public class ScriptScoreFunction extends ScoreFunction {
}
@Override
public int nextDoc() throws IOException {
public DocIdSetIterator iterator() {
throw new UnsupportedOperationException();
}
@Override
public int advance(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
return 1;
}
}
private final Script sScript;

View File

@ -26,6 +26,7 @@ import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.util.Accountable;
@ -127,12 +128,12 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L
final IndexSearcher searcher = new IndexSearcher(topLevelContext);
searcher.setQueryCache(null);
final Weight weight = searcher.createNormalizedWeight(query, false);
final DocIdSetIterator it = weight.scorer(context);
Scorer s = weight.scorer(context);
final BitSet bitSet;
if (it == null) {
if (s == null) {
bitSet = null;
} else {
bitSet = BitSet.of(it, context.reader().maxDoc());
bitSet = BitSet.of(s.iterator(), context.reader().maxDoc());
}
Value value = new Value(bitSet, shardId);

View File

@ -24,6 +24,7 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FieldComparatorSource;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.join.BitSetProducer;
@ -139,7 +140,8 @@ public interface IndexFieldData<FD extends AtomicFieldData> extends IndexCompone
* Get a {@link DocIdSet} that matches the inner documents.
*/
public DocIdSetIterator innerDocs(LeafReaderContext ctx) throws IOException {
return innerFilter.scorer(ctx);
Scorer s = innerFilter.scorer(ctx);
return s == null ? null : s.iterator();
}
}

View File

@ -20,8 +20,8 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.Version;
@ -297,12 +297,12 @@ public class DocumentMapper implements ToXContent {
// We can pass down 'null' as acceptedDocs, because nestedDocId is a doc to be fetched and
// therefor is guaranteed to be a live doc.
final Weight nestedWeight = filter.createWeight(sc.searcher(), false);
DocIdSetIterator iterator = nestedWeight.scorer(context);
if (iterator == null) {
Scorer scorer = nestedWeight.scorer(context);
if (scorer == null) {
continue;
}
if (iterator.advance(nestedDocId) == nestedDocId) {
if (scorer.iterator().advance(nestedDocId) == nestedDocId) {
if (nestedObjectMapper == null) {
nestedObjectMapper = objectMapper;
} else {

View File

@ -144,14 +144,15 @@ public class GeoDistanceRangeQuery extends Query {
public Scorer scorer(LeafReaderContext context) throws IOException {
final DocIdSetIterator approximation;
if (boundingBoxWeight != null) {
approximation = boundingBoxWeight.scorer(context);
Scorer s = boundingBoxWeight.scorer(context);
if (s == null) {
// if the approximation does not match anything, we're done
return null;
}
approximation = s.iterator();
} else {
approximation = DocIdSetIterator.all(context.reader().maxDoc());
}
if (approximation == null) {
// if the approximation does not match anything, we're done
return null;
}
final MultiGeoPointValues values = indexFieldData.load(context).getGeoPointValues();
final TwoPhaseIterator twoPhaseIterator = new TwoPhaseIterator(approximation) {
@Override

View File

@ -20,6 +20,7 @@
package org.elasticsearch.search.aggregations.bucket;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Scorer;
@ -277,17 +278,7 @@ public class BestDocsDeferringCollector extends DeferringBucketCollector impleme
}
@Override
public int nextDoc() throws IOException {
throw new ElasticsearchException("This caching scorer implementation only implements score() and docID()");
}
@Override
public int advance(int target) throws IOException {
throw new ElasticsearchException("This caching scorer implementation only implements score() and docID()");
}
@Override
public long cost() {
public DocIdSetIterator iterator() {
throw new ElasticsearchException("This caching scorer implementation only implements score() and docID()");
}

View File

@ -134,10 +134,11 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator {
protected void doPostCollection() throws IOException {
IndexReader indexReader = context().searchContext().searcher().getIndexReader();
for (LeafReaderContext ctx : indexReader.leaves()) {
DocIdSetIterator childDocsIter = childFilter.scorer(ctx);
if (childDocsIter == null) {
Scorer childDocsScorer = childFilter.scorer(ctx);
if (childDocsScorer == null) {
continue;
}
DocIdSetIterator childDocsIter = childDocsScorer.iterator();
final LeafBucketCollector sub = collectableSubAggregators.getLeafCollector(ctx);
final SortedDocValues globalOrdinals = valuesSource.globalOrdinalsValues(parentType, ctx);

View File

@ -24,6 +24,7 @@ import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.util.BitSet;
@ -69,7 +70,12 @@ public class NestedAggregator extends SingleBucketAggregator {
final IndexSearcher searcher = new IndexSearcher(topLevelContext);
searcher.setQueryCache(null);
final Weight weight = searcher.createNormalizedWeight(childFilter, false);
childDocs = weight.scorer(ctx);
Scorer childDocsScorer = weight.scorer(ctx);
if (childDocsScorer == null) {
childDocs = null;
} else {
childDocs = childDocsScorer.iterator();
}
return new LeafBucketCollectorBase(sub, null) {
@Override

View File

@ -23,6 +23,7 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.ExceptionsHelper;
@ -314,11 +315,12 @@ public class FetchPhase implements SearchPhase {
continue;
}
final Weight childWeight = context.searcher().createNormalizedWeight(childFilter, false);
DocIdSetIterator childIter = childWeight.scorer(subReaderContext);
if (childIter == null) {
Scorer childScorer = childWeight.scorer(subReaderContext);
if (childScorer == null) {
current = nestedParentObjectMapper;
continue;
}
DocIdSetIterator childIter = childScorer.iterator();
BitSet parentBits = context.bitsetFilterCache().getBitSetProducer(parentFilter).getBitSet(subReaderContext);

View File

@ -221,10 +221,11 @@ public final class InnerHitsContext {
return null;
}
final DocIdSetIterator childrenIterator = childWeight.scorer(context);
if (childrenIterator == null) {
final Scorer childrenScorer = childWeight.scorer(context);
if (childrenScorer == null) {
return null;
}
DocIdSetIterator childrenIterator = childrenScorer.iterator();
final DocIdSetIterator it = new DocIdSetIterator() {
int doc = -1;

View File

@ -90,9 +90,9 @@ public class MatchedQueriesFetchSubPhase implements FetchSubPhase {
if (scorer == null) {
continue;
}
final TwoPhaseIterator twoPhase = scorer.asTwoPhaseIterator();
final TwoPhaseIterator twoPhase = scorer.twoPhaseIterator();
if (twoPhase == null) {
if (scorer.advance(hitContext.docId()) == hitContext.docId()) {
if (scorer.iterator().advance(hitContext.docId()) == hitContext.docId()) {
matchedQueries.add(name);
}
} else {

View File

@ -49,26 +49,6 @@ final class ProfileScorer extends Scorer {
return scorer.docID();
}
@Override
public int advance(int target) throws IOException {
profile.startTime(ProfileBreakdown.TimingType.ADVANCE);
try {
return scorer.advance(target);
} finally {
profile.stopAndRecordTime();
}
}
@Override
public int nextDoc() throws IOException {
profile.startTime(ProfileBreakdown.TimingType.NEXT_DOC);
try {
return scorer.nextDoc();
} finally {
profile.stopAndRecordTime();
}
}
@Override
public float score() throws IOException {
profile.startTime(ProfileBreakdown.TimingType.SCORE);
@ -84,11 +64,6 @@ final class ProfileScorer extends Scorer {
return scorer.freq();
}
@Override
public long cost() {
return scorer.cost();
}
@Override
public Weight getWeight() {
return profileWeight;
@ -100,8 +75,45 @@ final class ProfileScorer extends Scorer {
}
@Override
public TwoPhaseIterator asTwoPhaseIterator() {
final TwoPhaseIterator in = scorer.asTwoPhaseIterator();
// Profiling adapter for the new Scorer.iterator() entry point: wraps the
// delegate scorer's iterator so that each advance()/nextDoc() call is timed
// into the query-profile breakdown (ADVANCE / NEXT_DOC buckets), replacing
// the timing overrides that previously lived directly on the Scorer.
public DocIdSetIterator iterator() {
final DocIdSetIterator in = scorer.iterator();
return new DocIdSetIterator() {
@Override
public int advance(int target) throws IOException {
profile.startTime(ProfileBreakdown.TimingType.ADVANCE);
try {
return in.advance(target);
} finally {
// finally ensures the timer stops even if the delegate throws.
profile.stopAndRecordTime();
}
}
@Override
public int nextDoc() throws IOException {
profile.startTime(ProfileBreakdown.TimingType.NEXT_DOC);
try {
return in.nextDoc();
} finally {
profile.stopAndRecordTime();
}
}
@Override
public int docID() {
// Positioning queries are cheap; not timed.
return in.docID();
}
@Override
public long cost() {
return in.cost();
}
};
}
@Override
public TwoPhaseIterator twoPhaseIterator() {
final TwoPhaseIterator in = scorer.twoPhaseIterator();
if (in == null) {
return null;
}

View File

@ -31,7 +31,7 @@ grant codeBase "${codebase.securesm-1.0.jar}" {
//// Very special jar permissions:
//// These are dangerous permissions that we don't want to grant to everything.
grant codeBase "${codebase.lucene-core-5.5.0-snapshot-1719088.jar}" {
grant codeBase "${codebase.lucene-core-5.5.0-snapshot-1721183.jar}" {
// needed to allow MMapDirectory's "unmap hack"
permission java.lang.RuntimePermission "accessClassInPackage.sun.misc";
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";

View File

@ -31,7 +31,7 @@ grant codeBase "${codebase.securemock-1.2.jar}" {
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
};
grant codeBase "${codebase.lucene-test-framework-5.5.0-snapshot-1719088.jar}" {
grant codeBase "${codebase.lucene-test-framework-5.5.0-snapshot-1721183.jar}" {
// needed by RamUsageTester
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
};

View File

@ -81,7 +81,7 @@ public class NestedChildrenFilterTests extends ESTestCase {
int checkedParents = 0;
final Weight parentsWeight = searcher.createNormalizedWeight(new TermQuery(new Term("type", "parent")), false);
for (LeafReaderContext leaf : reader.leaves()) {
DocIdSetIterator parents = parentsWeight.scorer(leaf);
DocIdSetIterator parents = parentsWeight.scorer(leaf).iterator();
for (int parentDoc = parents.nextDoc(); parentDoc != DocIdSetIterator.NO_MORE_DOCS ; parentDoc = parents.nextDoc()) {
int expectedChildDocs = leaf.reader().document(parentDoc).getField("num_child_docs").numericValue().intValue();
hitContext.reset(null, leaf, parentDoc, searcher);

View File

@ -1 +0,0 @@
9f2b9811a4f4a57a1b3a98bdc1e1b63476b9f628

View File

@ -0,0 +1 @@
69e187ef1d2d9c9570363eb4186821e0341df5b8

View File

@ -1 +0,0 @@
038071889a5dbeb279e37fa46225e194139a427c

View File

@ -0,0 +1 @@
0fa00a45ff9bc6a4df44db81f2e4e44ea94bf88e

View File

@ -1 +0,0 @@
b986d0ad8ee4dda8172a5a61875c47631e4b21d4

View File

@ -0,0 +1 @@
f6854c65c7f4c6d9de583f4daa4fd3ae8a3800f1

View File

@ -1 +0,0 @@
f46574fbdfbcc81d936c77e15ba5b3af2c2b7253

View File

@ -0,0 +1 @@
e996e6c723eb415ba2cfa7f5e98bbf194a4918dd

View File

@ -1 +0,0 @@
f620262d667a294d390e8df7575cc2cca2626559

View File

@ -0,0 +1 @@
3b7a5d97b10885f16eb53deb15d64c942b9f9fdb

View File

@ -1 +0,0 @@
4c44b07242fd706f6f7f14c9063a725e0e5b98cd

View File

@ -0,0 +1 @@
e4dda3eeb76e340aa4713a3b20d68c4a1504e505

View File

@ -1 +0,0 @@
1e33e0aa5fc227e90c8314f61b4cba1090035e33

View File

@ -0,0 +1 @@
800442a5d7612ce4c8748831871b4d436a50554e

View File

@ -1 +0,0 @@
e416893f7b781239a15d3e2c7200ff26574d14de

View File

@ -0,0 +1 @@
bdf184de9b5773c7af3ae908af78eeb1e512470c

View File

@ -1 +0,0 @@
b153b63b9333feedb18af2673eb6ccaf95bcc8bf

View File

@ -0,0 +1 @@
fc59de52bd2c7e420edfd235723cb8b0dd44e92d

View File

@ -1 +0,0 @@
0aa2758d70a79f2e0f33a87624fd9d31e155c864

View File

@ -0,0 +1 @@
1d341e6a4f11f3170773ccffdbe6815b45967e3d

View File

@ -1 +0,0 @@
873c716ba629dae389b12ddb1aedf2f5c5f57fea

View File

@ -0,0 +1 @@
a1b02c2b595ac92f45f0d2be03841a3a7fcae1f1

View File

@ -1 +0,0 @@
9d7e47c2fb73c614cc5ca41529b2c273c73b0ce7

View File

@ -0,0 +1 @@
e3ea422b56734329fb6974e9cf9f66478adb5793

View File

@ -1 +0,0 @@
4766305088797a66fe02d5aaa98e086867816e42

View File

@ -0,0 +1 @@
5eadbd4e63120b59ab6445e39489205f98420471

View File

@ -1 +0,0 @@
f0ee6fb780ea8aa9ec6d31e6a9cc7d48700bd2ca

View File

@ -0,0 +1 @@
a336287e65d082535f02a8427666dbe46b1b9b74

View File

@ -1 +0,0 @@
787356d4ae6142bb8ca7e9713d0a281a797b57fb

View File

@ -0,0 +1 @@
60e056d2dd04a81440482b047af0737bc41593d9

View File

@ -1 +0,0 @@
4e56ba76d6b23756b2bd4d9e42b2b00122cd4fa5

View File

@ -0,0 +1 @@
1fce4e9b5c4482bb95e8b275c825d112640d6f1e

View File

@ -1 +0,0 @@
d6ccac802dc1e4c177be043a173377cf5e517cff

View File

@ -0,0 +1 @@
f104f306fef9d3033db026705043e9cbd145aba5

View File

@ -1 +0,0 @@
70ad9f6c3738727229867419d949527cc7789f62

View File

@ -0,0 +1 @@
40b2034a6aed4c3fe0509016fab4f7bbb37a5fc8

View File

@ -1 +0,0 @@
75504fd906929700e7d11f9600e4a79de48e1090

View File

@ -0,0 +1 @@
e117a87f4338be80b0a052d2ce454d5086aa57f1

View File

@ -1 +0,0 @@
9eeeeabeab89ec305e831d80bdcc7e85a1140fbb

View File

@ -0,0 +1 @@
703dd91fccdc1c4662c80e412a449097c0578d83