Mirror of https://github.com/honeymoose/OpenSearch.git (synced 2025-02-26 14:54:56 +00:00)
Add unit tests for FiltersAggregator (#22678)
Adds unit tests for the `filters` aggregation. This change also adds a helper to search and reduce any aggregator in a unit test, by dividing a single searcher into sub-searchers, one per segment. Relates #22278
This commit is contained in:
parent 3d54258de2, commit b781a4a176
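To show how the new helpers are meant to be used, here is a minimal sketch (not part of the commit) of a test built on AggregatorTestCase. The field name, documents, and filter are illustrative assumptions; imports match those of the new FiltersAggregatorTests added further down.

// A minimal sketch, assuming the helper signatures introduced in this commit.
// The class name, field name, and documents are hypothetical.
public class FiltersUsageSketchTests extends AggregatorTestCase {
    public void testSearchMatchesSearchAndReduce() throws Exception {
        MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType();
        fieldType.setName("field");
        fieldType.setHasDocValues(true);
        fieldType.setIndexOptions(IndexOptions.DOCS);

        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
        Document document = new Document();
        document.add(new Field("field", "foo", fieldType));
        indexWriter.addDocument(document);
        indexWriter.commit(); // force a second segment so the reduce path is exercised
        document = new Document();
        document.add(new Field("field", "bar", fieldType));
        indexWriter.addDocument(document);
        indexWriter.close();

        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher indexSearcher = newSearcher(indexReader, true, true);

        FiltersAggregationBuilder builder =
            new FiltersAggregationBuilder("test", QueryBuilders.termQuery("field", "foo"));
        // search() collects on the whole searcher; searchAndReduce() splits it into
        // one sub-searcher per segment and reduces the partial aggregations.
        InternalFilters whole = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
        InternalFilters reduced = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
        assertEquals(whole.getBuckets().size(), reduced.getBuckets().size());

        indexReader.close();
        directory.close();
    }
}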
@@ -18,10 +18,15 @@
  */
 package org.elasticsearch.search.aggregations;
 
+import org.apache.lucene.index.CompositeReaderContext;
+import org.apache.lucene.index.IndexReaderContext;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.QueryCache;
 import org.apache.lucene.search.QueryCachingPolicy;
+import org.apache.lucene.search.Weight;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.settings.Settings;
@@ -46,22 +51,23 @@ import org.elasticsearch.search.lookup.SearchLookup;
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 
 import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.anyString;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
 /**
  * Base class for testing {@link Aggregator} implementations.
- * Provides a helper constructing the {@link Aggregator} implementation based on a provided {@link AggregationBuilder} instance.
+ * Provides helpers for constructing and searching an {@link Aggregator} implementation based on a provided
+ * {@link AggregationBuilder} instance.
  */
 public abstract class AggregatorTestCase extends ESTestCase {
 
     protected <A extends Aggregator, B extends AggregationBuilder> A createAggregator(B aggregationBuilder,
-                                                                                      MappedFieldType fieldType,
-                                                                                      IndexSearcher indexSearcher) throws IOException {
+                                                                                      IndexSearcher indexSearcher,
+                                                                                      MappedFieldType... fieldTypes) throws IOException {
         IndexSettings indexSettings = new IndexSettings(
             IndexMetaData.builder("_index").settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
                 .numberOfShards(1)
@@ -102,15 +108,92 @@ public abstract class AggregatorTestCase extends ESTestCase {
         when(searchContext.lookup()).thenReturn(searchLookup);
 
         QueryShardContext queryShardContext = mock(QueryShardContext.class);
-        IndexFieldData<?> fieldData = fieldType.fielddataBuilder().build(indexSettings, fieldType,
-            new IndexFieldDataCache.None(), circuitBreakerService, mock(MapperService.class));
-        when(queryShardContext.fieldMapper(anyString())).thenReturn(fieldType);
-        when(queryShardContext.getForField(any())).thenReturn(fieldData);
-        when(searchContext.getQueryShardContext()).thenReturn(queryShardContext);
+        for (MappedFieldType fieldType : fieldTypes) {
+            IndexFieldData<?> fieldData = fieldType.fielddataBuilder().build(indexSettings, fieldType,
+                new IndexFieldDataCache.None(), circuitBreakerService, mock(MapperService.class));
+            when(queryShardContext.fieldMapper(fieldType.name())).thenReturn(fieldType);
+            when(queryShardContext.getForField(fieldType)).thenReturn(fieldData);
+            when(searchContext.getQueryShardContext()).thenReturn(queryShardContext);
+        }
 
         @SuppressWarnings("unchecked")
         A aggregator = (A) aggregationBuilder.build(searchContext, null).create(null, true);
         return aggregator;
     }
 
+    protected <A extends InternalAggregation, C extends Aggregator> A search(IndexSearcher searcher,
+                                                                             Query query,
+                                                                             AggregationBuilder builder,
+                                                                             MappedFieldType... fieldTypes) throws IOException {
+        try (C a = createAggregator(builder, searcher, fieldTypes)) {
+            a.preCollection();
+            searcher.search(query, a);
+            a.postCollection();
+            @SuppressWarnings("unchecked")
+            A internalAgg = (A) a.buildAggregation(0L);
+            return internalAgg;
+        }
+    }
+
+    /**
+     * Divides the provided {@link IndexSearcher} into sub-searchers, one for each segment,
+     * builds an aggregator for each sub-searcher filtered by the provided {@link Query} and
+     * returns the reduced {@link InternalAggregation}.
+     */
+    protected <A extends InternalAggregation, C extends Aggregator> A searchAndReduce(IndexSearcher searcher,
+                                                                                      Query query,
+                                                                                      AggregationBuilder builder,
+                                                                                      MappedFieldType... fieldTypes) throws IOException {
+        final IndexReaderContext ctx = searcher.getTopReaderContext();
+
+        final ShardSearcher[] subSearchers;
+        if (ctx instanceof LeafReaderContext) {
+            subSearchers = new ShardSearcher[1];
+            subSearchers[0] = new ShardSearcher((LeafReaderContext) ctx, ctx);
+        } else {
+            final CompositeReaderContext compCTX = (CompositeReaderContext) ctx;
+            final int size = compCTX.leaves().size();
+            subSearchers = new ShardSearcher[size];
+            for (int searcherIDX = 0; searcherIDX < subSearchers.length; searcherIDX++) {
+                final LeafReaderContext leave = compCTX.leaves().get(searcherIDX);
+                subSearchers[searcherIDX] = new ShardSearcher(leave, compCTX);
+            }
+        }
+
+        List<InternalAggregation> aggs = new ArrayList<>();
+        Query rewritten = searcher.rewrite(query);
+        Weight weight = searcher.createWeight(rewritten, true);
+        try (C root = createAggregator(builder, searcher, fieldTypes)) {
+            for (ShardSearcher subSearcher : subSearchers) {
+                try (C a = createAggregator(builder, subSearcher, fieldTypes)) {
+                    a.preCollection();
+                    subSearcher.search(weight, a);
+                    a.postCollection();
+                    aggs.add(a.buildAggregation(0L));
+                }
+            }
+            @SuppressWarnings("unchecked")
+            A internalAgg = (A) aggs.get(0).doReduce(aggs,
+                new InternalAggregation.ReduceContext(root.context().bigArrays(), null));
+            return internalAgg;
+        }
+    }
+
+    private static class ShardSearcher extends IndexSearcher {
+        private final List<LeafReaderContext> ctx;
+
+        public ShardSearcher(LeafReaderContext ctx, IndexReaderContext parent) {
+            super(parent);
+            this.ctx = Collections.singletonList(ctx);
+        }
+
+        public void search(Weight weight, Collector collector) throws IOException {
+            search(ctx, weight, collector);
+        }
+
+        @Override
+        public String toString() {
+            return "ShardSearcher(" + ctx.get(0) + ")";
+        }
+    }
 }
@@ -0,0 +1,203 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.search.aggregations.bucket;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.store.Directory;
+import org.elasticsearch.index.mapper.KeywordFieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.search.aggregations.AggregatorTestCase;
+import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator;
+import org.elasticsearch.search.aggregations.bucket.filters.InternalFilters;
+import org.junit.Before;
+
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+public class FiltersAggregatorTests extends AggregatorTestCase {
+    private MappedFieldType fieldType;
+
+    @Before
+    public void setUpTest() throws Exception {
+        super.setUp();
+        fieldType = new KeywordFieldMapper.KeywordFieldType();
+        fieldType.setHasDocValues(true);
+        fieldType.setIndexOptions(IndexOptions.DOCS);
+        fieldType.setName("field");
+    }
+
+    public void testEmpty() throws Exception {
+        Directory directory = newDirectory();
+        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
+        indexWriter.close();
+        IndexReader indexReader = DirectoryReader.open(directory);
+        IndexSearcher indexSearcher = newSearcher(indexReader, true, true);
+        int numFilters = randomIntBetween(1, 10);
+        QueryBuilder[] filters = new QueryBuilder[numFilters];
+        for (int i = 0; i < filters.length; i++) {
+            filters[i] = QueryBuilders.termQuery("field", randomAsciiOfLength(5));
+        }
+        FiltersAggregationBuilder builder = new FiltersAggregationBuilder("test", filters);
+        builder.otherBucketKey("other");
+        InternalFilters response = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
+        assertEquals(response.getBuckets().size(), numFilters);
+        for (InternalFilters.InternalBucket filter : response.getBuckets()) {
+            assertEquals(filter.getDocCount(), 0);
+        }
+        indexReader.close();
+        directory.close();
+    }
+
+    public void testKeyedFilter() throws Exception {
+        Directory directory = newDirectory();
+        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
+        Document document = new Document();
+        document.add(new Field("field", "foo", fieldType));
+        indexWriter.addDocument(document);
+        document.clear();
+        document.add(new Field("field", "else", fieldType));
+        indexWriter.addDocument(document);
+        // make sure we have more than one segment to test the merge
+        indexWriter.commit();
+        document.add(new Field("field", "foo", fieldType));
+        indexWriter.addDocument(document);
+        document.clear();
+        document.add(new Field("field", "bar", fieldType));
+        indexWriter.addDocument(document);
+        document.clear();
+        document.add(new Field("field", "foobar", fieldType));
+        indexWriter.addDocument(document);
+        indexWriter.commit();
+        document.clear();
+        document.add(new Field("field", "something", fieldType));
+        indexWriter.addDocument(document);
+        indexWriter.commit();
+        document.clear();
+        document.add(new Field("field", "foobar", fieldType));
+        indexWriter.addDocument(document);
+        indexWriter.close();
+
+        IndexReader indexReader = DirectoryReader.open(directory);
+        IndexSearcher indexSearcher = newSearcher(indexReader, true, true);
+
+        FiltersAggregator.KeyedFilter[] keys = new FiltersAggregator.KeyedFilter[6];
+        keys[0] = new FiltersAggregator.KeyedFilter("foobar", QueryBuilders.termQuery("field", "foobar"));
+        keys[1] = new FiltersAggregator.KeyedFilter("bar", QueryBuilders.termQuery("field", "bar"));
+        keys[2] = new FiltersAggregator.KeyedFilter("foo", QueryBuilders.termQuery("field", "foo"));
+        keys[3] = new FiltersAggregator.KeyedFilter("foo2", QueryBuilders.termQuery("field", "foo"));
+        keys[4] = new FiltersAggregator.KeyedFilter("same", QueryBuilders.termQuery("field", "foo"));
+        // filter name is already present, so it should be merged with the previous one?
+        keys[5] = new FiltersAggregator.KeyedFilter("same", QueryBuilders.termQuery("field", "bar"));
+        FiltersAggregationBuilder builder = new FiltersAggregationBuilder("test", keys);
+        builder.otherBucket(true);
+        builder.otherBucketKey("other");
+        for (boolean doReduce : new boolean[] {true, false}) {
+            final InternalFilters filters;
+            if (doReduce) {
+                filters = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
+            } else {
+                filters = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
+            }
+            assertEquals(filters.getBuckets().size(), 7);
+            assertEquals(filters.getBucketByKey("foobar").getDocCount(), 2);
+            assertEquals(filters.getBucketByKey("foo").getDocCount(), 2);
+            assertEquals(filters.getBucketByKey("foo2").getDocCount(), 2);
+            assertEquals(filters.getBucketByKey("bar").getDocCount(), 1);
+            assertEquals(filters.getBucketByKey("same").getDocCount(), 1);
+            assertEquals(filters.getBucketByKey("other").getDocCount(), 2);
+        }
+
+        indexReader.close();
+        directory.close();
+    }
+
+    public void testRandom() throws Exception {
+        Directory directory = newDirectory();
+        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
+        int numDocs = randomIntBetween(100, 200);
+        int maxTerm = randomIntBetween(10, 50);
+        int[] expectedBucketCount = new int[maxTerm];
+        Document document = new Document();
+        for (int i = 0; i < numDocs; i++) {
+            if (frequently()) {
+                // make sure we have more than one segment to test the merge
+                indexWriter.commit();
+            }
+            int value = randomInt(maxTerm - 1);
+            expectedBucketCount[value] += 1;
+            document.add(new Field("field", Integer.toString(value), fieldType));
+            indexWriter.addDocument(document);
+            document.clear();
+        }
+        indexWriter.close();
+
+        IndexReader indexReader = DirectoryReader.open(directory);
+        IndexSearcher indexSearcher = newSearcher(indexReader, true, true);
+        int numFilters = randomIntBetween(1, 10);
+        QueryBuilder[] filters = new QueryBuilder[numFilters];
+        int[] filterTerms = new int[numFilters];
+        int expectedOtherCount = numDocs;
+        Set<Integer> filterSet = new HashSet<>();
+        for (int i = 0; i < filters.length; i++) {
+            int value = randomInt(maxTerm - 1);
+            filters[i] = QueryBuilders.termQuery("field", Integer.toString(value));
+            filterTerms[i] = value;
+            if (filterSet.contains(value) == false) {
+                expectedOtherCount -= expectedBucketCount[value];
+                filterSet.add(value);
+            }
+        }
+        FiltersAggregationBuilder builder = new FiltersAggregationBuilder("test", filters);
+        builder.otherBucket(true);
+        builder.otherBucketKey("other");
+
+        for (boolean doReduce : new boolean[] {true, false}) {
+            final InternalFilters response;
+            if (doReduce) {
+                response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
+            } else {
+                response = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
+            }
+            List<InternalFilters.InternalBucket> buckets = response.getBuckets();
+            assertEquals(buckets.size(), filters.length + 1);
+
+            for (InternalFilters.InternalBucket bucket : buckets) {
+                if ("other".equals(bucket.getKey())) {
+                    assertEquals(bucket.getDocCount(), expectedOtherCount);
+                } else {
+                    int index = Integer.parseInt(bucket.getKey());
+                    assertEquals(bucket.getDocCount(), (long) expectedBucketCount[filterTerms[index]]);
+                }
+            }
+        }
+        indexReader.close();
+        directory.close();
+    }
+}
@@ -74,7 +74,7 @@ public class TermsAggregatorTests extends AggregatorTestCase {
         MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType();
         fieldType.setName("string");
         fieldType.setHasDocValues(true);
-        try (TermsAggregator aggregator = createAggregator(aggregationBuilder, fieldType, indexSearcher)) {
+        try (TermsAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType)) {
             aggregator.preCollection();
             indexSearcher.search(new MatchAllDocsQuery(), aggregator);
             aggregator.postCollection();
@@ -94,7 +94,7 @@ public class TermsAggregatorTests extends AggregatorTestCase {
         directory.close();
     }
 
-    public void testMixLongAndDouble() throws IOException {
+    public void testMixLongAndDouble() throws Exception {
         for (TermsAggregatorFactory.ExecutionMode executionMode : TermsAggregatorFactory.ExecutionMode.values()) {
             TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.LONG)
                 .executionHint(executionMode.toString())
@@ -190,7 +190,7 @@ public class TermsAggregatorTests extends AggregatorTestCase {
 
     private InternalAggregation buildInternalAggregation(TermsAggregationBuilder builder, MappedFieldType fieldType,
                                                          IndexSearcher searcher) throws IOException {
-        try (TermsAggregator aggregator = createAggregator(builder, fieldType, searcher)) {
+        try (TermsAggregator aggregator = createAggregator(builder, searcher, fieldType)) {
            aggregator.preCollection();
            searcher.search(new MatchAllDocsQuery(), aggregator);
            aggregator.postCollection();
@@ -62,7 +62,7 @@ public class MinAggregatorTests extends AggregatorTestCase {
         MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("_name").field("number");
         MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
         fieldType.setName("number");
-        try (MinAggregator aggregator = createAggregator(aggregationBuilder, fieldType, indexSearcher)) {
+        try (MinAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType)) {
             aggregator.preCollection();
             indexSearcher.search(new MatchAllDocsQuery(), aggregator);
             aggregator.postCollection();
@@ -96,7 +96,7 @@ public class MinAggregatorTests extends AggregatorTestCase {
         MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("_name").field("number");
         MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
         fieldType.setName("number");
-        try (MinAggregator aggregator = createAggregator(aggregationBuilder, fieldType, indexSearcher)) {
+        try (MinAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType)) {
             aggregator.preCollection();
             indexSearcher.search(new MatchAllDocsQuery(), aggregator);
             aggregator.postCollection();
@@ -127,7 +127,7 @@ public class MinAggregatorTests extends AggregatorTestCase {
         MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("_name").field("number2");
         MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
         fieldType.setName("number2");
-        try (MinAggregator aggregator = createAggregator(aggregationBuilder, fieldType, indexSearcher)) {
+        try (MinAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType)) {
             aggregator.preCollection();
             indexSearcher.search(new MatchAllDocsQuery(), aggregator);
             aggregator.postCollection();
@@ -149,7 +149,7 @@ public class MinAggregatorTests extends AggregatorTestCase {
         MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("_name").field("number");
         MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
         fieldType.setName("number");
-        try (MinAggregator aggregator = createAggregator(aggregationBuilder, fieldType, indexSearcher)) {
+        try (MinAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType)) {
             aggregator.preCollection();
             indexSearcher.search(new MatchAllDocsQuery(), aggregator);
             aggregator.postCollection();
@@ -66,7 +66,7 @@ public class TopHitsAggregatorTests extends AggregatorTestCase {
         fieldType.setHasDocValues(true);
         TopHitsAggregationBuilder aggregationBuilder = new TopHitsAggregationBuilder("_name");
         aggregationBuilder.sort("string", SortOrder.DESC);
-        try (TopHitsAggregator aggregator = createAggregator(aggregationBuilder, fieldType, indexSearcher)) {
+        try (TopHitsAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType)) {
            aggregator.preCollection();
            indexSearcher.search(new MatchAllDocsQuery(), aggregator);
            aggregator.postCollection();