mirror of https://github.com/apache/druid.git

commit af807c6215

Merge squashed version of pull request #1148 from 'himanshug/unit-tests'

Also updated license header as part of the squash.
@@ -151,6 +151,11 @@
      <type>test-jar</type>
      <scope>test</scope>
    </dependency>
+   <dependency>
+     <groupId>org.hamcrest</groupId>
+     <artifactId>hamcrest-all</artifactId>
+     <scope>test</scope>
+   </dependency>
  </dependencies>

  <build>
@@ -112,7 +112,7 @@ public class IntList
    final IntBuffer retVal = IntBuffer.wrap(array);

    if (index + 1 == baseListCount()) {
-     retVal.limit(maxIndex - (index * allocateSize));
+     retVal.limit(maxIndex - (index * allocateSize) + 1);
    }

    return retVal.asReadOnlyBuffer();
@@ -58,7 +58,7 @@ public class PropUtils
      return Integer.parseInt(retVal);
    }
    catch (NumberFormatException e) {
-     throw new ISE(e, "Property[%s] is expected to be an int, it is not[%s].", retVal);
+     throw new ISE(e, "Property[%s] is expected to be an int, it is not[%s].",property, retVal);
    }
  }
}
@@ -0,0 +1,54 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.collections;

import com.google.common.collect.Lists;
import com.google.common.collect.Ordering;
import com.google.common.collect.Iterables;

import com.google.common.collect.Sets;
import org.junit.Assert;
import org.junit.Test;


import java.util.*;

public class CombiningIterableTest
{
  @Test
  public void testCreateSplatted()
  {
    List<Integer> firstList = Arrays.asList(1, 2, 5, 7, 9, 10, 20);
    List<Integer> secondList = Arrays.asList(1, 2, 5, 8, 9);
    Set<Integer> mergedLists = new HashSet<>();
    mergedLists.addAll(firstList);
    mergedLists.addAll(secondList);
    ArrayList<Iterable<Integer>> iterators = Lists.newArrayList();
    iterators.add(firstList);
    iterators.add(secondList);
    CombiningIterable<Integer> actualIterable = CombiningIterable.createSplatted(
        iterators,
        Ordering.<Integer>natural()
    );
    Assert.assertEquals(mergedLists.size(),Iterables.size(actualIterable));
    Set actualHashset = Sets.newHashSet(actualIterable);
    Assert.assertEquals(actualHashset,mergedLists);
  }
}
@@ -0,0 +1,127 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.collections;

import com.google.common.collect.PeekingIterator;

import com.metamx.common.guava.nary.BinaryFn;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.easymock.EasyMock;
import org.junit.Test;

import java.util.Comparator;
import java.util.NoSuchElementException;

public class CombiningIteratorTest
{
  private CombiningIterator<String> testingIterator;
  private Comparator<String> comparator;
  private BinaryFn binaryFn;
  private PeekingIterator<String> peekIterator;

  @Before
  public void setUp()
  {
    peekIterator = EasyMock.createMock(PeekingIterator.class);
    comparator = EasyMock.createMock(Comparator.class);
    binaryFn = EasyMock.createMock(BinaryFn.class);
    testingIterator = CombiningIterator.create(peekIterator,comparator,binaryFn);
  }

  @After
  public void tearDown()
  {
    testingIterator = null;
  }

  @Test
  public void testHasNext()
  {
    boolean expected = true;
    EasyMock.expect(peekIterator.hasNext()).andReturn(expected);
    EasyMock.replay(peekIterator);
    boolean actual = testingIterator.hasNext();
    EasyMock.verify(peekIterator);
    Assert.assertEquals("The hasNext function is broken",expected,actual);
  }

  @Test
  public void testFalseBranchNext()
  {
    boolean expected = true;
    EasyMock.expect(peekIterator.hasNext()).andReturn(expected);
    expected = false;
    EasyMock.expect(peekIterator.hasNext()).andReturn(expected);
    EasyMock.replay(peekIterator);
    Object res = testingIterator.next();
    EasyMock.verify(peekIterator);
    Assert.assertNull("Should be null",res);
  }

  @Test
  public void testNext()
  {
    boolean expected = true;
    EasyMock.expect(peekIterator.hasNext()).andReturn(expected).times(4);
    String defaultString = "S1";
    String resString = "S2";
    EasyMock.expect(peekIterator.next()).andReturn(defaultString);
    EasyMock.expect(binaryFn.apply(EasyMock.eq(defaultString), EasyMock.isNull()))
        .andReturn(resString);
    EasyMock.expect(peekIterator.next()).andReturn(defaultString);
    EasyMock.expect(comparator.compare(EasyMock.eq(resString), EasyMock.eq(defaultString)))
        .andReturn(0);
    EasyMock.expect(peekIterator.next()).andReturn(defaultString);
    EasyMock.expect(binaryFn.apply(EasyMock.eq(resString), EasyMock.eq(defaultString)))
        .andReturn(resString);
    EasyMock.expect(comparator.compare(EasyMock.eq(resString), EasyMock.eq(defaultString)))
        .andReturn(1);

    EasyMock.replay(peekIterator);
    EasyMock.replay(binaryFn);
    EasyMock.replay(comparator);

    String actual = testingIterator.next();
    Assert.assertEquals(resString,actual);

    EasyMock.verify(peekIterator);
    EasyMock.verify(comparator);
    EasyMock.verify(binaryFn);
  }

  @Test(expected = NoSuchElementException.class)
  public void testExceptionInNext() throws Exception
  {
    boolean expected = false;
    EasyMock.expect(peekIterator.hasNext()).andReturn(expected);
    EasyMock.replay(peekIterator);
    testingIterator.next();
    EasyMock.verify(peekIterator);
  }

  @Test(expected = UnsupportedOperationException.class)
  public void testRemove() throws Exception
  {
    testingIterator.remove();
  }
}
@@ -0,0 +1,70 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.collections;

import com.google.common.collect.ImmutableMap;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import java.util.concurrent.atomic.AtomicLong;

public class CountingMapTest
{
  private CountingMap mapObject = null;

  @Before
  public void setUp()
  {
    mapObject = new CountingMap();
  }

  @After
  public void tearDown()
  {
    mapObject.clear();
  }

  @Test
  public void testAdd()
  {
    long defaultValue = 10;
    String defaultKey = "defaultKey";
    long actual;
    actual = mapObject.add(defaultKey,defaultValue);
    Assert.assertEquals("Values does not match", actual, defaultValue);
  }

  @Test
  public void testSnapshot()
  {
    long defaultValue = 10;
    String defaultKey = "defaultKey";
    mapObject.add(defaultKey, defaultValue);
    ImmutableMap snapShotMap = (ImmutableMap) mapObject.snapshot();
    Assert.assertEquals("Maps size does not match",mapObject.size(),snapShotMap.size());
    long expected = (long) snapShotMap.get(defaultKey);
    AtomicLong actual = (AtomicLong) mapObject.get(defaultKey);
    Assert.assertEquals("Values for key = " + defaultKey + " does not match",
        actual.longValue(),expected);
  }
}
@@ -17,6 +17,8 @@

package io.druid.collections;

import java.nio.IntBuffer;

import org.junit.Assert;
import org.junit.Test;

@@ -68,4 +70,49 @@ public class IntListTest
    Assert.assertEquals(128, list.length());
    Assert.assertEquals(23, list.get(7));
  }

  @Test(expected = ArrayIndexOutOfBoundsException.class)
  public void testExceptionInGet()
  {
    IntList list = new IntList();
    list.get(1);
  }

  @Test
  public void testToArray()
  {
    int[] inputArray = {1,3,4};
    IntList list = new IntList();
    for (int i = 0; i < inputArray.length; i++) {
      list.add(inputArray[i]);
    }
    int[] outputArray = list.toArray();
    Assert.assertArrayEquals(inputArray, outputArray);
  }

  @Test
  public void testNullCaseGetBaseList()
  {
    final int intSize = 2;
    IntList list = new IntList(intSize);
    list.set(2 * intSize,100);
    IntBuffer outBuffer;
    outBuffer = list.getBaseList(0);
    Assert.assertNull("Should be Null",outBuffer);
  }

  @Test
  public void testGetBaseList()
  {
    int listSize = 2;
    IntList list = new IntList(listSize);
    int[] expectedArray = {1,2};
    list.add(expectedArray[0]);
    list.add(expectedArray[1]);
    IntBuffer outputBuffer = list.getBaseList(0);
    Assert.assertEquals("Buffer size does not match",2, outputBuffer.limit());
    int[] actualArray = new int[outputBuffer.capacity()];
    outputBuffer.get(actualArray);
    Assert.assertArrayEquals("Arrays are not matching",expectedArray, actualArray);
  }
}
@@ -25,6 +25,7 @@ import org.junit.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.NoSuchElementException;

/**
 */
@@ -163,4 +164,26 @@ public class OrderedMergeIteratorTest

    Assert.assertEquals(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9), Lists.newArrayList(iter));
  }

  @Test(expected = NoSuchElementException.class)
  public void testNoElementInNext()
  {
    final ArrayList<Iterator<Integer>> iterators = Lists.newArrayList();
    OrderedMergeIterator<Integer> iter = new OrderedMergeIterator<Integer>(
        Ordering.<Integer>natural(),
        iterators.iterator()
    );
    iter.next();
  }

  @Test(expected = UnsupportedOperationException.class)
  public void testRemove()
  {
    final ArrayList<Iterator<Integer>> iterators = Lists.newArrayList();
    OrderedMergeIterator<Integer> iter = new OrderedMergeIterator<Integer>(
        Ordering.<Integer>natural(),
        iterators.iterator()
    );
    iter.remove();
  }
}
@@ -0,0 +1,81 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.collections;

import com.google.common.base.Supplier;
import com.metamx.common.ISE;

import org.easymock.EasyMock;
import org.hamcrest.core.IsInstanceOf;
import org.hamcrest.core.IsNull;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import java.io.IOException;

public class StupidPoolTest
{
  private Supplier<String> generator;
  private StupidPool<String> poolOfString;
  private ResourceHolder<String> resourceHolderObj;
  private String defaultString = new String("test");

  @Before
  public void setUp()
  {
    generator = EasyMock.createMock(Supplier.class);
    EasyMock.expect(generator.get()).andReturn(defaultString).anyTimes();
    EasyMock.replay(generator);
    poolOfString = new StupidPool<>(generator);
    resourceHolderObj = poolOfString.take();
  }

  @After
  public void tearDown() throws IOException
  {
    if (resourceHolderObj != null) {
      resourceHolderObj.close();
    }
  }

  @Test
  public void testTake()
  {
    Assert.assertThat(resourceHolderObj, new IsInstanceOf(ResourceHolder.class));
    Object expectedObject = resourceHolderObj.get();
    Assert.assertEquals(expectedObject, defaultString);
  }

  @Test(expected = ISE.class)
  public void testExceptionInResourceHolderGet() throws IOException
  {
    resourceHolderObj.close();
    resourceHolderObj.get();
  }

  @Test
  public void testFinalizeInResourceHolder()
  {
    resourceHolderObj = null;
    System.runFinalization();
  }
}
@@ -0,0 +1,42 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.collections;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import java.io.IOException;

public class StupidResourceHolderTest
{
  private StupidResourceHolder<String> resourceHolder;

  @Test
  public void testCreateAndGet() throws IOException
  {
    String expected = "String";
    resourceHolder = StupidResourceHolder.create(expected);
    String actual = resourceHolder.get();
    Assert.assertEquals(expected,actual);
    resourceHolder.close();
  }
}
@@ -0,0 +1,82 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.common.utils;

import org.junit.Assert;
import org.junit.Test;

import com.metamx.common.ISE;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class PropUtilsTest
{
  @Test(expected = ISE.class)
  public void testNotSpecifiedGetProperty()
  {
    Properties prop = new Properties();
    PropUtils.getProperty(prop,"");
  }

  @Test
  public void testGetProperty()
  {
    Properties prop = new Properties();
    prop.setProperty("key","value");
    Assert.assertEquals("value", PropUtils.getProperty(prop,"key"));
  }

  @Test(expected = ISE.class)
  public void testNotSpecifiedGetPropertyAsInt()
  {
    Properties prop = new Properties();
    PropUtils.getPropertyAsInt(prop,"",null);
  }

  @Test
  public void testDefaultValueGetPropertyAsInt()
  {
    Properties prop = new Properties();
    int defaultValue = 1;
    int result = PropUtils.getPropertyAsInt(prop,"",defaultValue);
    Assert.assertEquals(defaultValue, result);
  }

  @Test
  public void testParseGetPropertyAsInt()
  {
    Properties prop = new Properties();
    int expectedValue = 1;
    prop.setProperty("key", Integer.toString(expectedValue));
    int result = PropUtils.getPropertyAsInt(prop,"key");
    Assert.assertEquals(expectedValue, result);
  }

  @Test(expected = ISE.class)
  public void testFormatExceptionGetPropertyAsInt()
  {
    Properties prop = new Properties();
    prop.setProperty("key","1-value");
    PropUtils.getPropertyAsInt(prop,"key",null);
  }
}
@@ -0,0 +1,237 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.common.utils;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.WritableByteChannel;
import java.nio.charset.Charset;

import io.druid.collections.IntList;

public class SerializerUtilsTest
{
  private SerializerUtils serializerUtils;
  private final float delta = 0;
  private final String [] strings = {"1#","2","3"};
  private final int [] ints = {1,2,3};
  private final float [] floats = {1.1f,2,3};
  private final long [] longs = {3,2,1};
  private final Charset UTF8 = Charset.forName("UTF-8");
  private byte [] stringsByte;
  private byte [] intsByte;
  private byte [] floatsByte;
  private byte [] longsByte;
  private ByteArrayOutputStream outStream;

  @Before
  public void setUpByteArrays() throws IOException
  {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bos);
    out.writeInt(strings.length);
    for (int i = 0;i < strings.length;i++) {
      byte [] stringBytes = strings[i].getBytes(UTF8);
      out.writeInt(stringBytes.length);
      out.write(strings[i].getBytes());
    }
    out.close();
    stringsByte = bos.toByteArray();
    bos.close();
    bos = new ByteArrayOutputStream();
    out = new DataOutputStream(bos);
    out.writeInt(ints.length);
    for (int i = 0;i < ints.length;i++) {
      out.writeInt(ints[i]);
    }
    out.close();
    intsByte = bos.toByteArray();
    bos.close();
    bos = new ByteArrayOutputStream();
    out = new DataOutputStream(bos);
    out.writeInt(floats.length);
    for (int i = 0;i < ints.length;i++) {
      out.writeFloat(floats[i]);
    }
    out.close();
    floatsByte = bos.toByteArray();
    bos.close();
    bos = new ByteArrayOutputStream();
    out = new DataOutputStream(bos);
    out.writeInt(longs.length);
    for (int i = 0;i < longs.length;i++) {
      out.writeLong(longs[i]);
    }
    out.close();
    longsByte = bos.toByteArray();
    bos.close();
    serializerUtils = new SerializerUtils();
    outStream = new ByteArrayOutputStream();
  }

  @Test
  public void testWriteInts() throws IOException
  {
    serializerUtils.writeInts(outStream, ints);
    byte [] actuals = outStream.toByteArray();
    Assert.assertArrayEquals(intsByte, actuals);
  }

  @Test
  public void testWriteIntList() throws IOException
  {
    IntList list = new IntList();
    for (int i = 0;i < ints.length;i++) {
      list.add(ints[i]);
    }
    serializerUtils.writeInts(outStream, list);
    byte [] actuals = outStream.toByteArray();
    Assert.assertArrayEquals(intsByte, actuals);
  }

  @Test
  public void testWriteFloats() throws IOException
  {
    serializerUtils.writeFloats(outStream, floats);
    byte [] actuals = outStream.toByteArray();
    Assert.assertArrayEquals(floatsByte, actuals);
  }

  @Test
  public void testChannelWritefloat() throws IOException
  {
    final int index = 0;
    WritableByteChannel channelOutput = Channels.newChannel(outStream);
    serializerUtils.writeFloat(channelOutput, floats[index]);
    ByteArrayInputStream inputstream = new ByteArrayInputStream(outStream.toByteArray());
    if (channelOutput != null) {
      channelOutput.close();
    }
    float expected = serializerUtils.readFloat(inputstream);
    float actuals = floats[index];
    Assert.assertEquals(expected, actuals,delta);
  }

  @Test
  public void testWriteLongs() throws IOException
  {
    serializerUtils.writeLongs(outStream, longs);
    byte [] actuals = outStream.toByteArray();
    Assert.assertArrayEquals(longsByte,actuals);
  }

  @Test
  public void testWriteStrings() throws IOException
  {
    serializerUtils.writeStrings(outStream, strings);
    byte [] actuals = outStream.toByteArray();
    Assert.assertArrayEquals(stringsByte,actuals);
  }

  @Test
  public void testChannelWritelong() throws IOException
  {
    final int index = 0;
    WritableByteChannel channelOutput = Channels.newChannel(outStream);
    serializerUtils.writeLong(channelOutput, longs[index]);
    ByteArrayInputStream inputstream = new ByteArrayInputStream(outStream.toByteArray());
    channelOutput.close();
    inputstream.close();
    long expected = serializerUtils.readLong(inputstream);
    long actuals = longs[index];
    Assert.assertEquals(expected, actuals);
  }

  @Test
  public void testReadInts() throws IOException
  {
    ByteArrayInputStream inputstream = new ByteArrayInputStream(intsByte);
    int [] actuals = serializerUtils.readInts(inputstream);
    inputstream.close();
    Assert.assertArrayEquals(ints, actuals);
  }

  @Test
  public void testReadFloats() throws IOException
  {
    ByteArrayInputStream inputstream = new ByteArrayInputStream(floatsByte);
    float [] actuals = serializerUtils.readFloats(inputstream);
    inputstream.close();
    Assert.assertArrayEquals(floats, actuals, delta);
  }

  @Test
  public void testReadLongs() throws IOException
  {
    ByteArrayInputStream inputstream = new ByteArrayInputStream(longsByte);
    long [] actuals = serializerUtils.readLongs(inputstream);
    inputstream.close();
    Assert.assertArrayEquals(longs, actuals);
  }

  @Test
  public void testReadStrings() throws IOException
  {
    ByteArrayInputStream inputstream = new ByteArrayInputStream(stringsByte);
    String [] actuals = serializerUtils.readStrings(inputstream);
    inputstream.close();
    Assert.assertArrayEquals(strings, actuals);
  }

  @Test
  public void testChannelWriteString() throws IOException
  {
    final int index = 0;
    WritableByteChannel channelOutput = Channels.newChannel(outStream);
    serializerUtils.writeString(channelOutput, strings[index]);
    ByteArrayInputStream inputstream = new ByteArrayInputStream(outStream.toByteArray());
    channelOutput.close();
    inputstream.close();
    String expected = serializerUtils.readString(inputstream);
    String actuals = strings[index];
    Assert.assertEquals(expected, actuals);
  }

  @Test
  public void testByteBufferReadStrings() throws IOException
  {
    ByteBuffer buffer = ByteBuffer.allocate(stringsByte.length);
    buffer.put(stringsByte);
    buffer.flip();
    String [] actuals = serializerUtils.readStrings(buffer);
    Assert.assertArrayEquals(strings, actuals);
  }

  @After
  public void tearDown() throws IOException
  {
    serializerUtils = null;
    outStream.close();
  }
}
@@ -0,0 +1,50 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.common.utils;

import org.junit.Assert;
import org.junit.Test;

import org.hamcrest.number.OrderingComparison;
import com.metamx.common.ISE;

public class SocketUtilTest
{
  private final int MAX_PORT = 0xffff;
  @Test
  public void testSocketUtil()
  {
    int port = SocketUtil.findOpenPort(0);
    Assert.assertThat("Port is greater than the maximum port 0xffff",port, OrderingComparison.lessThanOrEqualTo(MAX_PORT));
    Assert.assertThat("Port is less than minimum port 0",port, OrderingComparison.greaterThanOrEqualTo(0));
  }

  @Test(expected = IllegalArgumentException.class)
  public void testIllegalArgument()
  {
    SocketUtil.findOpenPort(-1);
  }

  @Test(expected = ISE.class)
  public void testISEexception()
  {
    SocketUtil.findOpenPort(0xffff);
  }
}
@@ -0,0 +1,39 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.common.utils;

import org.junit.Assert;
import org.junit.Test;

public class VMUtilsTest
{
  @Test
  public void testgetMaxDirectMemory()
  {
    try {
      long maxMemory = VMUtils.getMaxDirectMemory();
      Assert.assertTrue((maxMemory > 0));
    } catch (UnsupportedOperationException expected) {
      Assert.assertTrue(true);
    } catch (RuntimeException expected) {
      Assert.assertTrue(true);
    }
  }
}
@@ -88,13 +88,22 @@
      <artifactId>jsr305</artifactId>
    </dependency>


    <!-- Tests -->
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.easymock</groupId>
      <artifactId>easymock</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.hamcrest</groupId>
      <artifactId>hamcrest-all</artifactId>
      <scope>test</scope>
    </dependency>
  </dependencies>

  <build>
@@ -40,10 +40,6 @@ public class SortableBytes
  {
    this.groupKey = groupKey;
    this.sortKey = sortKey;
-
-   if ("".equals(sortKey)) {
-     throw new IllegalArgumentException();
-   }
  }

  public byte[] getGroupKey()
@@ -19,8 +19,6 @@ package io.druid.indexer;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterators;
import com.metamx.common.ISE;
import io.druid.jackson.DefaultObjectMapper;
import org.apache.hadoop.fs.FileSystem;
@@ -34,8 +32,6 @@ import org.apache.hadoop.util.ReflectionUtils;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

/**
@@ -44,30 +40,19 @@ public class Utils
{
  private static final ObjectMapper jsonMapper = new DefaultObjectMapper();

  public static <K, V> Map<K, V> zipMap(Iterable<K> keys, Iterable<V> values)
  {
    Map<K, V> retVal = new HashMap<K, V>();

    Iterator<K> keyIter = keys.iterator();
    Iterator<V> valsIter = values.iterator();
    while (keyIter.hasNext()) {
      final K key = keyIter.next();

      Preconditions.checkArgument(valsIter.hasNext(), "keys longer than vals, bad, bad vals. Broke on key[%s]", key);
      retVal.put(key, valsIter.next());
    }
    if (valsIter.hasNext()) {
      throw new ISE("More values[%d] than keys[%d]", retVal.size() + Iterators.size(valsIter), retVal.size());
    }

    return retVal;
  }

  public static OutputStream makePathAndOutputStream(JobContext job, Path outputPath, boolean deleteExisting)
      throws IOException
  {
    OutputStream retVal;
    FileSystem fs = outputPath.getFileSystem(job.getConfiguration());
    Class<? extends CompressionCodec> codecClass;
    CompressionCodec codec = null;

    if (FileOutputFormat.getCompressOutput(job)) {
      codecClass = FileOutputFormat.getOutputCompressorClass(job, GzipCodec.class);
      codec = ReflectionUtils.newInstance(codecClass, job.getConfiguration());
      outputPath = new Path(outputPath.toString() + codec.getDefaultExtension());
    }

    if (fs.exists(outputPath)) {
      if (deleteExisting) {
@@ -77,16 +62,11 @@ public class Utils
      }
    }

    if (!FileOutputFormat.getCompressOutput(job)) {
      retVal = fs.create(outputPath, false);
    } else {
      Class<? extends CompressionCodec> codecClass = FileOutputFormat.getOutputCompressorClass(job, GzipCodec.class);
      CompressionCodec codec = ReflectionUtils.newInstance(codecClass, job.getConfiguration());
      outputPath = new Path(outputPath.toString() + codec.getDefaultExtension());

      if (FileOutputFormat.getCompressOutput(job)) {
        retVal = codec.createOutputStream(fs.create(outputPath, false));
      } else {
        retVal = fs.create(outputPath, false);
      }

    return retVal;
  }
@@ -0,0 +1,97 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.indexer;

import com.google.common.primitives.Bytes;

import org.hamcrest.number.OrderingComparison;
import org.joda.time.DateTime;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import com.metamx.common.Pair;

import java.nio.ByteBuffer;

public class BucketTest
{
  Bucket bucket;
  int shardNum;
  int partitionNum;
  DateTime time;

  @Before public void setUp()
  {
    time = new DateTime(2014, 11, 24, 10, 30);
    shardNum = 1;
    partitionNum = 1;
    bucket = new Bucket(shardNum, time, partitionNum);
  }

  @After public void tearDown()
  {
    bucket = null;
  }

  @Test public void testToGroupKey()
  {
    byte[] firstPart = {1, 1, 0, 10};
    byte[] secondPart = {2, 4, 0, 5};
    byte[] expectedGroupParts = bucket.toGroupKey(firstPart,secondPart);
    Pair<Bucket, byte[]> actualPair = Bucket.fromGroupKey(expectedGroupParts);
    Assert.assertEquals("Bucket is not matching", bucket, actualPair.lhs);
    Assert.assertArrayEquals("Parts not matching", Bytes.concat(firstPart,secondPart), actualPair.rhs);
  }

  @Test public void testToString()
  {
    String expectedString = "Bucket{" +
        "time=" + time +
        ", partitionNum=" + partitionNum +
        ", shardNum=" + shardNum +
        '}';
    Assert.assertEquals(bucket.toString(),expectedString);
  }

  @Test public void testEquals()
  {
    Assert.assertFalse("Object should not be equals to NULL", bucket.equals(null));
    Assert.assertFalse("Objects do not have the same Class",bucket.equals(new Integer(0)));
    Assert.assertFalse("Objects do not have the same partitionNum",
        bucket.equals(new Bucket(shardNum, time, partitionNum + 1)));
    Assert.assertFalse("Objects do not have the same shardNum",
        bucket.equals(new Bucket(shardNum + 1,time,partitionNum)));
    Assert.assertFalse("Objects do not have the same time",bucket.equals(new Bucket(shardNum,new DateTime(),partitionNum)));
    Assert.assertFalse("Object do have NULL time",bucket.equals(new Bucket(shardNum,null,partitionNum)));
    Assert.assertTrue("Objects must be the same",bucket.equals(new Bucket(shardNum, time, partitionNum)));

  }

  @Test public void testHashCode()
  {
    int hashCode = bucket.hashCode();
    Assert.assertThat(hashCode, OrderingComparison.greaterThanOrEqualTo(31 * partitionNum + shardNum));
    bucket = new Bucket(shardNum,null,partitionNum);
    hashCode = bucket.hashCode();
    Assert.assertEquals(hashCode, (31 * partitionNum + shardNum));
  }
}
@@ -0,0 +1,77 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.indexer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.JobContext;
import org.apache.hadoop.fs.Path;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.Rule;
import org.junit.rules.TemporaryFolder;
import org.easymock.EasyMock;

import java.io.IOException;
public class HadoopIOPeonTest
{
  final String TMP_FILE_NAME = "test_file";
  JobContext mockJobContext;
  Configuration jobConfig;
  boolean overwritesFiles = true;
  HadoopIOPeon ioPeon;

  @Rule
  public TemporaryFolder tmpFolder = new TemporaryFolder();

  @Before public void setUp() throws IOException
  {
    jobConfig = new Configuration();
    mockJobContext = EasyMock.createMock(JobContext.class);
    EasyMock.expect(mockJobContext.getConfiguration()).andReturn(jobConfig).anyTimes();
    EasyMock.replay(mockJobContext);

    ioPeon = new HadoopIOPeon(mockJobContext,new Path(tmpFolder.newFile().getParent()),overwritesFiles);
  }

  @After public void tearDown()
  {
    jobConfig = null;
    mockJobContext = null;
    tmpFolder.delete();
  }

  @Test public void testMakeOutputStream() throws IOException
  {
    Assert.assertNotNull(ioPeon.makeOutputStream(TMP_FILE_NAME));
  }

  @Test public void testMakeInputStream() throws IOException
  {
    Assert.assertNotNull(ioPeon.makeInputStream(tmpFolder.newFile(TMP_FILE_NAME).getName()));
  }

  @Test(expected = UnsupportedOperationException.class) public void testCleanup() throws IOException
  {
    ioPeon.cleanup();
  }
}
@@ -0,0 +1,131 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.indexer;

import com.google.common.io.ByteStreams;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapred.JobContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.ReflectionUtils;
import org.easymock.EasyMock;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

public class UtilsCompressionTest
{

  private static final String DUMMY_STRING = "Very important string";
  private static final String TMP_FILE_NAME = "test_file";
  private static final Class<? extends CompressionCodec> DEFAULT_COMPRESSION_CODEC = GzipCodec.class;
  private static final String CODEC_CLASS = "org.apache.hadoop.io.compress.GzipCodec";
  private Configuration jobConfig;
  private JobContext mockJobContext;
  private FileSystem defaultFileSystem;
  private CompressionCodec codec;
  private File tmpFile;
  private Path tmpPathWithoutExtension;
  private Path tmpPathWithExtension;

  @Rule
  public TemporaryFolder tmpFolder = new TemporaryFolder();

  @Before
  public void setUp() throws IOException
  {
    jobConfig = new Configuration();
    mockJobContext = EasyMock.createMock(JobContext.class);
    EasyMock.expect(mockJobContext.getConfiguration()).andReturn(jobConfig).anyTimes();
    EasyMock.replay(mockJobContext);

    jobConfig.setBoolean(FileOutputFormat.COMPRESS, true);
    jobConfig.set(FileOutputFormat.COMPRESS_CODEC, CODEC_CLASS);
    Class<? extends CompressionCodec> codecClass = FileOutputFormat
        .getOutputCompressorClass(mockJobContext, DEFAULT_COMPRESSION_CODEC);
    codec = ReflectionUtils.newInstance(codecClass, jobConfig);

    tmpFile = tmpFolder.newFile(TMP_FILE_NAME + codec.getDefaultExtension());
    tmpPathWithExtension = new Path(tmpFile.getAbsolutePath());
    tmpPathWithoutExtension = new Path(tmpFile.getParent(), TMP_FILE_NAME);
    defaultFileSystem = tmpPathWithoutExtension.getFileSystem(jobConfig);
  }

  @After
  public void tearDown()
  {
    tmpFolder.delete();
  }

  @Test public void testExistsCompressedFile() throws IOException
  {
    boolean expected = Utils.exists(mockJobContext,defaultFileSystem,tmpPathWithoutExtension);
    Assert.assertTrue("Should be true since file is created", expected);
    tmpFolder.delete();
    expected = Utils.exists(mockJobContext,defaultFileSystem,tmpPathWithoutExtension);
    Assert.assertFalse("Should be false since file is deleted",expected);
  }

  @Test
  public void testCompressedOpenInputStream() throws IOException
  {
    boolean overwrite = true;
    OutputStream outStream = codec.createOutputStream(defaultFileSystem.create(tmpPathWithExtension, overwrite));
    writeStingToOutputStream(DUMMY_STRING,outStream);
    InputStream inStream = Utils.openInputStream(mockJobContext, tmpPathWithoutExtension);
    Assert.assertNotNull("Input stream should not be Null",inStream);
    String actual = new String(ByteStreams.toByteArray(inStream), StandardCharsets.UTF_8.toString());
    Assert.assertEquals("Strings not matching", DUMMY_STRING,actual);
    inStream.close();
  }

  @Test
  public void testCompressedMakePathAndOutputStream() throws IOException
  {
    boolean overwrite = true;
    OutputStream outStream = Utils.makePathAndOutputStream(mockJobContext,tmpPathWithoutExtension, overwrite);
    Assert.assertNotNull("Output stream should not be null",outStream);
    writeStingToOutputStream(DUMMY_STRING,outStream);
    InputStream inStream = codec.createInputStream(defaultFileSystem.open(tmpPathWithExtension));
    String actual = new String(ByteStreams.toByteArray(inStream), StandardCharsets.UTF_8.toString());
    Assert.assertEquals("Strings not matching", DUMMY_STRING,actual);
    inStream.close();
  }

  private void writeStingToOutputStream(String string, OutputStream outStream) throws IOException
  {
    outStream.write(string.getBytes(StandardCharsets.UTF_8.toString()));
    outStream.flush();
    outStream.close();
  }
}
@@ -0,0 +1,137 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.indexer;

import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;

import com.google.common.io.ByteStreams;
import com.metamx.common.ISE;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobContext;

import org.easymock.EasyMock;
import org.hamcrest.core.Is;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import java.io.File;
import java.io.InputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;


public class UtilsTest
{
  private static final String DUMMY_STRING = "Very important string";
  private static final String TMP_FILE_NAME = "test_file";
  private Configuration jobConfig;
  private JobContext mockJobContext;
  private Map expectedMap;
  private File tmpFile;
  private Path tmpPath;
  private FileSystem defaultFileSystem;
  private Set setOfKeys;

  @Rule
  public TemporaryFolder tmpFolder = new TemporaryFolder();

  private class CreateValueFromKey implements Function
  {
    @Override public Object apply(Object input)
    {
      return input.toString() + DUMMY_STRING;
    }
  }

  @Before
  public void setUp() throws IOException
  {
    jobConfig = new Configuration();
    mockJobContext = EasyMock.createMock(JobContext.class);
    EasyMock.expect(mockJobContext.getConfiguration()).andReturn(jobConfig).anyTimes();
    EasyMock.replay(mockJobContext);

    setOfKeys = new HashSet();
    setOfKeys.addAll(new ArrayList<>(Arrays.asList("key1","key2","key3")));
    expectedMap = (Map<String, Object>) Maps.asMap(setOfKeys, new CreateValueFromKey());

    tmpFile = tmpFolder.newFile(TMP_FILE_NAME);
    tmpPath = new Path(tmpFile.getAbsolutePath());
    defaultFileSystem = tmpPath.getFileSystem(jobConfig);
  }

  @After
  public void tearDown()
  {
    tmpFolder.delete();
  }

  @Test
  public void testExistsPlainFile() throws IOException
  {
    boolean expected = Utils.exists(mockJobContext,defaultFileSystem,tmpPath);
    Assert.assertTrue("Should be true since file is created",expected);
    tmpFolder.delete();
    expected = Utils.exists(mockJobContext,defaultFileSystem,tmpPath);
    Assert.assertFalse("Should be false since file is deleted",expected);
    EasyMock.verify(mockJobContext);
  }

  @Test
  public void testPlainStoreThenGetStats() throws IOException
  {
    Utils.storeStats(mockJobContext, tmpPath,expectedMap);
    Map actualMap = Utils.getStats(mockJobContext, tmpPath);
    Assert.assertThat(actualMap,Is.is(actualMap));
    EasyMock.verify(mockJobContext);
  }

  @Test(expected = ISE.class)
  public void testExceptionInMakePathAndOutputStream() throws IOException
  {
    boolean overwrite = false;
    Utils.makePathAndOutputStream(mockJobContext,tmpPath,overwrite);
  }

  @Test
  public void testPlainOpenInputStream() throws IOException
  {
    FileUtils.writeStringToFile(tmpFile, DUMMY_STRING);
    InputStream inStream = Utils.openInputStream(mockJobContext, tmpPath);
    Assert.assertNotNull(inStream);
    String expected = new String(ByteStreams.toByteArray(inStream), StandardCharsets.UTF_8.toString());
    Assert.assertEquals(expected, DUMMY_STRING);
  }
}
@@ -17,8 +17,10 @@

package io.druid.indexer.partitions;

import io.druid.jackson.DefaultObjectMapper;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Throwables;
import io.druid.indexer.HadoopDruidIndexerConfigTest;
import org.junit.Assert;
import org.junit.Test;

@@ -26,6 +28,8 @@ import org.junit.Test;
 */
public class HashedPartitionsSpecTest
{
  private static final ObjectMapper jsonMapper = new DefaultObjectMapper();

  @Test
  public void testHashedPartitionsSpec() throws Exception
  {
@@ -33,7 +37,7 @@ public class HashedPartitionsSpecTest
    final PartitionsSpec partitionsSpec;

    try {
-     partitionsSpec = HadoopDruidIndexerConfigTest.jsonReadWriteRead(
+     partitionsSpec = jsonReadWriteRead(
          "{"
          + " \"targetPartitionSize\":100,"
          + " \"type\":\"hashed\""
@@ -73,7 +77,7 @@ public class HashedPartitionsSpecTest
    final PartitionsSpec partitionsSpec;

    try {
-     partitionsSpec = HadoopDruidIndexerConfigTest.jsonReadWriteRead(
+     partitionsSpec = jsonReadWriteRead(
          "{"
          + " \"type\":\"hashed\","
          + " \"numShards\":2"
@@ -111,4 +115,14 @@ public class HashedPartitionsSpecTest

    Assert.assertTrue("partitionsSpec", partitionsSpec instanceof HashedPartitionsSpec);
  }

  private <T> T jsonReadWriteRead(String s, Class<T> klass)
  {
    try {
      return jsonMapper.readValue(jsonMapper.writeValueAsBytes(jsonMapper.readValue(s, klass)), klass);
    }
    catch (Exception e) {
      throw Throwables.propagate(e);
    }
  }
}
@@ -0,0 +1,35 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.indexer.path;

import org.junit.Assert;
import org.junit.Test;

public class GranularUnprocessedPathSpecTest
{
  @Test
  public void testSetGetMaxBuckets()
  {
    GranularUnprocessedPathSpec granularUnprocessedPathSpec = new GranularUnprocessedPathSpec();
    int maxBuckets = 5;
    granularUnprocessedPathSpec.setMaxBuckets(maxBuckets);
    Assert.assertEquals(maxBuckets,granularUnprocessedPathSpec.getMaxBuckets());
  }
}
@@ -0,0 +1,70 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.indexer.path;

import com.metamx.common.Granularity;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;


public class GranularityPathSpecTest
{
  private GranularityPathSpec granularityPathSpec;
  private final String TEST_STRING_PATH = "TEST";
  private final String TEST_STRING_PATTERN = "*.TEST";
  private final String TEST_STRING_FORMAT = "F_TEST";

  @Before public void setUp()
  {
    granularityPathSpec = new GranularityPathSpec();
  }

  @After public void tearDown()
  {
    granularityPathSpec = null;
  }

  @Test public void testSetInputPath()
  {
    granularityPathSpec.setInputPath(TEST_STRING_PATH);
    Assert.assertEquals(TEST_STRING_PATH,granularityPathSpec.getInputPath());
  }

  @Test public void testSetFilePattern()
  {
    granularityPathSpec.setFilePattern(TEST_STRING_PATTERN);
    Assert.assertEquals(TEST_STRING_PATTERN,granularityPathSpec.getFilePattern());
  }

  @Test public void testSetPathFormat()
  {
    granularityPathSpec.setPathFormat(TEST_STRING_FORMAT);
    Assert.assertEquals(TEST_STRING_FORMAT,granularityPathSpec.getPathFormat());
  }

  @Test public void testSetDataGranularity()
  {
    Granularity granularity = Granularity.DAY;
    granularityPathSpec.setDataGranularity(granularity);
    Assert.assertEquals(granularity,granularityPathSpec.getDataGranularity());
  }
}

@@ -46,12 +46,13 @@ public class FileTaskLogs implements TaskLogs
  @Override
  public void pushTaskLog(final String taskid, File file) throws IOException
  {
    if (!config.getDirectory().exists()) {
      config.getDirectory().mkdir();
    if (config.getDirectory().exists() || config.getDirectory().mkdirs()) {
      final File outputFile = fileForTask(taskid);
      Files.copy(file, outputFile);
      log.info("Wrote task log to: %s", outputFile);
    } else {
      throw new IOException(String.format("Unable to create task log dir[%s]", config.getDirectory()));
    }
    final File outputFile = fileForTask(taskid);
    Files.copy(file, outputFile);
    log.info("Wrote task log to: %s", outputFile);
  }

  @Override

@@ -0,0 +1,42 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.indexing.common;

import org.joda.time.Period;
import org.junit.Assert;
import org.junit.Test;

import org.joda.time.Duration;

public class RetryPolicyFactoryTest
{
  @Test
  public void testMakeRetryPolicy()
  {
    RetryPolicyConfig config = new RetryPolicyConfig()
        .setMinWait(new Period("PT1S"))
        .setMaxWait(new Period("PT10S"))
        .setMaxRetryCount(1);
    RetryPolicyFactory retryPolicyFactory = new RetryPolicyFactory(config);
    RetryPolicy retryPolicy = retryPolicyFactory.makeRetryPolicy();
    Assert.assertEquals(new Duration("PT1S"),retryPolicy.getAndIncrementRetryDelay());
    Assert.assertTrue(retryPolicy.hasExceededRetryThreshold());
  }
}

@@ -0,0 +1,177 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.indexing.common;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.metamx.emitter.service.ServiceEmitter;
import com.metamx.metrics.MonitorScheduler;
import io.druid.client.FilteredServerView;
import io.druid.indexing.common.actions.TaskActionClientFactory;
import io.druid.indexing.common.config.TaskConfig;
import io.druid.indexing.common.task.Task;
import io.druid.query.QueryRunnerFactoryConglomerate;
import io.druid.segment.loading.DataSegmentKiller;
import io.druid.segment.loading.DataSegmentMover;
import io.druid.segment.loading.DataSegmentPusher;
import io.druid.segment.loading.OmniSegmentLoader;
import io.druid.segment.loading.DataSegmentArchiver;
import io.druid.segment.loading.SegmentLoadingException;
import io.druid.segment.loading.SegmentLoaderConfig;
import io.druid.server.coordination.DataSegmentAnnouncer;
import io.druid.timeline.DataSegment;
import org.easymock.EasyMock;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;

public class TaskToolboxTest
{

  private TaskToolboxFactory taskToolbox = null;
  private TaskActionClientFactory mockTaskActionClientFactory = EasyMock.createMock(TaskActionClientFactory.class);
  private ServiceEmitter mockEmitter = EasyMock.createMock(ServiceEmitter.class);
  private DataSegmentPusher mockSegmentPusher = EasyMock.createMock(DataSegmentPusher.class);
  private DataSegmentKiller mockDataSegmentKiller = EasyMock.createMock(DataSegmentKiller.class);
  private DataSegmentMover mockDataSegmentMover = EasyMock.createMock(DataSegmentMover.class);
  private DataSegmentArchiver mockDataSegmentArchiver = EasyMock.createMock(DataSegmentArchiver.class);
  private DataSegmentAnnouncer mockSegmentAnnouncer = EasyMock.createMock(DataSegmentAnnouncer.class);
  private FilteredServerView mockNewSegmentServerView = EasyMock.createMock(FilteredServerView.class);
  private QueryRunnerFactoryConglomerate mockQueryRunnerFactoryConglomerate
      = EasyMock.createMock(QueryRunnerFactoryConglomerate.class);
  private MonitorScheduler mockMonitorScheduler = EasyMock.createMock(MonitorScheduler.class);
  private ExecutorService mockQueryExecutorService = EasyMock.createMock(ExecutorService.class);
  private ObjectMapper ObjectMapper = new ObjectMapper();
  private OmniSegmentLoader mockOmniSegmentLoader = EasyMock.createMock(OmniSegmentLoader.class);
  private Task task = EasyMock.createMock(Task.class);

  @Rule
  public TemporaryFolder temporaryFolder = new TemporaryFolder();

  @Before
  public void setUp() throws IOException
  {
    EasyMock.expect(task.getId()).andReturn("task_id").anyTimes();
    EasyMock.replay(task);

    taskToolbox = new TaskToolboxFactory(
        new TaskConfig(temporaryFolder.newFile().toString(), null, null, 50000, null),
        mockTaskActionClientFactory,
        mockEmitter,
        mockSegmentPusher,
        mockDataSegmentKiller,
        mockDataSegmentMover,
        mockDataSegmentArchiver,
        mockSegmentAnnouncer,
        mockNewSegmentServerView,
        mockQueryRunnerFactoryConglomerate,
        mockQueryExecutorService,
        mockMonitorScheduler,
        new SegmentLoaderFactory(mockOmniSegmentLoader),
        ObjectMapper
    );
  }

  @Test
  public void testGetDataSegmentArchiver()
  {
    Assert.assertEquals(mockDataSegmentArchiver,taskToolbox.build(task).getDataSegmentArchiver());
  }

  @Test
  public void testGetSegmentAnnouncer()
  {
    Assert.assertEquals(mockSegmentAnnouncer,taskToolbox.build(task).getSegmentAnnouncer());
  }

  @Test
  public void testGetNewSegmentServerView()
  {
    Assert.assertEquals(mockNewSegmentServerView,taskToolbox.build(task).getNewSegmentServerView());
  }

  @Test
  public void testGetQueryRunnerFactoryConglomerate()
  {
    Assert.assertEquals(mockQueryRunnerFactoryConglomerate,taskToolbox.build(task).getQueryRunnerFactoryConglomerate());
  }

  @Test
  public void testGetQueryExecutorService()
  {
    Assert.assertEquals(mockQueryExecutorService,taskToolbox.build(task).getQueryExecutorService());
  }

  @Test
  public void testGetMonitorScheduler()
  {
    Assert.assertEquals(mockMonitorScheduler,taskToolbox.build(task).getMonitorScheduler());
  }

  @Test
  public void testGetObjectMapper()
  {
    Assert.assertEquals(ObjectMapper,taskToolbox.build(task).getObjectMapper());
  }

  @Test
  public void testFetchSegments() throws SegmentLoadingException, IOException
  {
    File expectedFile = temporaryFolder.newFile();
    EasyMock.expect(mockOmniSegmentLoader.getSegmentFiles((DataSegment)EasyMock.anyObject()))
        .andReturn(expectedFile).anyTimes();
    EasyMock.expect(mockOmniSegmentLoader.withConfig((SegmentLoaderConfig)EasyMock.anyObject()))
        .andReturn(mockOmniSegmentLoader).anyTimes();
    EasyMock.replay(mockOmniSegmentLoader);
    DataSegment dataSegment = DataSegment.builder().dataSource("source").interval(new Interval("2012-01-01/P1D")).version("1").size(1).build();
    List<DataSegment> segments = ImmutableList.of
        (
            dataSegment
        );
    Map actualFetchedSegment = taskToolbox.build(task).fetchSegments(segments);
    Assert.assertEquals(expectedFile, actualFetchedSegment.get(dataSegment));
  }
  @Test
  public void testGetEmitter()
  {
    Assert.assertEquals(mockEmitter,taskToolbox.build(task).getEmitter());
  }

  @Test
  public void testGetDataSegmentKiller()
  {
    Assert.assertEquals(mockDataSegmentKiller, taskToolbox.build(task).getDataSegmentKiller());
  }

  @Test
  public void testGetDataSegmentMover()
  {
    Assert.assertEquals(mockDataSegmentMover, taskToolbox.build(task).getDataSegmentMover());
  }
}

@@ -28,6 +28,7 @@ import org.junit.Assert;
import org.junit.Test;

import java.io.File;
import java.io.IOException;
import java.util.Map;

public class FileTaskLogsTest
@@ -37,7 +38,7 @@ public class FileTaskLogsTest
  {
    final File tmpDir = Files.createTempDir();
    try {
      final File logDir = new File(tmpDir, "logs");
      final File logDir = new File(tmpDir, "druid/logs");
      final File logFile = new File(tmpDir, "log");
      Files.write("blah", logFile, Charsets.UTF_8);
      final TaskLogs taskLogs = new FileTaskLogs(new FileTaskLogsConfig(logDir));
@@ -54,4 +55,34 @@ public class FileTaskLogsTest
      FileUtils.deleteDirectory(tmpDir);
    }
  }

  @Test
  public void testPushTaskLogDirCreationFails() throws Exception
  {
    final File tmpDir = Files.createTempDir();

    try {
      IOException thrown = null;

      final File logDir = new File(tmpDir, "druid/logs");
      final File logFile = new File(tmpDir, "log");
      Files.write("blah", logFile, Charsets.UTF_8);

      if(!tmpDir.setWritable(false)) {
        throw new RuntimeException("failed to make tmp dir read-only");
      }

      final TaskLogs taskLogs = new FileTaskLogs(new FileTaskLogsConfig(logDir));
      try {
        taskLogs.pushTaskLog("foo", logFile);
      } catch(IOException ex) {
        thrown = ex;
      }
      Assert.assertNotNull("pushTaskLog should fail with exception of dir creation error", thrown);
    }
    finally {
      tmpDir.setWritable(true);
      FileUtils.deleteDirectory(tmpDir);
    }
  }
}

@@ -18,17 +18,11 @@
package io.druid.indexing.worker;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.introspect.AnnotationIntrospectorPair;
import com.fasterxml.jackson.databind.introspect.GuiceAnnotationIntrospector;
import com.fasterxml.jackson.databind.introspect.GuiceInjectableValues;
import com.fasterxml.jackson.databind.jsontype.NamedType;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.io.Files;
import com.google.inject.Binder;
import com.google.inject.Guice;
import com.google.inject.Injector;
import io.druid.curator.PotentiallyGzippedCompressionProvider;
import io.druid.indexing.common.IndexingServiceCondition;
import io.druid.indexing.common.SegmentLoaderFactory;
@@ -42,7 +36,6 @@ import io.druid.indexing.overlord.TestRemoteTaskRunnerConfig;
import io.druid.indexing.overlord.ThreadPoolTaskRunner;
import io.druid.indexing.worker.config.WorkerConfig;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.segment.column.ColumnConfig;
import io.druid.segment.loading.DataSegmentPuller;
import io.druid.segment.loading.LocalDataSegmentPuller;
import io.druid.segment.loading.OmniSegmentLoader;
@@ -67,39 +60,6 @@ import java.util.List;
public class WorkerTaskMonitorTest
{
  private static final ObjectMapper jsonMapper = new DefaultObjectMapper();
  private static final Injector injector = Guice.createInjector(
      new com.google.inject.Module()
      {
        @Override
        public void configure(Binder binder)
        {
          binder.bind(ColumnConfig.class).toInstance(
              new ColumnConfig()
              {
                @Override
                public int columnCacheSizeBytes()
                {
                  return 1024 * 1024;
                }
              }
          );
        }
      }
  );

  static {
    final GuiceAnnotationIntrospector guiceIntrospector = new GuiceAnnotationIntrospector();

    jsonMapper.setInjectableValues(new GuiceInjectableValues(injector));
    jsonMapper.setAnnotationIntrospectors(
        new AnnotationIntrospectorPair(
            guiceIntrospector, jsonMapper.getSerializationConfig().getAnnotationIntrospector()
        ),
        new AnnotationIntrospectorPair(
            guiceIntrospector, jsonMapper.getDeserializationConfig().getAnnotationIntrospector()
        )
    );
  }

  private static final Joiner joiner = Joiner.on("/");
  private static final String basePath = "/test/druid";

pom.xml
@@ -563,6 +563,12 @@
        <artifactId>testng</artifactId>
        <version>6.8.7</version>
      </dependency>
      <dependency>
        <groupId>org.hamcrest</groupId>
        <artifactId>hamcrest-all</artifactId>
        <version>1.3</version>
        <scope>test</scope>
      </dependency>
    </dependencies>
  </dependencyManagement>

@@ -0,0 +1,70 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.client.indexing;

import com.google.common.collect.Lists;
import io.druid.timeline.DataSegment;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import java.util.List;

public class ClientAppendQueryTest
{
  private ClientAppendQuery clientAppendQuery;
  private static final String DATA_SOURCE = "data_source";
  private List<DataSegment> segments = Lists.<DataSegment>newArrayList(
      new DataSegment(DATA_SOURCE, new Interval(new DateTime(), new DateTime().plus(1)), new DateTime().toString(), null,
          null, null, null, 0, 0));
  @Before
  public void setUp()
  {
    clientAppendQuery = new ClientAppendQuery(DATA_SOURCE, segments);
  }

  @Test
  public void testGetType()
  {
    Assert.assertEquals("append",clientAppendQuery.getType());
  }

  @Test
  public void testGetDataSource()
  {
    Assert.assertEquals(DATA_SOURCE, clientAppendQuery.getDataSource());
  }

  @Test
  public void testGetSegments()
  {
    Assert.assertEquals(segments, clientAppendQuery.getSegments());
  }

  @Test
  public void testToString()
  {
    Assert.assertTrue(clientAppendQuery.toString().contains(DATA_SOURCE));
    Assert.assertTrue(clientAppendQuery.toString().contains(segments.toString()));
  }
}

@@ -0,0 +1,66 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.client.indexing;

import io.druid.timeline.DataSegment;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Test;

public class ClientConversionQueryTest
{
  private ClientConversionQuery clientConversionQuery;
  private static final String DATA_SOURCE = "data_source";
  private static final Interval INTERVAL = new Interval(new DateTime(), new DateTime().plus(1));
  private static final DataSegment DATA_SEGMENT = new DataSegment(DATA_SOURCE, INTERVAL, new DateTime().toString(), null,
      null, null, null, 0, 0);

  @Test
  public void testGetType()
  {
    clientConversionQuery = new ClientConversionQuery(DATA_SEGMENT);
    Assert.assertEquals("version_converter", clientConversionQuery.getType());
  }

  @Test
  public void testGetDataSource()
  {
    clientConversionQuery = new ClientConversionQuery(DATA_SEGMENT);
    Assert.assertEquals(DATA_SOURCE, clientConversionQuery.getDataSource());

  }

  @Test
  public void testGetInterval()
  {
    clientConversionQuery = new ClientConversionQuery(DATA_SEGMENT);
    Assert.assertEquals(INTERVAL, clientConversionQuery.getInterval());
  }

  @Test
  public void testGetSegment()
  {
    clientConversionQuery = new ClientConversionQuery(DATA_SEGMENT);
    Assert.assertEquals(DATA_SEGMENT, clientConversionQuery.getSegment());
    clientConversionQuery = new ClientConversionQuery(DATA_SOURCE,INTERVAL);
    Assert.assertNull(clientConversionQuery.getSegment());
  }
}

@@ -0,0 +1,64 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.client.indexing;

import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

public class ClientKillQueryTest
{
  private static final String DATA_SOURCE = "data_source";
  private static final Interval INTERVAL = new Interval(new DateTime(), new DateTime().plus(1));
  ClientKillQuery clientKillQuery;

  @Before
  public void setUp()
  {
    clientKillQuery = new ClientKillQuery(DATA_SOURCE, INTERVAL);
  }

  @After
  public void tearDown()
  {
    clientKillQuery = null;
  }

  @Test
  public void testGetType()
  {
    Assert.assertEquals("kill", clientKillQuery.getType());
  }

  @Test
  public void testGetDataSource()
  {
    Assert.assertEquals(DATA_SOURCE, clientKillQuery.getDataSource());
  }

  @Test
  public void testGetInterval()
  {
    Assert.assertEquals(INTERVAL, clientKillQuery.getInterval());
  }
}

@@ -0,0 +1,73 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.client.indexing;

import com.google.common.collect.Lists;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.timeline.DataSegment;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Test;

import java.util.List;

public class ClientMergeQueryTest
{
  private static final String DATA_SOURCE = "data_source";
  private static final Interval INTERVAL = new Interval(new DateTime(), new DateTime().plus(1));
  private static final DataSegment DATA_SEGMENT = new DataSegment(DATA_SOURCE, INTERVAL, new DateTime().toString(), null,
      null, null, null, 0, 0);
  private static final List<DataSegment> SEGMENT_LIST = Lists.newArrayList(DATA_SEGMENT);
  private static final List<AggregatorFactory> AGGREGATOR_LIST = Lists.newArrayList();
  private static final ClientMergeQuery CLIENT_MERGE_QUERY = new ClientMergeQuery(DATA_SOURCE,SEGMENT_LIST,AGGREGATOR_LIST);

  @Test
  public void testGetType()
  {
    Assert.assertEquals("append", CLIENT_MERGE_QUERY.getType());
  }

  @Test
  public void testGetDataSource()
  {
    Assert.assertEquals(DATA_SOURCE, CLIENT_MERGE_QUERY.getDataSource());
  }

  @Test
  public void testGetSegments()
  {
    Assert.assertEquals(SEGMENT_LIST, CLIENT_MERGE_QUERY.getSegments());
  }

  @Test
  public void testGetAggregators()
  {
    Assert.assertEquals(AGGREGATOR_LIST, CLIENT_MERGE_QUERY.getAggregators());
  }

  @Test
  public void testToString()
  {
    Assert.assertTrue(CLIENT_MERGE_QUERY.toString().contains(DATA_SOURCE));
    Assert.assertTrue(CLIENT_MERGE_QUERY.toString().contains(SEGMENT_LIST.toString()));
    Assert.assertTrue(CLIENT_MERGE_QUERY.toString().contains(AGGREGATOR_LIST.toString()));
  }
}

@@ -0,0 +1,102 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.curator.discovery;

import io.druid.client.selector.Server;
import org.apache.curator.x.discovery.ServiceInstance;
import org.apache.curator.x.discovery.ServiceProvider;
import org.easymock.EasyMock;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import java.io.IOException;

public class ServerDiscoverySelectorTest
{

  private ServiceProvider serviceProvider;
  private ServerDiscoverySelector serverDiscoverySelector;
  private ServiceInstance instance;
  private static final int PORT = 8080;
  private static final String ADDRESS = "localhost";

  @Before
  public void setUp()
  {
    serviceProvider = EasyMock.createMock(ServiceProvider.class);
    instance = EasyMock.createMock(ServiceInstance.class);
    serverDiscoverySelector = new ServerDiscoverySelector(serviceProvider);
  }

  @Test
  public void testPick() throws Exception
  {
    EasyMock.expect(serviceProvider.getInstance()).andReturn(instance).anyTimes();
    EasyMock.expect(instance.getAddress()).andReturn(ADDRESS).anyTimes();
    EasyMock.expect(instance.getPort()).andReturn(PORT).anyTimes();
    EasyMock.replay(instance,serviceProvider);
    Server server = serverDiscoverySelector.pick();
    Assert.assertEquals(PORT,server.getPort());
    Assert.assertEquals(ADDRESS,server.getAddress());
    Assert.assertTrue(server.getHost().contains(new Integer(PORT).toString()));
    Assert.assertTrue(server.getHost().contains(ADDRESS));
    Assert.assertEquals(new String("http"), server.getScheme());
    EasyMock.verify(instance,serviceProvider);
  }

  @Test
  public void testPickWithNullInstance() throws Exception
  {
    EasyMock.expect(serviceProvider.getInstance()).andReturn(null).anyTimes();
    EasyMock.replay(serviceProvider);
    Server server = serverDiscoverySelector.pick();
    Assert.assertNull(server);
    EasyMock.verify(serviceProvider);
  }

  @Test
  public void testPickWithException() throws Exception
  {
    EasyMock.expect(serviceProvider.getInstance()).andThrow(new Exception()).anyTimes();
    EasyMock.replay(serviceProvider);
    Server server = serverDiscoverySelector.pick();
    Assert.assertNull(server);
    EasyMock.verify(serviceProvider);
  }

  @Test
  public void testStart() throws Exception
  {
    serviceProvider.start();
    EasyMock.replay(serviceProvider);
    serverDiscoverySelector.start();
    EasyMock.verify(serviceProvider);
  }

  @Test
  public void testStop() throws IOException
  {
    serviceProvider.close();
    EasyMock.replay(serviceProvider);
    serverDiscoverySelector.stop();
    EasyMock.verify(serviceProvider);
  }
}

@@ -0,0 +1,70 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.server.http;

import io.druid.server.coordinator.DruidCoordinator;
import org.easymock.EasyMock;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import java.net.URL;

public class CoordinatorRedirectInfoTest
{
  private DruidCoordinator druidCoordinator;
  private CoordinatorRedirectInfo coordinatorRedirectInfo;

  @Before
  public void setUp()
  {
    druidCoordinator = EasyMock.createMock(DruidCoordinator.class);
    coordinatorRedirectInfo = new CoordinatorRedirectInfo(druidCoordinator);
  }

  @Test
  public void testDoLocal()
  {
    EasyMock.expect(druidCoordinator.isLeader()).andReturn(true).anyTimes();
    EasyMock.replay(druidCoordinator);
    Assert.assertTrue(coordinatorRedirectInfo.doLocal());
    EasyMock.verify(druidCoordinator);
  }

  @Test
  public void testGetRedirectURLNull()
  {
    EasyMock.expect(druidCoordinator.getCurrentLeader()).andReturn(null).anyTimes();
    EasyMock.replay(druidCoordinator);
    URL url = coordinatorRedirectInfo.getRedirectURL("query", "request");
    Assert.assertNull(url);
  }
  @Test
  public void testGetRedirectURL()
  {
    String host = "localhost";
    String query = "query";
    String request = "request";
    EasyMock.expect(druidCoordinator.getCurrentLeader()).andReturn(host).anyTimes();
    EasyMock.replay(druidCoordinator);
    URL url = coordinatorRedirectInfo.getRedirectURL(query,request);
    Assert.assertTrue(url.toString().contains(host+request+"?"+query));
  }
}

@@ -0,0 +1,66 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.server.log;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.io.CharStreams;
import io.druid.server.RequestLogLine;
import org.easymock.EasyMock;
import org.joda.time.DateTime;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;

import org.junit.rules.TemporaryFolder;

import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;

public class FileRequestLoggerTest
{
  private final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
  private static final String HOST = "localhost";

  @Rule
  public TemporaryFolder temporaryFolder = new TemporaryFolder();

  @Test public void testLog() throws IOException
  {
    ObjectMapper objectMapper = new ObjectMapper();
    DateTime dateTime = new DateTime();
    File logDir = temporaryFolder.newFolder();
    String actualLogString = new String(dateTime.toString()+"\t"+HOST);

    FileRequestLogger fileRequestLogger = new FileRequestLogger(objectMapper, scheduler, logDir);
    fileRequestLogger.start();
    RequestLogLine requestLogLine = EasyMock.createMock(RequestLogLine.class);
    EasyMock.expect(requestLogLine.getLine((ObjectMapper) EasyMock.anyObject())).
        andReturn(actualLogString).anyTimes();
    EasyMock.replay(requestLogLine);
    fileRequestLogger.log(requestLogLine);
    File logFile = new File(logDir, dateTime.toString("yyyy-MM-dd'.log'"));
    String logString = CharStreams.toString(new FileReader(logFile));
    Assert.assertTrue(logString.contains(actualLogString));
    fileRequestLogger.stop();
  }
}

@@ -55,6 +55,13 @@
      <groupId>io.airlift</groupId>
      <artifactId>airline</artifactId>
    </dependency>

    <!-- Test Dependencies -->
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <scope>test</scope>
    </dependency>
  </dependencies>

  <build>

@@ -0,0 +1,82 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.cli.convert;

import org.apache.commons.io.FileUtils;

import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
import org.junit.Rule;

import io.airlift.command.Cli;

import org.junit.rules.TemporaryFolder;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class ConvertPropertiesTest
{
  @Rule
  public TemporaryFolder tmpFolder = new TemporaryFolder();
  @Test
  public void testConvertProperties() throws IOException
  {
    File inputFile = tmpFolder.newFile();
    File outputFile = tmpFolder.newFile();
    String oldVersionPropertiesString = "druid.database.rules.defaultDatasource 1"
        + System.lineSeparator()
        + "druid.indexer.chathandler.publishDiscovery true"
        + System.lineSeparator()
        + "druid.database.segmentTable table"
        + System.lineSeparator()
        + "druid.pusher.local false"
        + System.lineSeparator()
        + "druid.paths.indexCache hdfs://path"
        + System.lineSeparator()
        + "notHandled";
    String newVersionPropertiesString;
    FileUtils.writeStringToFile(inputFile, oldVersionPropertiesString, StandardCharsets.UTF_8.toString());
    Cli<?> parser = Cli.builder("convertProps")
        .withCommand(ConvertProperties.class)
        .build();
    Object command = parser.parse("convertProps","-f", inputFile.getAbsolutePath(),"-o", outputFile.getAbsolutePath());
    Assert.assertNotNull(command);
    ConvertProperties convertProperties = (ConvertProperties) command;
    convertProperties.run();

    newVersionPropertiesString = FileUtils.readFileToString(outputFile, StandardCharsets.UTF_8.toString());
    System.out.printf(newVersionPropertiesString);
    Assert.assertTrue(newVersionPropertiesString.contains("druid.manager.rules.defaultTier=1"));
    Assert.assertTrue(newVersionPropertiesString.contains("druid.db.tables.segments=table"));
    Assert.assertTrue(newVersionPropertiesString.contains("druid.indexer.task.chathandler.type=curator"));
    Assert.assertTrue(newVersionPropertiesString.contains("druid.storage.local=false"));
    Assert.assertTrue(newVersionPropertiesString.contains("druid.segmentCache.locations=[{\"path\": \"hdfs://path\", \"maxSize\": null}]"));
    Assert.assertTrue(newVersionPropertiesString.contains("notHandled"));
  }

  @After
  public void tearDown()
  {
    tmpFolder.delete();
  }
}

@@ -0,0 +1,76 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.cli.validate;

import io.airlift.command.Cli;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import java.io.File;
import java.io.IOException;

public class DruidJsonValidatorTest
{
  private File inputFile;
  @Rule
  public TemporaryFolder temporaryFolder = new TemporaryFolder();

  @Before
  public void setUp() throws IOException
  {
    inputFile = temporaryFolder.newFile();
  }

  @Test(expected = UnsupportedOperationException.class)
  public void testExceptionCase()
  {
    String type = "";
    Cli<?> parser = Cli.builder("validator")
        .withCommand(DruidJsonValidator.class)
        .build();
    Object command = parser.parse("validator","-f", inputFile.getAbsolutePath(), "-t", type);
    Assert.assertNotNull(command);
    DruidJsonValidator druidJsonValidator = (DruidJsonValidator) command;
    druidJsonValidator.run();
  }

  @Test(expected = RuntimeException.class)
  public void testExceptionCaseNoFile()
  {
    String type = "query";
    Cli<?> parser = Cli.builder("validator")
        .withCommand(DruidJsonValidator.class)
        .build();
    Object command = parser.parse("validator","-f", "", "-t", type);
    Assert.assertNotNull(command);
    DruidJsonValidator druidJsonValidator = (DruidJsonValidator) command;
    druidJsonValidator.run();
  }

  @After public void tearDown()
  {
    temporaryFolder.delete();
  }
}