diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java
index 2b7766f7f72..a06df928116 100644
--- a/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java
+++ b/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java
@@ -27,7 +27,7 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 import com.google.common.io.ByteSource;
 import com.google.common.io.Files;
-import io.druid.client.DruidDataSource;
+import io.druid.client.ImmutableDruidDataSource;
 import io.druid.data.input.impl.DelimitedParseSpec;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.StringInputRowParser;
@@ -263,7 +263,7 @@ public class HadoopConverterJobTest
       Thread.sleep(10);
     }
     manager.poll();
-    final DruidDataSource druidDataSource = manager.getInventoryValue(DATASOURCE);
+    final ImmutableDruidDataSource druidDataSource = manager.getInventoryValue(DATASOURCE);
     manager.stop();
     return Lists.newArrayList(druidDataSource.getSegments());
   }
diff --git a/indexing-service/src/test/java/io/druid/indexing/test/TestServerView.java b/indexing-service/src/test/java/io/druid/indexing/test/TestServerView.java
index ae7dcbcd7f5..2217248cdf7 100644
--- a/indexing-service/src/test/java/io/druid/indexing/test/TestServerView.java
+++ b/indexing-service/src/test/java/io/druid/indexing/test/TestServerView.java
@@ -27,6 +27,7 @@ import io.druid.java.util.common.Pair;
 import io.druid.server.coordination.DruidServerMetadata;
 import io.druid.timeline.DataSegment;
 
+import java.util.Collection;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
@@ -126,7 +127,7 @@ public class TestServerView implements FilteredServerInventoryView, ServerView.S
   }
 
   @Override
-  public Iterable<DruidServer> getInventory()
+  public Collection<DruidServer> getInventory()
   {
     return null;
   }
diff --git a/server/src/main/java/io/druid/client/AbstractCuratorServerInventoryView.java b/server/src/main/java/io/druid/client/AbstractCuratorServerInventoryView.java
index 173dd8dfda6..fec18e5a654 100644
--- a/server/src/main/java/io/druid/client/AbstractCuratorServerInventoryView.java
+++ b/server/src/main/java/io/druid/client/AbstractCuratorServerInventoryView.java
@@ -36,6 +36,7 @@ import org.apache.curator.framework.CuratorFramework;
 import org.apache.curator.utils.ZKPaths;
 
 import java.io.IOException;
+import java.util.Collection;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
@@ -204,7 +205,7 @@ public abstract class AbstractCuratorServerInventoryView<InventoryValueType> implemen
   }
 
   @Override
-  public Iterable<DruidServer> getInventory()
+  public Collection<DruidServer> getInventory()
   {
     return inventoryManager.getInventory();
   }
@@ -282,7 +283,7 @@ public abstract class AbstractCuratorServerInventoryView<InventoryValueType> implemen
       return;
     }
 
-    container.addDataSegment(inventory.getIdentifier(), inventory);
+    container.addDataSegment(inventory);
 
     runSegmentCallbacks(
         new Function<SegmentCallback, CallbackAction>()
diff --git a/server/src/main/java/io/druid/client/CoordinatorServerView.java b/server/src/main/java/io/druid/client/CoordinatorServerView.java
index e436b56a347..9e3c0f874d2 100644
--- a/server/src/main/java/io/druid/client/CoordinatorServerView.java
+++ b/server/src/main/java/io/druid/client/CoordinatorServerView.java
@@ -23,7 +23,6 @@ import com.google.common.collect.Iterables;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Ordering;
 import com.google.inject.Inject;
-
 import io.druid.java.util.common.concurrent.Execs;
 import io.druid.java.util.common.logger.Logger;
 import io.druid.query.DataSource;
@@ -32,6 +31,7 @@ import io.druid.timeline.DataSegment;
 import io.druid.timeline.VersionedIntervalTimeline;
 import io.druid.timeline.partition.PartitionChunk;
 
+import java.util.Collection;
 import java.util.Map;
 import java.util.concurrent.ExecutorService;
@@ -203,7 +203,7 @@ public class CoordinatorServerView implements InventoryView
   }
 
   @Override
-  public Iterable<DruidServer> getInventory()
+  public Collection<DruidServer> getInventory()
   {
     return baseView.getInventory();
   }
diff --git a/server/src/main/java/io/druid/client/DruidDataSource.java b/server/src/main/java/io/druid/client/DruidDataSource.java
index a6b3fa7a9c3..84b04b3b146 100644
--- a/server/src/main/java/io/druid/client/DruidDataSource.java
+++ b/server/src/main/java/io/druid/client/DruidDataSource.java
@@ -20,37 +20,31 @@ package io.druid.client;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Maps;
 import io.druid.timeline.DataSegment;
 
+import java.util.Collection;
 import java.util.Collections;
 import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentSkipListSet;
+import java.util.concurrent.ConcurrentHashMap;
 
 /**
 */
 public class DruidDataSource
 {
-  private final Object lock = new Object();
-
   private final String name;
   private final Map<String, String> properties;
-  private final Map<String, DataSegment> partitionNames;
-  private final ConcurrentSkipListSet<DataSegment> segmentsHolder;
+  private final ConcurrentHashMap<String, DataSegment> idToSegmentMap;
 
   public DruidDataSource(
       String name,
       Map<String, String> properties
   )
   {
-    this.name = name;
+    this.name = Preconditions.checkNotNull(name);
     this.properties = properties;
-
-    this.partitionNames = Maps.newHashMap();
-    this.segmentsHolder = new ConcurrentSkipListSet<DataSegment>();
+    this.idToSegmentMap = new ConcurrentHashMap<>();
   }
 
   @JsonProperty
@@ -65,115 +59,60 @@ public class DruidDataSource
     return properties;
   }
 
-  @JsonProperty
-  public Set<DataSegment> getSegments()
+  public Collection<DataSegment> getSegments()
   {
-    return Collections.unmodifiableSet(segmentsHolder);
+    return Collections.unmodifiableCollection(idToSegmentMap.values());
   }
 
-  public DruidDataSource addSegment(String partitionName, DataSegment dataSegment)
+  public DruidDataSource addSegment(DataSegment dataSegment)
  {
-    synchronized (lock) {
-      partitionNames.put(partitionName, dataSegment);
-
-      segmentsHolder.add(dataSegment);
-    }
+    idToSegmentMap.put(dataSegment.getIdentifier(), dataSegment);
     return this;
   }
 
-  public DruidDataSource addSegments(Map<String, DataSegment> partitionMap)
+  public DruidDataSource removePartition(String segmentId)
   {
-    synchronized (lock) {
-      partitionNames.putAll(partitionMap);
-
-      segmentsHolder.addAll(partitionMap.values());
-    }
+    idToSegmentMap.remove(segmentId);
     return this;
   }
 
-  public DruidDataSource removePartition(String partitionName)
+  public DataSegment getSegment(String identifier)
   {
-    synchronized (lock) {
-      DataSegment dataPart = partitionNames.remove(partitionName);
-
-      if (dataPart == null) {
-        return this;
-      }
-
-      segmentsHolder.remove(dataPart);
-    }
-
-    return this;
+    return idToSegmentMap.get(identifier);
   }
 
   public boolean isEmpty()
   {
-    return segmentsHolder.isEmpty();
+    return idToSegmentMap.isEmpty();
+  }
+
+  public ImmutableDruidDataSource toImmutableDruidDataSource()
+  {
+    return new ImmutableDruidDataSource(
+        name,
+        ImmutableMap.copyOf(properties),
+        ImmutableMap.copyOf(idToSegmentMap)
+    );
   }
 
   @Override
   public String toString()
   {
-    synchronized (lock) {
-      return "DruidDataSource{" +
-             "properties=" + properties +
-             ", partitions=" + segmentsHolder.toString() +
-             '}';
-    }
-  }
-
-  public ImmutableDruidDataSource toImmutableDruidDataSource()
-  {
-    synchronized (lock) {
-      return new ImmutableDruidDataSource(
-          name,
-          ImmutableMap.copyOf(properties),
-          ImmutableMap.copyOf(partitionNames),
-          ImmutableSet.copyOf(segmentsHolder)
-      );
-    }
+    return "DruidDataSource{" +
+           "properties=" + properties +
+           ", partitions=" + idToSegmentMap.values() +
+           '}';
   }
 
   @Override
   public boolean equals(Object o)
   {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    DruidDataSource that = (DruidDataSource) o;
-
-    if (name != null ? !name.equals(that.name) : that.name != null) {
-      return false;
-    }
-    if (partitionNames != null ? !partitionNames.equals(that.partitionNames) : that.partitionNames != null) {
-      return false;
-    }
-    if (properties != null ? !properties.equals(that.properties) : that.properties != null) {
-      return false;
-    }
-    if (segmentsHolder != null ? !segmentsHolder.equals(that.segmentsHolder) : that.segmentsHolder != null) {
-      return false;
-    }
-
-    return true;
+    throw new UnsupportedOperationException("Use ImmutableDruidDataSource instead");
   }
 
   @Override
   public int hashCode()
   {
-    int result = name != null ? name.hashCode() : 0;
-    result = 31 * result + (properties != null ? properties.hashCode() : 0);
-    result = 31 * result + (partitionNames != null ? partitionNames.hashCode() : 0);
-    result = 31 * result + (segmentsHolder != null ? segmentsHolder.hashCode() : 0);
-    return result;
-  }
-
-  public DataSegment getSegment(String identifier)
-  {
-    return partitionNames.get(identifier);
+    throw new UnsupportedOperationException("Use ImmutableDruidDataSource instead");
   }
 }
diff --git a/server/src/main/java/io/druid/client/DruidServer.java b/server/src/main/java/io/druid/client/DruidServer.java
index bf15d6a8987..842e6e4ad8e 100644
--- a/server/src/main/java/io/druid/client/DruidServer.java
+++ b/server/src/main/java/io/druid/client/DruidServer.java
@@ -24,13 +24,13 @@ import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Function;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Maps;
-
 import io.druid.java.util.common.logger.Logger;
 import io.druid.server.DruidNode;
 import io.druid.server.coordination.DruidServerMetadata;
 import io.druid.server.coordination.ServerType;
 import io.druid.timeline.DataSegment;
 
+import java.util.Collection;
 import java.util.Collections;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
@@ -167,9 +167,10 @@ public class DruidServer implements Comparable<DruidServer>
     return segments.get(segmentName);
   }
 
-  public DruidServer addDataSegment(String segmentId, DataSegment segment)
+  public DruidServer addDataSegment(DataSegment segment)
   {
     synchronized (lock) {
+      final String segmentId = segment.getIdentifier();
       DataSegment shouldNotExist = segments.get(segmentId);
 
       if (shouldNotExist != null) {
@@ -188,7 +189,7 @@ public class DruidServer implements Comparable<DruidServer>
         dataSources.put(dataSourceName, dataSource);
       }
 
-      dataSource.addSegment(segmentId, segment);
+      dataSource.addSegment(segment);
 
       segments.put(segmentId, segment);
       currSize += segment.getSize();
@@ -199,9 +200,7 @@ public class DruidServer implements Comparable<DruidServer>
   public DruidServer addDataSegments(DruidServer server)
   {
     synchronized (lock) {
-      for (Map.Entry<String, DataSegment> entry : server.segments.entrySet()) {
-        addDataSegment(entry.getKey(), entry.getValue());
-      }
+      server.segments.values().forEach(this::addDataSegment);
     }
     return this;
   }
@@ -246,7 +245,7 @@ public class DruidServer implements Comparable<DruidServer>
     return dataSources.get(dataSource);
   }
 
-  public Iterable<DruidDataSource> getDataSources()
+  public Collection<DruidDataSource> getDataSources()
   {
     return dataSources.values();
   }
@@ -272,7 +271,7 @@ public class DruidServer implements Comparable<DruidServer>
 
     DruidServer that = (DruidServer) o;
 
-    if (getName() != null ? !getName().equals(that.getName()) : that.getName() != null) {
+    if (!metadata.equals(that.metadata)) {
       return false;
     }
 
@@ -282,7 +281,7 @@ public class DruidServer implements Comparable<DruidServer>
   @Override
   public int hashCode()
   {
-    return getName() != null ? getName().hashCode() : 0;
+    return metadata.hashCode();
   }
 
   @Override
diff --git a/server/src/main/java/io/druid/client/HttpServerInventoryView.java b/server/src/main/java/io/druid/client/HttpServerInventoryView.java
index ad18f49ae3f..33e527425ab 100644
--- a/server/src/main/java/io/druid/client/HttpServerInventoryView.java
+++ b/server/src/main/java/io/druid/client/HttpServerInventoryView.java
@@ -24,7 +24,6 @@ import com.fasterxml.jackson.jaxrs.smile.SmileMediaTypes;
 import com.google.common.base.Function;
 import com.google.common.base.Predicate;
 import com.google.common.base.Predicates;
-import com.google.common.collect.Iterables;
 import com.google.common.collect.Maps;
 import com.google.common.net.HostAndPort;
 import com.google.common.util.concurrent.FutureCallback;
@@ -67,6 +66,7 @@ import javax.servlet.http.HttpServletResponse;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.URL;
+import java.util.Collection;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.BlockingQueue;
@@ -78,6 +78,7 @@ import java.util.concurrent.LinkedBlockingDeque;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.stream.Collectors;
 
 /**
  * This class uses CuratorInventoryManager to listen for queryable server membership which serve segments(e.g. Historicals).
@@ -294,18 +295,12 @@ public class HttpServerInventoryView implements ServerInventoryView, FilteredSer
   }
 
   @Override
-  public Iterable<DruidServer> getInventory()
+  public Collection<DruidServer> getInventory()
   {
-    return Iterables.transform(
-        servers.values(), new Function<DruidServerHolder, DruidServer>()
-        {
-          @Override
-          public DruidServer apply(DruidServerHolder input)
-          {
-            return input.druidServer;
-          }
-        }
-    );
+    return servers.values()
+                  .stream()
+                  .map(serverHolder -> serverHolder.druidServer)
+                  .collect(Collectors.toList());
   }
 
   private void runSegmentCallbacks(
@@ -657,7 +652,7 @@ public class HttpServerInventoryView implements ServerInventoryView, FilteredSer
     {
       if (finalPredicate.apply(Pair.of(druidServer.getMetadata(), segment))) {
         if (druidServer.getSegment(segment.getIdentifier()) == null) {
-          druidServer.addDataSegment(segment.getIdentifier(), segment);
+          druidServer.addDataSegment(segment);
           runSegmentCallbacks(
               new Function<SegmentCallback, CallbackAction>()
               {
diff --git a/server/src/main/java/io/druid/client/ImmutableDruidDataSource.java b/server/src/main/java/io/druid/client/ImmutableDruidDataSource.java
index c83d674e428..95532f89195 100644
--- a/server/src/main/java/io/druid/client/ImmutableDruidDataSource.java
+++ b/server/src/main/java/io/druid/client/ImmutableDruidDataSource.java
@@ -19,12 +19,13 @@
 
 package io.druid.client;
 
+import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.ImmutableSet;
 import io.druid.timeline.DataSegment;
 
+import java.util.Collection;
 import java.util.Map;
-import java.util.Set;
+import java.util.Objects;
 
 /**
 */
@@ -32,20 +33,17 @@ public class ImmutableDruidDataSource
 {
   private final String name;
   private final ImmutableMap<String, String> properties;
-  private final ImmutableMap<String, DataSegment> partitionNames;
-  private final ImmutableSet<DataSegment> segmentsHolder;
+  private final ImmutableMap<String, DataSegment> idToSegments;
 
   public ImmutableDruidDataSource(
       String name,
       ImmutableMap<String, String> properties,
-      ImmutableMap<String, DataSegment> partitionNames,
-      ImmutableSet<DataSegment> segmentsHolder
+      ImmutableMap<String, DataSegment> idToSegments
   )
   {
-    this.name = name;
+    this.name = Preconditions.checkNotNull(name);
     this.properties = properties;
-    this.partitionNames = partitionNames;
-    this.segmentsHolder = segmentsHolder;
+    this.idToSegments = idToSegments;
   }
 
   public String getName()
@@ -58,29 +56,58 @@ public class ImmutableDruidDataSource
     return properties;
   }
 
-  public Map<String, DataSegment> getPartitionNames()
-  {
-    return partitionNames;
-  }
-
   public boolean isEmpty()
   {
-    return segmentsHolder.isEmpty();
+    return idToSegments.isEmpty();
   }
 
-  public Set<DataSegment> getSegments()
+  public Collection<DataSegment> getSegments()
   {
-    return segmentsHolder;
+    return idToSegments.values();
+  }
+
+  public DataSegment getSegment(String segmentIdentifier)
+  {
+    return idToSegments.get(segmentIdentifier);
   }
 
   @Override
   public String toString()
   {
-    // partitionNames is intentionally ignored because it is usually large
+    // The detail of idToSegments is intentionally ignored because it is usually large
     return "ImmutableDruidDataSource{"
            + "name='" + name
-           + "', segments='" + segmentsHolder
+           + "', # of segments='" + idToSegments.size()
            + "', properties='" + properties
            + "'}";
   }
+
+  @Override
+  public boolean equals(Object o)
+  {
+    if (this == o) {
+      return true;
+    }
+
+    if (o == null || !getClass().equals(o.getClass())) {
+      return false;
+    }
+
+    final ImmutableDruidDataSource that = (ImmutableDruidDataSource) o;
+    if (!this.name.equals(that.name)) {
+      return false;
+    }
+
+    if (!this.properties.equals(that.properties)) {
+      return false;
+    }
+
+    return this.idToSegments.equals(that.idToSegments);
+  }
+
+  @Override
+  public int hashCode()
+  {
+    return Objects.hash(name, properties, idToSegments);
+  }
 }
diff --git a/server/src/main/java/io/druid/client/ImmutableDruidServer.java b/server/src/main/java/io/druid/client/ImmutableDruidServer.java
index af36ec9563c..d215a88aaba 100644
--- a/server/src/main/java/io/druid/client/ImmutableDruidServer.java
+++ b/server/src/main/java/io/druid/client/ImmutableDruidServer.java
@@ -19,6 +19,7 @@
 
 package io.druid.client;
 
+import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableMap;
 import com.metamx.common.StringUtils;
 import io.druid.server.coordination.DruidServerMetadata;
@@ -43,7 +44,7 @@ public class ImmutableDruidServer
       ImmutableMap<String, DataSegment> segments
   )
   {
-    this.metadata = metadata;
+    this.metadata = Preconditions.checkNotNull(metadata);
     this.currSize = currSize;
     this.segments = segments;
     this.dataSources = dataSources;
@@ -128,4 +129,29 @@ public class ImmutableDruidServer
            + "', sources='" + dataSources
            + "'}";
   }
+
+  @Override
+  public boolean equals(Object o)
+  {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+
+    ImmutableDruidServer that = (ImmutableDruidServer) o;
+
+    if (!metadata.equals(that.metadata)) {
+      return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode()
+  {
+    return metadata.hashCode();
+  }
 }
diff --git a/server/src/main/java/io/druid/client/InventoryView.java b/server/src/main/java/io/druid/client/InventoryView.java
index fd9ea0c0b2b..dab5d4c387b 100644
--- a/server/src/main/java/io/druid/client/InventoryView.java
+++ b/server/src/main/java/io/druid/client/InventoryView.java
@@ -21,12 +21,14 @@ package io.druid.client;
 
 import io.druid.timeline.DataSegment;
 
+import java.util.Collection;
+
 /**
 */
 public interface InventoryView
 {
   DruidServer getInventoryValue(String string);
 
-  Iterable<DruidServer> getInventory();
+  Collection<DruidServer> getInventory();
 
   boolean isStarted();
 
   boolean isSegmentLoadedByServer(String serverKey, DataSegment segment);
 }
diff --git a/server/src/main/java/io/druid/curator/inventory/CuratorInventoryManager.java b/server/src/main/java/io/druid/curator/inventory/CuratorInventoryManager.java
index 857b5b2250e..9c4f226f53c 100644
--- a/server/src/main/java/io/druid/curator/inventory/CuratorInventoryManager.java
+++ b/server/src/main/java/io/druid/curator/inventory/CuratorInventoryManager.java
@@ -19,8 +19,6 @@
 
 package io.druid.curator.inventory;
 
-import com.google.common.base.Function;
-import com.google.common.collect.Iterables;
 import com.google.common.collect.Sets;
 import io.druid.curator.cache.PathChildrenCacheFactory;
 import io.druid.java.util.common.StringUtils;
@@ -36,11 +34,13 @@ import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener;
 import org.apache.curator.utils.ZKPaths;
 
 import java.io.IOException;
+import java.util.Collection;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.atomic.AtomicReference;
+import java.util.stream.Collectors;
 
 /**
  * This class is deprecated. Use {@link io.druid.client.HttpServerInventoryView} for segment discovery.
@@ -163,19 +163,12 @@ public class CuratorInventoryManager<ContainerClass, InventoryClass>
     return containerHolder == null ? null : containerHolder.getContainer();
   }
 
-  public Iterable<ContainerClass> getInventory()
+  public Collection<ContainerClass> getInventory()
   {
-    return Iterables.transform(
-        containers.values(),
-        new Function<ContainerHolder, ContainerClass>()
-        {
-          @Override
-          public ContainerClass apply(ContainerHolder input)
-          {
-            return input.getContainer();
-          }
-        }
-    );
+    return containers.values()
+                     .stream()
+                     .map(ContainerHolder::getContainer)
+                     .collect(Collectors.toList());
   }
 
   private byte[] getZkDataForNode(String path)
diff --git a/server/src/main/java/io/druid/metadata/MetadataSegmentManager.java b/server/src/main/java/io/druid/metadata/MetadataSegmentManager.java
index 5e92d90c2df..543ca8e2ac0 100644
--- a/server/src/main/java/io/druid/metadata/MetadataSegmentManager.java
+++ b/server/src/main/java/io/druid/metadata/MetadataSegmentManager.java
@@ -19,9 +19,10 @@
 
 package io.druid.metadata;
 
-import io.druid.client.DruidDataSource;
+import io.druid.client.ImmutableDruidDataSource;
 import org.joda.time.Interval;
 
+import javax.annotation.Nullable;
 import java.util.Collection;
 import java.util.List;
 
@@ -44,9 +45,10 @@ public interface MetadataSegmentManager
 
   boolean isStarted();
 
-  DruidDataSource getInventoryValue(String key);
+  @Nullable
+  ImmutableDruidDataSource getInventoryValue(String key);
 
-  Collection<DruidDataSource> getInventory();
+  Collection<ImmutableDruidDataSource> getInventory();
 
   Collection<String> getAllDatasourceNames();
 
diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java
index a7b073d0370..1a04d42c632 100644
--- a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java
+++ b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java
@@ -35,6 +35,7 @@ import com.google.common.util.concurrent.MoreExecutors;
 import com.google.inject.Inject;
 import com.metamx.emitter.EmittingLogger;
 import io.druid.client.DruidDataSource;
+import io.druid.client.ImmutableDruidDataSource;
 import io.druid.concurrent.LifecycleLock;
 import io.druid.guice.ManageLifecycle;
 import io.druid.java.util.common.DateTimes;
@@ -63,6 +64,7 @@ import org.skife.jdbi.v2.tweak.HandleCallback;
 import org.skife.jdbi.v2.tweak.ResultSetMapper;
 import org.skife.jdbi.v2.util.ByteArrayMapper;
 
+import javax.annotation.Nullable;
 import java.io.IOException;
 import java.sql.ResultSet;
 import java.sql.SQLException;
@@ -74,6 +76,7 @@ import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicReference;
+import java.util.stream.Collectors;
 
 /**
 */
@@ -369,15 +372,21 @@ public class SQLMetadataSegmentManager implements MetadataSegmentManager
   }
 
   @Override
-  public DruidDataSource getInventoryValue(String key)
+  @Nullable
+  public ImmutableDruidDataSource getInventoryValue(String key)
   {
-    return dataSourcesRef.get().get(key);
+    final DruidDataSource dataSource = dataSourcesRef.get().get(key);
+    return dataSource == null ? null : dataSource.toImmutableDruidDataSource();
   }
 
   @Override
-  public Collection<DruidDataSource> getInventory()
+  public Collection<ImmutableDruidDataSource> getInventory()
   {
-    return dataSourcesRef.get().values();
+    return dataSourcesRef.get()
+                         .values()
+                         .stream()
+                         .map(DruidDataSource::toImmutableDruidDataSource)
+                         .collect(Collectors.toList());
   }
 
   @Override
@@ -495,7 +504,7 @@ public class SQLMetadataSegmentManager implements MetadataSegmentManager
           }
 
           if (!dataSource.getSegments().contains(segment)) {
-            dataSource.addSegment(segment.getIdentifier(), segment);
+            dataSource.addSegment(segment);
           }
         }
 
diff --git a/server/src/main/java/io/druid/server/coordination/DruidServerMetadata.java b/server/src/main/java/io/druid/server/coordination/DruidServerMetadata.java
index aa87780fc70..58139f65323 100644
--- a/server/src/main/java/io/druid/server/coordination/DruidServerMetadata.java
+++ b/server/src/main/java/io/druid/server/coordination/DruidServerMetadata.java
@@ -21,6 +21,9 @@ package io.druid.server.coordination;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Preconditions;
+
+import java.util.Objects;
 
 /**
 */
@@ -45,7 +48,7 @@ public class DruidServerMetadata
       @JsonProperty("priority") int priority
   )
   {
-    this.name = name;
+    this.name = Preconditions.checkNotNull(name);
     this.hostAndPort = hostAndPort;
     this.hostAndTlsPort = hostAndTlsPort;
     this.maxSize = maxSize;
@@ -118,38 +121,31 @@ public class DruidServerMetadata
 
     DruidServerMetadata that = (DruidServerMetadata) o;
 
+    if (!name.equals(that.name)) {
+      return false;
+    }
+    if (!Objects.equals(hostAndPort, that.hostAndPort)) {
+      return false;
+    }
+    if (!Objects.equals(hostAndTlsPort, that.hostAndTlsPort)) {
+      return false;
+    }
     if (maxSize != that.maxSize) {
       return false;
     }
-    if (priority != that.priority) {
+    if (!Objects.equals(tier, that.tier)) {
      return false;
     }
-    if (name != null ? !name.equals(that.name) : that.name != null) {
+    if (type != that.type) {
       return false;
     }
-    if (hostAndPort != null ? !hostAndPort.equals(that.hostAndPort) : that.hostAndPort != null) {
-      return false;
-    }
-    if (hostAndTlsPort != null ? !hostAndTlsPort.equals(that.hostAndTlsPort) : that.hostAndTlsPort != null) {
-      return false;
-    }
-    if (tier != null ? !tier.equals(that.tier) : that.tier != null) {
-      return false;
-    }
-    return type == that.type;
+    return priority == that.priority;
   }
 
   @Override
   public int hashCode()
   {
-    int result = name != null ? name.hashCode() : 0;
-    result = 31 * result + (hostAndPort != null ? hostAndPort.hashCode() : 0);
-    result = 31 * result + (hostAndTlsPort != null ? hostAndTlsPort.hashCode() : 0);
-    result = 31 * result + (int) (maxSize ^ (maxSize >>> 32));
-    result = 31 * result + (tier != null ? tier.hashCode() : 0);
-    result = 31 * result + (type != null ? type.hashCode() : 0);
-    result = 31 * result + priority;
-    return result;
+    return Objects.hash(name, hostAndPort, hostAndTlsPort, maxSize, tier, type, priority);
   }
 
   @Override
diff --git a/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java b/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java
index 865d72bb0a0..7239aca5b22 100644
--- a/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java
+++ b/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java
@@ -23,7 +23,6 @@ import com.google.common.base.Function;
 import com.google.common.base.Predicate;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Ordering;
@@ -41,7 +40,6 @@ import io.druid.client.ServerInventoryView;
 import io.druid.client.coordinator.Coordinator;
 import io.druid.client.indexing.IndexingServiceClient;
 import io.druid.common.config.JacksonConfigManager;
-import io.druid.java.util.common.concurrent.Execs;
 import io.druid.curator.discovery.ServiceAnnouncer;
 import io.druid.discovery.DruidLeaderSelector;
 import io.druid.guice.ManageLifecycle;
@@ -49,7 +47,9 @@ import io.druid.guice.annotations.CoordinatorIndexingServiceHelper;
 import io.druid.guice.annotations.Self;
 import io.druid.java.util.common.DateTimes;
 import io.druid.java.util.common.IAE;
+import io.druid.java.util.common.ISE;
 import io.druid.java.util.common.Pair;
+import io.druid.java.util.common.concurrent.Execs;
 import io.druid.java.util.common.concurrent.ScheduledExecutorFactory;
 import io.druid.java.util.common.concurrent.ScheduledExecutors;
 import io.druid.java.util.common.guava.Comparators;
@@ -88,6 +88,7 @@ import java.util.concurrent.Callable;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.ScheduledExecutorService;
+import java.util.stream.Collectors;
 
 /**
 */
@@ -290,7 +291,7 @@ public class DruidCoordinator
   public Map<String, Double> getLoadStatus()
   {
     Map<String, Double> loadStatus = Maps.newHashMap();
-    for (DruidDataSource dataSource : metadataSegmentManager.getInventory()) {
+    for (ImmutableDruidDataSource dataSource : metadataSegmentManager.getInventory()) {
       final Set<DataSegment> segments = Sets.newHashSet(dataSource.getSegments());
       final int availableSegmentSize = segments.size();
 
@@ -326,16 +327,6 @@ public class DruidCoordinator
     metadataSegmentManager.removeSegment(segment.getDataSource(), segment.getIdentifier());
   }
 
-  public void removeDatasource(String ds)
-  {
-    metadataSegmentManager.removeDatasource(ds);
-  }
-
-  public void enableDatasource(String ds)
-  {
-    metadataSegmentManager.enableDatasource(ds);
-  }
-
   public String getCurrentLeader()
   {
     return coordLeaderSelector.getCurrentLeader();
@@ -353,6 +344,7 @@ public class DruidCoordinator
       if (callback != null) {
         callback.execute();
       }
+      throw new ISE("Cannot move null DataSegment");
     }
     String segmentName = segment.getIdentifier();
     try {
@@ -360,7 +352,7 @@ public class DruidCoordinator
         throw new IAE("Cannot move [%s] to and from the same server [%s]", segmentName, fromServer.getName());
       }
 
-      DruidDataSource dataSource = metadataSegmentManager.getInventoryValue(segment.getDataSource());
+      ImmutableDruidDataSource dataSource = metadataSegmentManager.getInventoryValue(segment.getDataSource());
       if (dataSource == null) {
         throw new IAE("Unable to find dataSource for segment [%s] in metadata", segmentName);
       }
@@ -460,21 +452,12 @@ public class DruidCoordinator
     return availableSegments;
   }
 
-  public Iterable<DataSegment> getAvailableDataSegments()
+  private List<DataSegment> getAvailableDataSegments()
   {
-    return Iterables.concat(
-        Iterables.transform(
-            metadataSegmentManager.getInventory(),
-            new Function<DruidDataSource, Iterable<DataSegment>>()
-            {
-              @Override
-              public Iterable<DataSegment> apply(DruidDataSource input)
-              {
-                return input.getSegments();
-              }
-            }
-        )
-    );
+    return metadataSegmentManager.getInventory()
+                                 .stream()
+                                 .flatMap(source -> source.getSegments().stream())
+                                 .collect(Collectors.toList());
   }
 
   @LifecycleStart
diff --git a/server/src/main/java/io/druid/server/coordinator/DruidCoordinatorRuntimeParams.java b/server/src/main/java/io/druid/server/coordinator/DruidCoordinatorRuntimeParams.java
index 183cb781e8a..e2b51771c17 100644
--- a/server/src/main/java/io/druid/server/coordinator/DruidCoordinatorRuntimeParams.java
+++ b/server/src/main/java/io/druid/server/coordinator/DruidCoordinatorRuntimeParams.java
@@ -22,7 +22,7 @@ package io.druid.server.coordinator;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 import com.metamx.emitter.service.ServiceEmitter;
-import io.druid.client.DruidDataSource;
+import io.druid.client.ImmutableDruidDataSource;
 import io.druid.java.util.common.DateTimes;
 import io.druid.metadata.MetadataRuleManager;
 import io.druid.timeline.DataSegment;
@@ -30,8 +30,10 @@ import org.joda.time.DateTime;
 
 import java.util.Collection;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
+import java.util.TreeSet;
 
 /**
 */
@@ -41,7 +43,7 @@ public class DruidCoordinatorRuntimeParams
   private final DruidCluster druidCluster;
   private final MetadataRuleManager databaseRuleManager;
   private final SegmentReplicantLookup segmentReplicantLookup;
-  private final Set<DruidDataSource> dataSources;
+  private final Set<ImmutableDruidDataSource> dataSources;
   private final Set<DataSegment> availableSegments;
   private final Map<String, LoadQueuePeon> loadManagementPeons;
   private final ReplicationThrottler replicationManager;
@@ -56,7 +58,7 @@ public class DruidCoordinatorRuntimeParams
       DruidCluster druidCluster,
       MetadataRuleManager databaseRuleManager,
       SegmentReplicantLookup segmentReplicantLookup,
-      Set<DruidDataSource> dataSources,
+      Set<ImmutableDruidDataSource> dataSources,
       Set<DataSegment> availableSegments,
       Map<String, LoadQueuePeon> loadManagementPeons,
       ReplicationThrottler replicationManager,
@@ -102,7 +104,7 @@ public class DruidCoordinatorRuntimeParams
     return segmentReplicantLookup;
   }
 
-  public Set<DruidDataSource> getDataSources()
+  public Set<ImmutableDruidDataSource> getDataSources()
   {
     return dataSources;
   }
@@ -201,7 +203,7 @@ public class DruidCoordinatorRuntimeParams
     private DruidCluster druidCluster;
     private MetadataRuleManager databaseRuleManager;
     private SegmentReplicantLookup segmentReplicantLookup;
-    private final Set<DruidDataSource> dataSources;
+    private final Set<ImmutableDruidDataSource> dataSources;
     private final Set<DataSegment> availableSegments;
     private final Map<String, LoadQueuePeon> loadManagementPeons;
     private ReplicationThrottler replicationManager;
@@ -217,8 +219,8 @@ public class DruidCoordinatorRuntimeParams
       this.druidCluster = null;
       this.databaseRuleManager = null;
       this.segmentReplicantLookup = null;
-      this.dataSources = Sets.newHashSet();
-      this.availableSegments = Sets.newTreeSet(DruidCoordinator.SEGMENT_COMPARATOR);
+      this.dataSources = new HashSet<>();
+      this.availableSegments = new TreeSet<>(DruidCoordinator.SEGMENT_COMPARATOR);
       this.loadManagementPeons = Maps.newHashMap();
       this.replicationManager = null;
       this.emitter = null;
@@ -232,7 +234,7 @@ public class DruidCoordinatorRuntimeParams
         DruidCluster cluster,
         MetadataRuleManager databaseRuleManager,
         SegmentReplicantLookup segmentReplicantLookup,
-        Set<DruidDataSource> dataSources,
+        Set<ImmutableDruidDataSource> dataSources,
         Set<DataSegment> availableSegments,
         Map<String, LoadQueuePeon> loadManagementPeons,
         ReplicationThrottler replicationManager,
@@ -301,7 +303,7 @@ public class DruidCoordinatorRuntimeParams
       return this;
     }
 
-    public Builder withDatasources(Collection<DruidDataSource> dataSourcesCollection)
+    public Builder withDatasources(Collection<ImmutableDruidDataSource> dataSourcesCollection)
     {
       dataSources.addAll(Collections.unmodifiableCollection(dataSourcesCollection));
       return this;
     }
diff --git a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorLogger.java b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorLogger.java
index d295d47e493..ced5efb1dc7 100644
--- a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorLogger.java
+++ b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorLogger.java
@@ -21,7 +21,6 @@ package io.druid.server.coordinator.helper;
 
 import com.metamx.emitter.service.ServiceEmitter;
 import com.metamx.emitter.service.ServiceMetricEvent;
-import io.druid.client.DruidDataSource;
 import io.druid.client.ImmutableDruidServer;
 import io.druid.java.util.common.logger.Logger;
 import io.druid.query.DruidMetrics;
@@ -259,7 +258,7 @@ public class DruidCoordinatorLogger implements DruidCoordinatorHelper
     final Stream<DataSegment> allSegments = params
         .getDataSources()
         .stream()
-        .flatMap((final DruidDataSource dataSource) -> dataSource.getSegments().stream());
+        .flatMap(dataSource -> dataSource.getSegments().stream());
 
     allSegments
         .collect(Collectors.groupingBy(DataSegment::getDataSource))
diff --git a/server/src/main/java/io/druid/server/http/DatasourcesResource.java b/server/src/main/java/io/druid/server/http/DatasourcesResource.java
index 8ce337485ba..c53e37158fb 100644
--- a/server/src/main/java/io/druid/server/http/DatasourcesResource.java
+++ b/server/src/main/java/io/druid/server/http/DatasourcesResource.java
@@ -29,6 +29,7 @@ import com.sun.jersey.spi.container.ResourceFilters;
 import io.druid.client.CoordinatorServerView;
 import io.druid.client.DruidDataSource;
 import io.druid.client.DruidServer;
+import io.druid.client.ImmutableDruidDataSource;
 import io.druid.client.ImmutableSegmentLoadInfo;
 import io.druid.client.SegmentLoadInfo;
 import io.druid.client.indexing.IndexingServiceClient;
@@ -68,7 +69,9 @@ import java.util.Comparator;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Set;
+import java.util.stream.Collectors;
 
 /**
 */
@@ -108,33 +111,27 @@ public class DatasourcesResource
   )
   {
     Response.ResponseBuilder builder = Response.ok();
-    final Set<DruidDataSource> datasources = InventoryViewUtils.getSecuredDataSources(
+    final Set<ImmutableDruidDataSource> datasources = InventoryViewUtils.getSecuredDataSources(
         req,
         serverInventoryView,
         authorizerMapper
     );
 
+    final Object entity;
+
     if (full != null) {
-      return builder.entity(datasources).build();
+      entity = datasources;
     } else if (simple != null) {
-      return builder.entity(
-          Lists.newArrayList(
-              Iterables.transform(
-                  datasources,
-                  (DruidDataSource dataSource) -> makeSimpleDatasource(dataSource)
-              )
-          )
-      ).build();
+      entity = datasources.stream()
+                          .map(this::makeSimpleDatasource)
+                          .collect(Collectors.toList());
+    } else {
+      entity = datasources.stream()
+                          .map(ImmutableDruidDataSource::getName)
+                          .collect(Collectors.toList());
     }
 
-    return builder.entity(
-        Lists.newArrayList(
-            Iterables.transform(
-                datasources,
-                (DruidDataSource dataSource) -> dataSource.getName()
-            )
-        )
-    ).build();
+    return builder.entity(entity).build();
   }
 
   @GET
@@ -146,7 +143,7 @@ public class DatasourcesResource
@QueryParam("full") final String full ) { - DruidDataSource dataSource = getDataSource(dataSourceName); + ImmutableDruidDataSource dataSource = getDataSource(dataSourceName); if (dataSource == null) { return Response.noContent().build(); @@ -270,7 +267,7 @@ public class DatasourcesResource @QueryParam("full") String full ) { - final DruidDataSource dataSource = getDataSource(dataSourceName); + final ImmutableDruidDataSource dataSource = getDataSource(dataSourceName); if (dataSource == null) { return Response.noContent().build(); @@ -335,7 +332,7 @@ public class DatasourcesResource @QueryParam("full") String full ) { - final DruidDataSource dataSource = getDataSource(dataSourceName); + final ImmutableDruidDataSource dataSource = getDataSource(dataSourceName); final Interval theInterval = Intervals.of(interval.replace("_", "/")); if (dataSource == null) { @@ -404,7 +401,7 @@ public class DatasourcesResource @QueryParam("full") String full ) { - DruidDataSource dataSource = getDataSource(dataSourceName); + ImmutableDruidDataSource dataSource = getDataSource(dataSourceName); if (dataSource == null) { return Response.noContent().build(); } @@ -431,7 +428,7 @@ public class DatasourcesResource @PathParam("segmentId") String segmentId ) { - DruidDataSource dataSource = getDataSource(dataSourceName); + ImmutableDruidDataSource dataSource = getDataSource(dataSourceName); if (dataSource == null) { return Response.noContent().build(); } @@ -496,40 +493,34 @@ public class DatasourcesResource return Response.ok(retVal).build(); } - private DruidDataSource getDataSource(final String dataSourceName) + @Nullable + private ImmutableDruidDataSource getDataSource(final String dataSourceName) { - Iterable dataSources = - Iterables.concat( - Iterables.transform( - serverInventoryView.getInventory(), - (DruidServer input) -> input.getDataSource(dataSourceName) - ) - ); + List dataSources = serverInventoryView + .getInventory() + .stream() + .map(server -> server.getDataSource(dataSourceName)) + .filter(Objects::nonNull) + .map(DruidDataSource::toImmutableDruidDataSource) + .collect(Collectors.toList()); - List validDataSources = Lists.newArrayList(); - for (DruidDataSource dataSource : dataSources) { - if (dataSource != null) { - validDataSources.add(dataSource); - } - } - if (validDataSources.isEmpty()) { + if (dataSources.isEmpty()) { return null; } Map segmentMap = Maps.newHashMap(); - for (DruidDataSource dataSource : validDataSources) { - if (dataSource != null) { - Iterable segments = dataSource.getSegments(); - for (DataSegment segment : segments) { - segmentMap.put(segment.getIdentifier(), segment); - } + for (ImmutableDruidDataSource dataSource : dataSources) { + Iterable segments = dataSource.getSegments(); + for (DataSegment segment : segments) { + segmentMap.put(segment.getIdentifier(), segment); } } - return new DruidDataSource( + return new ImmutableDruidDataSource( dataSourceName, - ImmutableMap.of() - ).addSegments(segmentMap); + ImmutableMap.of(), + ImmutableMap.copyOf(segmentMap) + ); } private Pair> getSegment(String segmentId) @@ -551,7 +542,7 @@ public class DatasourcesResource return new Pair<>(theSegment, servers); } - private Map makeSimpleDatasource(DruidDataSource input) + private Map makeSimpleDatasource(ImmutableDruidDataSource input) { return new ImmutableMap.Builder() .put("name", input.getName()) diff --git a/server/src/main/java/io/druid/server/http/IntervalsResource.java b/server/src/main/java/io/druid/server/http/IntervalsResource.java index 2181e9cb6f7..d7237f40fac 100644 --- 
--- a/server/src/main/java/io/druid/server/http/IntervalsResource.java
+++ b/server/src/main/java/io/druid/server/http/IntervalsResource.java
@@ -21,7 +21,7 @@ package io.druid.server.http;
 
 import com.google.common.collect.Maps;
 import com.google.inject.Inject;
-import io.druid.client.DruidDataSource;
+import io.druid.client.ImmutableDruidDataSource;
 import io.druid.client.InventoryView;
 import io.druid.java.util.common.Intervals;
 import io.druid.java.util.common.MapUtils;
@@ -70,14 +70,14 @@ public class IntervalsResource
   public Response getIntervals(@Context final HttpServletRequest req)
   {
     final Comparator<Interval> comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd());
-    final Set<DruidDataSource> datasources = InventoryViewUtils.getSecuredDataSources(
+    final Set<ImmutableDruidDataSource> datasources = InventoryViewUtils.getSecuredDataSources(
         req,
         serverInventoryView,
         authorizerMapper
     );
 
     final Map<Interval, Map<String, Map<String, Object>>> retVal = Maps.newTreeMap(comparator);
-    for (DruidDataSource dataSource : datasources) {
+    for (ImmutableDruidDataSource dataSource : datasources) {
       for (DataSegment dataSegment : dataSource.getSegments()) {
         Map<String, Map<String, Object>> interval = retVal.get(dataSegment.getInterval());
         if (interval == null) {
@@ -102,7 +102,7 @@ public class IntervalsResource
   )
   {
     final Interval theInterval = Intervals.of(interval.replace("_", "/"));
-    final Set<DruidDataSource> datasources = InventoryViewUtils.getSecuredDataSources(
+    final Set<ImmutableDruidDataSource> datasources = InventoryViewUtils.getSecuredDataSources(
         req,
         serverInventoryView,
         authorizerMapper
@@ -112,7 +112,7 @@ public class IntervalsResource
 
     if (full != null) {
       final Map<Interval, Map<String, Map<String, Object>>> retVal = Maps.newTreeMap(comparator);
-      for (DruidDataSource dataSource : datasources) {
+      for (ImmutableDruidDataSource dataSource : datasources) {
         for (DataSegment dataSegment : dataSource.getSegments()) {
           if (theInterval.contains(dataSegment.getInterval())) {
             Map<String, Map<String, Object>> dataSourceInterval = retVal.get(dataSegment.getInterval());
@@ -130,7 +130,7 @@ public class IntervalsResource
 
     if (simple != null) {
       final Map<Interval, Map<String, Object>> retVal = Maps.newHashMap();
-      for (DruidDataSource dataSource : datasources) {
+      for (ImmutableDruidDataSource dataSource : datasources) {
         for (DataSegment dataSegment : dataSource.getSegments()) {
           if (theInterval.contains(dataSegment.getInterval())) {
             Map<String, Object> properties = retVal.get(dataSegment.getInterval());
@@ -152,7 +152,7 @@ public class IntervalsResource
     }
 
     final Map<String, Object> retVal = Maps.newHashMap();
-    for (DruidDataSource dataSource : datasources) {
+    for (ImmutableDruidDataSource dataSource : datasources) {
       for (DataSegment dataSegment : dataSource.getSegments()) {
         if (theInterval.contains(dataSegment.getInterval())) {
           retVal.put("size", MapUtils.getLong(retVal, "size", 0L) + dataSegment.getSize());
@@ -166,7 +166,7 @@ public class IntervalsResource
 
   private void setProperties(
       final Map<Interval, Map<String, Map<String, Object>>> retVal,
-      DruidDataSource dataSource, DataSegment dataSegment
+      ImmutableDruidDataSource dataSource, DataSegment dataSegment
   )
   {
     Map<String, Object> properties = retVal.get(dataSegment.getInterval()).get(dataSource.getName());
diff --git a/server/src/main/java/io/druid/server/http/InventoryViewUtils.java b/server/src/main/java/io/druid/server/http/InventoryViewUtils.java
index dc7cb5e9736..c60f12d51db 100644
--- a/server/src/main/java/io/druid/server/http/InventoryViewUtils.java
+++ b/server/src/main/java/io/druid/server/http/InventoryViewUtils.java
@@ -19,60 +19,41 @@
 
 package io.druid.server.http;
 
-import com.google.common.base.Function;
-import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-
 import io.druid.client.DruidDataSource;
-import io.druid.client.DruidServer;
+import io.druid.client.ImmutableDruidDataSource;
 import io.druid.client.InventoryView;
 import io.druid.java.util.common.ISE;
 import io.druid.server.security.AuthorizationUtils;
 import io.druid.server.security.AuthorizerMapper;
 
 import javax.servlet.http.HttpServletRequest;
+import java.util.Collections;
 import java.util.Comparator;
-import java.util.Set;
+import java.util.SortedSet;
 import java.util.TreeSet;
 
-public class InventoryViewUtils
+public interface InventoryViewUtils
 {
-
-  public static Set<DruidDataSource> getDataSources(InventoryView serverInventoryView)
+  static Comparator<ImmutableDruidDataSource> comparingByName()
   {
-    TreeSet<DruidDataSource> dataSources = Sets.newTreeSet(
-        new Comparator<DruidDataSource>()
-        {
-          @Override
-          public int compare(DruidDataSource druidDataSource, DruidDataSource druidDataSource1)
-          {
-            return druidDataSource.getName().compareTo(druidDataSource1.getName());
-          }
-        }
-    );
-    dataSources.addAll(
-        Lists.newArrayList(
-            Iterables.concat(
-                Iterables.transform(
-                    serverInventoryView.getInventory(),
-                    new Function<DruidServer, Iterable<DruidDataSource>>()
-                    {
-                      @Override
-                      public Iterable<DruidDataSource> apply(DruidServer input)
-                      {
-                        return input.getDataSources();
-                      }
-                    }
-                )
-            )
-        )
-    );
-    return dataSources;
+    return Comparator.comparing(ImmutableDruidDataSource::getName);
   }
 
-  public static Set<DruidDataSource> getSecuredDataSources(
+  static SortedSet<ImmutableDruidDataSource> getDataSources(InventoryView serverInventoryView)
+  {
+    return serverInventoryView.getInventory()
+                              .stream()
+                              .flatMap(server -> server.getDataSources().stream())
+                              .map(DruidDataSource::toImmutableDruidDataSource)
+                              .collect(
+                                  () -> new TreeSet<>(comparingByName()),
+                                  TreeSet::add,
+                                  TreeSet::addAll
+                              );
+  }
+
+  static SortedSet<ImmutableDruidDataSource> getSecuredDataSources(
       HttpServletRequest request,
       InventoryView inventoryView,
       final AuthorizerMapper authorizerMapper
@@ -82,17 +63,16 @@ public class InventoryViewUtils
       throw new ISE("No authorization mapper found");
     }
 
-    return ImmutableSet.copyOf(
-        AuthorizationUtils.filterAuthorizedResources(
-            request,
-            getDataSources(inventoryView),
-            datasource -> {
-              return Lists.newArrayList(
-                  AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR.apply(datasource.getName())
-              );
-            },
-            authorizerMapper
-        )
+    Iterable<ImmutableDruidDataSource> filteredResources = AuthorizationUtils.filterAuthorizedResources(
+        request,
+        getDataSources(inventoryView),
+        datasource -> Lists.newArrayList(
+            AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR.apply(datasource.getName())
+        ),
+        authorizerMapper
     );
+    SortedSet<ImmutableDruidDataSource> set = new TreeSet<>(comparingByName());
+    filteredResources.forEach(set::add);
+    return Collections.unmodifiableSortedSet(set);
   }
 }
diff --git a/server/src/main/java/io/druid/server/http/MetadataResource.java b/server/src/main/java/io/druid/server/http/MetadataResource.java
index 5d1e6c74bdb..46d42e5f283 100644
--- a/server/src/main/java/io/druid/server/http/MetadataResource.java
+++ b/server/src/main/java/io/druid/server/http/MetadataResource.java
@@ -20,20 +20,19 @@ package io.druid.server.http;
 
 import com.google.common.base.Function;
-import com.google.common.base.Predicate;
 import com.google.common.collect.Collections2;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 import com.google.inject.Inject;
 import com.sun.jersey.spi.container.ResourceFilters;
-import io.druid.client.DruidDataSource;
+import io.druid.client.ImmutableDruidDataSource;
 import io.druid.indexing.overlord.IndexerMetadataStorageCoordinator;
 import io.druid.metadata.MetadataSegmentManager;
 import io.druid.server.http.security.DatasourceResourceFilter;
 import io.druid.server.security.AuthConfig;
-import io.druid.server.security.AuthorizerMapper;
 import io.druid.server.security.AuthorizationUtils;
+import io.druid.server.security.AuthorizerMapper;
 import io.druid.server.security.ResourceAction;
 import io.druid.timeline.DataSegment;
 import org.joda.time.Interval;
@@ -49,6 +48,7 @@ import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 import java.io.IOException;
+import java.util.Collection;
 import java.util.List;
 import java.util.Set;
 
@@ -85,22 +85,13 @@ public class MetadataResource
       @Context final HttpServletRequest req
   )
   {
+    final Collection<ImmutableDruidDataSource> druidDataSources = metadataSegmentManager.getInventory();
     final Set<String> dataSourceNamesPreAuth;
     if (includeDisabled != null) {
       dataSourceNamesPreAuth = Sets.newTreeSet(metadataSegmentManager.getAllDatasourceNames());
     } else {
       dataSourceNamesPreAuth = Sets.newTreeSet(
-          Iterables.transform(
-              metadataSegmentManager.getInventory(),
-              new Function<DruidDataSource, String>()
-              {
-                @Override
-                public String apply(DruidDataSource input)
-                {
-                  return input.getName();
-                }
-              }
-          )
+          Iterables.transform(druidDataSources, ImmutableDruidDataSource::getName)
       );
     }
 
@@ -123,17 +114,7 @@ public class MetadataResource
     // Always use dataSourceNamesPostAuth to determine the set of returned dataSources
     if (full != null && includeDisabled == null) {
       return Response.ok().entity(
-          Collections2.filter(
-              metadataSegmentManager.getInventory(),
-              new Predicate<DruidDataSource>()
-              {
-                @Override
-                public boolean apply(DruidDataSource input)
-                {
-                  return dataSourceNamesPostAuth.contains(input.getName());
-                }
-              }
-          )
+          Collections2.filter(druidDataSources, dataSource -> dataSourceNamesPostAuth.contains(dataSource.getName()))
       ).build();
     } else {
       return Response.ok().entity(dataSourceNamesPostAuth).build();
@@ -148,7 +129,7 @@ public class MetadataResource
       @PathParam("dataSourceName") final String dataSourceName
   )
   {
-    DruidDataSource dataSource = metadataSegmentManager.getInventoryValue(dataSourceName);
+    ImmutableDruidDataSource dataSource = metadataSegmentManager.getInventoryValue(dataSourceName);
     if (dataSource == null) {
       return Response.status(Response.Status.NOT_FOUND).build();
     }
@@ -165,7 +146,7 @@ public class MetadataResource
       @QueryParam("full") String full
   )
   {
-    DruidDataSource dataSource = metadataSegmentManager.getInventoryValue(dataSourceName);
+    ImmutableDruidDataSource dataSource = metadataSegmentManager.getInventoryValue(dataSourceName);
     if (dataSource == null) {
       return Response.status(Response.Status.NOT_FOUND).build();
     }
@@ -237,7 +218,7 @@ public class MetadataResource
       @PathParam("segmentId") String segmentId
   )
   {
-    DruidDataSource dataSource = metadataSegmentManager.getInventoryValue(dataSourceName);
+    ImmutableDruidDataSource dataSource = metadataSegmentManager.getInventoryValue(dataSourceName);
     if (dataSource == null) {
       return Response.status(Response.Status.NOT_FOUND).build();
     }
diff --git a/server/src/test/java/io/druid/metadata/MetadataSegmentManagerTest.java b/server/src/test/java/io/druid/metadata/MetadataSegmentManagerTest.java
index 29dd39fe225..d4f4439d2af 100644
--- a/server/src/test/java/io/druid/metadata/MetadataSegmentManagerTest.java
+++ b/server/src/test/java/io/druid/metadata/MetadataSegmentManagerTest.java
@@ -124,7 +124,7 @@ public class MetadataSegmentManagerTest
     );
     Assert.assertEquals(
         ImmutableSet.of(segment1, segment2),
-        manager.getInventoryValue("wikipedia").getSegments()
+        ImmutableSet.copyOf(manager.getInventoryValue("wikipedia").getSegments())
     );
   }
 
diff --git a/server/src/test/java/io/druid/server/ClientInfoResourceTest.java b/server/src/test/java/io/druid/server/ClientInfoResourceTest.java
index 7a3156d45f4..1dd47859314 100644
--- a/server/src/test/java/io/druid/server/ClientInfoResourceTest.java
+++ b/server/src/test/java/io/druid/server/ClientInfoResourceTest.java
@@ -380,7 +380,7 @@ public class ClientInfoResourceTest
         .metrics(metrics)
         .size(1)
         .build();
-    server.addDataSegment(segment.getIdentifier(), segment);
+    server.addDataSegment(segment);
     ServerSelector ss = new ServerSelector(segment, new HighestPriorityTierSelectorStrategy(new RandomServerSelectorStrategy()));
     timeline.add(Intervals.of(interval), version, new SingleElementPartitionChunk<>(ss));
   }
@@ -404,7 +404,7 @@ public class ClientInfoResourceTest
         .shardSpec(shardSpec)
         .size(1)
         .build();
-    server.addDataSegment(segment.getIdentifier(), segment);
+    server.addDataSegment(segment);
     ServerSelector ss = new ServerSelector(segment, new HighestPriorityTierSelectorStrategy(new RandomServerSelectorStrategy()));
     timeline.add(Intervals.of(interval), version, shardSpec.createChunk(ss));
   }
diff --git a/server/src/test/java/io/druid/server/coordinator/DruidClusterTest.java b/server/src/test/java/io/druid/server/coordinator/DruidClusterTest.java
index d705a70ce3e..90eb5d49e30 100644
--- a/server/src/test/java/io/druid/server/coordinator/DruidClusterTest.java
+++ b/server/src/test/java/io/druid/server/coordinator/DruidClusterTest.java
@@ -75,15 +75,13 @@ public class DruidClusterTest
       new ImmutableDruidDataSource(
           "src1",
          ImmutableMap.of(),
-          ImmutableMap.of(),
-          ImmutableSet.of()
+          ImmutableMap.of()
       ),
       "src2",
       new ImmutableDruidDataSource(
           "src2",
           ImmutableMap.of(),
-          ImmutableMap.of(),
-          ImmutableSet.of()
+          ImmutableMap.of()
       )
   );
 
diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java
index 3ef19d17483..7f251541509 100644
--- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java
+++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java
@@ -363,7 +363,7 @@ public class DruidCoordinatorRuleRunnerTest
         0
     );
     for (DataSegment availableSegment : availableSegments) {
-      normServer.addDataSegment(availableSegment.getIdentifier(), availableSegment);
+      normServer.addDataSegment(availableSegment);
     }
 
     DruidCluster druidCluster = new DruidCluster(
@@ -583,7 +583,7 @@ public class DruidCoordinatorRuleRunnerTest
         0
     );
     for (DataSegment segment : availableSegments) {
-      server.addDataSegment(segment.getIdentifier(), segment);
+      server.addDataSegment(segment);
     }
 
     DruidCluster druidCluster = new DruidCluster(
@@ -653,7 +653,7 @@ public class DruidCoordinatorRuleRunnerTest
         "normal",
         0
     );
-    server1.addDataSegment(availableSegments.get(0).getIdentifier(), availableSegments.get(0));
+    server1.addDataSegment(availableSegments.get(0));
 
     DruidServer server2 = new DruidServer(
         "serverNorm2",
@@ -665,7 +665,7 @@ public class DruidCoordinatorRuleRunnerTest
         0
     );
     for (DataSegment segment : availableSegments) {
-      server2.addDataSegment(segment.getIdentifier(), segment);
+      server2.addDataSegment(segment);
     }
 
     DruidCluster druidCluster = new DruidCluster(
@@ -742,7 +742,7 @@ public class DruidCoordinatorRuleRunnerTest
         "hot",
         0
     );
-    server1.addDataSegment(availableSegments.get(0).getIdentifier(), availableSegments.get(0));
+    server1.addDataSegment(availableSegments.get(0));
     DruidServer server2 = new DruidServer(
         "serverNorm2",
         "hostNorm2",
@@ -753,7 +753,7 @@
         0
     );
     for (DataSegment segment : availableSegments) {
-      server2.addDataSegment(segment.getIdentifier(), segment);
+      server2.addDataSegment(segment);
     }
 
     DruidCluster druidCluster = new DruidCluster(
@@ -841,7 +841,7 @@ public class DruidCoordinatorRuleRunnerTest
         0
     );
     for (DataSegment segment : availableSegments) {
-      server2.addDataSegment(segment.getIdentifier(), segment);
+      server2.addDataSegment(segment);
     }
     DruidCluster druidCluster = new DruidCluster(
         null,
@@ -913,7 +913,7 @@ public class DruidCoordinatorRuleRunnerTest
         "normal",
         0
     );
-    server1.addDataSegment(availableSegments.get(0).getIdentifier(), availableSegments.get(0));
+    server1.addDataSegment(availableSegments.get(0));
     DruidServer server2 = new DruidServer(
         "serverNorm2",
         "hostNorm2",
@@ -923,7 +923,7 @@ public class DruidCoordinatorRuleRunnerTest
         "normal",
         0
     );
-    server2.addDataSegment(availableSegments.get(1).getIdentifier(), availableSegments.get(1));
+    server2.addDataSegment(availableSegments.get(1));
     DruidServer server3 = new DruidServer(
         "serverNorm3",
         "hostNorm3",
@@ -933,8 +933,8 @@ public class DruidCoordinatorRuleRunnerTest
         "normal",
         0
     );
-    server3.addDataSegment(availableSegments.get(1).getIdentifier(), availableSegments.get(1));
-    server3.addDataSegment(availableSegments.get(2).getIdentifier(), availableSegments.get(2));
+    server3.addDataSegment(availableSegments.get(1));
+    server3.addDataSegment(availableSegments.get(2));
 
     mockPeon.dropSegment(EasyMock.anyObject(), EasyMock.anyObject());
     EasyMock.expectLastCall().atLeastOnce();
@@ -1248,7 +1248,7 @@ public class DruidCoordinatorRuleRunnerTest
         0
     );
     for (DataSegment availableSegment : longerAvailableSegments) {
-      server1.addDataSegment(availableSegment.getIdentifier(), availableSegment);
+      server1.addDataSegment(availableSegment);
     }
     DruidServer server2 = new DruidServer(
         "serverNorm2",
@@ -1260,7 +1260,7 @@ public class DruidCoordinatorRuleRunnerTest
         0
     );
     for (DataSegment availableSegment : longerAvailableSegments) {
-      server2.addDataSegment(availableSegment.getIdentifier(), availableSegment);
+      server2.addDataSegment(availableSegment);
     }
 
     DruidCluster druidCluster = new DruidCluster(
diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java
index e6be7efb447..d2f871c4e77 100644
--- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java
+++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java
@@ -30,12 +30,12 @@ import io.druid.client.ImmutableDruidDataSource;
 import io.druid.client.ImmutableDruidServer;
 import io.druid.client.SingleServerInventoryView;
 import io.druid.common.config.JacksonConfigManager;
-import io.druid.java.util.common.concurrent.Execs;
 import io.druid.curator.CuratorTestBase;
 import io.druid.curator.discovery.NoopServiceAnnouncer;
 import io.druid.discovery.DruidLeaderSelector;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.java.util.common.Intervals;
+import io.druid.java.util.common.concurrent.Execs;
 import io.druid.java.util.common.concurrent.ScheduledExecutorFactory;
 import io.druid.metadata.MetadataRuleManager;
 import io.druid.metadata.MetadataSegmentManager;
@@ -237,7 +237,7 @@ public class DruidCoordinatorTest extends CuratorTestBase
HashSet<>()).once(); EasyMock.replay(loadQueuePeon); - DruidDataSource druidDataSource = EasyMock.createNiceMock(DruidDataSource.class); + ImmutableDruidDataSource druidDataSource = EasyMock.createNiceMock(ImmutableDruidDataSource.class); EasyMock.expect(druidDataSource.getSegment(EasyMock.anyString())).andReturn(segment); EasyMock.replay(druidDataSource); EasyMock.expect(databaseSegmentManager.getInventoryValue(EasyMock.anyString())).andReturn(druidDataSource); @@ -275,7 +275,8 @@ public class DruidCoordinatorTest extends CuratorTestBase coordinator.moveSegment( druidServer.toImmutableDruidServer(), druidServer2.toImmutableDruidServer(), - segment, null + segment, + null ); LoadPeonCallback loadCallback = loadCallbackCapture.getValue(); @@ -322,11 +323,11 @@ public class DruidCoordinatorTest extends CuratorTestBase 0x9, 0 ); - druidDataSources[0].addSegment("0", dataSegment); + druidDataSources[0].addSegment(dataSegment); EasyMock.expect(databaseSegmentManager.isStarted()).andReturn(true).anyTimes(); EasyMock.expect(databaseSegmentManager.getInventory()).andReturn( - ImmutableList.of(druidDataSources[0]) + ImmutableList.of(druidDataSources[0].toImmutableDruidDataSource()) ).atLeastOnce(); EasyMock.replay(databaseSegmentManager); ImmutableDruidDataSource immutableDruidDataSource = EasyMock.createNiceMock(ImmutableDruidDataSource.class); @@ -364,7 +365,7 @@ public class DruidCoordinatorTest extends CuratorTestBase if (assignSegmentLatch.getCount() > 0) { //Coordinator should try to assign segment to druidServer historical //Simulate historical loading segment - druidServer.addDataSegment(dataSegment.getIdentifier(), dataSegment); + druidServer.addDataSegment(dataSegment); assignSegmentLatch.countDown(); } else { Assert.fail("The same segment is assigned to the same server multiple times"); @@ -431,11 +432,11 @@ public class DruidCoordinatorTest extends CuratorTestBase getSegment("test", Intervals.of("2016-01-09T10:00:00Z/2016-01-09T12:00:00Z")) }; for (DataSegment segment : segments) { - dataSource.addSegment(segment.getIdentifier(), segment); + dataSource.addSegment(segment); } EasyMock.expect(databaseSegmentManager.getInventory()).andReturn( - ImmutableList.of(dataSource) + ImmutableList.of(dataSource.toImmutableDruidDataSource()) ).atLeastOnce(); EasyMock.replay(databaseSegmentManager); Set availableSegments = coordinator.getOrderedAvailableDataSegments(); diff --git a/server/src/test/java/io/druid/server/coordinator/ServerHolderTest.java b/server/src/test/java/io/druid/server/coordinator/ServerHolderTest.java index 30a97d0a9c6..c25f37bc49f 100644 --- a/server/src/test/java/io/druid/server/coordinator/ServerHolderTest.java +++ b/server/src/test/java/io/druid/server/coordinator/ServerHolderTest.java @@ -21,7 +21,6 @@ package io.druid.server.coordinator; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableSet; import io.druid.client.ImmutableDruidDataSource; import io.druid.client.ImmutableDruidServer; import io.druid.java.util.common.Intervals; @@ -67,15 +66,13 @@ public class ServerHolderTest new ImmutableDruidDataSource( "src1", ImmutableMap.of(), - ImmutableMap.of(), - ImmutableSet.of() + ImmutableMap.of() ), "src2", new ImmutableDruidDataSource( "src2", ImmutableMap.of(), - ImmutableMap.of(), - ImmutableSet.of() + ImmutableMap.of() ) ); diff --git a/server/src/test/java/io/druid/server/coordinator/cost/CachingCostBalancerStrategyTest.java 
b/server/src/test/java/io/druid/server/coordinator/cost/CachingCostBalancerStrategyTest.java index 42cde312d06..7f5617eaae7 100644 --- a/server/src/test/java/io/druid/server/coordinator/cost/CachingCostBalancerStrategyTest.java +++ b/server/src/test/java/io/druid/server/coordinator/cost/CachingCostBalancerStrategyTest.java @@ -138,7 +138,7 @@ public class CachingCostBalancerStrategyTest { DruidServer druidServer = new DruidServer(name, host, null, maxSize, ServerType.HISTORICAL, "normal", 0); createDataSegments(numberOfSegments, random, referenceTime) - .forEach(segment -> druidServer.addDataSegment(segment.getIdentifier(), segment)); + .forEach(druidServer::addDataSegment); return new ServerHolder( druidServer.toImmutableDruidServer(), new LoadQueuePeonTester() diff --git a/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java index 4da23cdd801..daa5a4697ef 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java @@ -114,7 +114,7 @@ public class BroadcastDistributionRuleTest ServerType.HISTORICAL, "hot", 0 - ).addDataSegment(smallSegment.getIdentifier(), smallSegment) + ).addDataSegment(smallSegment) .toImmutableDruidServer(), new LoadQueuePeonTester() ); @@ -129,7 +129,7 @@ public class BroadcastDistributionRuleTest ServerType.HISTORICAL, "hot", 0 - ).addDataSegment(largeSegments.get(0).getIdentifier(), largeSegments.get(0)) + ).addDataSegment(largeSegments.get(0)) .toImmutableDruidServer(), new LoadQueuePeonTester() ) @@ -144,7 +144,7 @@ public class BroadcastDistributionRuleTest ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 0 - ).addDataSegment(largeSegments.get(1).getIdentifier(), largeSegments.get(1)) + ).addDataSegment(largeSegments.get(1)) .toImmutableDruidServer(), new LoadQueuePeonTester() ) @@ -159,7 +159,7 @@ public class BroadcastDistributionRuleTest ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 0 - ).addDataSegment(largeSegments.get(2).getIdentifier(), largeSegments.get(2)) + ).addDataSegment(largeSegments.get(2)) .toImmutableDruidServer(), new LoadQueuePeonTester() ) @@ -175,7 +175,7 @@ public class BroadcastDistributionRuleTest ServerType.HISTORICAL, "hot", 0 - ).addDataSegment(largeSegments2.get(0).getIdentifier(), largeSegments2.get(0)) + ).addDataSegment(largeSegments2.get(0)) .toImmutableDruidServer(), new LoadQueuePeonTester() ) @@ -190,7 +190,7 @@ public class BroadcastDistributionRuleTest ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 0 - ).addDataSegment(largeSegments2.get(1).getIdentifier(), largeSegments2.get(1)) + ).addDataSegment(largeSegments2.get(1)) .toImmutableDruidServer(), new LoadQueuePeonTester() ) diff --git a/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java index d57808a6d61..76174172d9f 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java @@ -303,7 +303,7 @@ public class LoadRuleTest "hot", 0 ); - server1.addDataSegment(segment.getIdentifier(), segment); + server1.addDataSegment(segment); DruidServer server2 = new DruidServer( "serverNorm", "hostNorm", @@ -313,7 +313,7 @@ public class LoadRuleTest DruidServer.DEFAULT_TIER, 0 ); - 
server2.addDataSegment(segment.getIdentifier(), segment); + server2.addDataSegment(segment); DruidServer server3 = new DruidServer( "serverNormNotServing", "hostNorm", @@ -457,8 +457,8 @@ public class LoadRuleTest "hot", 0 ); - server1.addDataSegment(segment.getIdentifier(), segment); - server2.addDataSegment(segment.getIdentifier(), segment); + server1.addDataSegment(segment); + server2.addDataSegment(segment); DruidCluster druidCluster = new DruidCluster( null, diff --git a/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java b/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java index 926e29312fc..315bfb0b487 100644 --- a/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java +++ b/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java @@ -24,6 +24,7 @@ import com.google.common.collect.ImmutableSet; import io.druid.client.CoordinatorServerView; import io.druid.client.DruidDataSource; import io.druid.client.DruidServer; +import io.druid.client.ImmutableDruidDataSource; import io.druid.client.indexing.IndexingServiceClient; import io.druid.java.util.common.Intervals; import io.druid.server.coordination.ServerType; @@ -46,11 +47,13 @@ import javax.servlet.http.HttpServletRequest; import javax.ws.rs.core.Response; import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; +import java.util.stream.Collectors; public class DatasourcesResourceTest { @@ -65,7 +68,7 @@ public class DatasourcesResourceTest { request = EasyMock.createStrictMock(HttpServletRequest.class); inventoryView = EasyMock.createStrictMock(CoordinatorServerView.class); - server = EasyMock.createStrictMock(DruidServer.class); + server = EasyMock.niceMock(DruidServer.class); dataSegmentList = new ArrayList<>(); dataSegmentList.add( new DataSegment( @@ -108,10 +111,10 @@ public class DatasourcesResourceTest ); listDataSources = new ArrayList<>(); listDataSources.add( - new DruidDataSource("datasource1", new HashMap()).addSegment("part1", dataSegmentList.get(0)) + new DruidDataSource("datasource1", new HashMap<>()).addSegment(dataSegmentList.get(0)) ); listDataSources.add( - new DruidDataSource("datasource2", new HashMap()).addSegment("part1", dataSegmentList.get(1)) + new DruidDataSource("datasource2", new HashMap<>()).addSegment(dataSegmentList.get(1)) ); } @@ -155,12 +158,13 @@ public class DatasourcesResourceTest AuthTestUtils.TEST_AUTHORIZER_MAPPER ); Response response = datasourcesResource.getQueryableDataSources("full", null, request); - Set result = (Set) response.getEntity(); - DruidDataSource[] resultantDruidDataSources = new DruidDataSource[result.size()]; - result.toArray(resultantDruidDataSources); + Set result = (Set) response.getEntity(); Assert.assertEquals(200, response.getStatus()); - Assert.assertEquals(2, resultantDruidDataSources.length); - Assert.assertArrayEquals(listDataSources.toArray(), resultantDruidDataSources); + Assert.assertEquals(2, result.size()); + Assert.assertEquals( + listDataSources.stream().map(DruidDataSource::toImmutableDruidDataSource).collect(Collectors.toSet()), + new HashSet<>(result) + ); response = datasourcesResource.getQueryableDataSources(null, null, request); List result1 = (List) response.getEntity(); @@ -236,13 +240,16 @@ public class DatasourcesResourceTest authMapper ); Response response = datasourcesResource.getQueryableDataSources("full", null, request); - Set result 
= (Set) response.getEntity(); - DruidDataSource[] resultantDruidDataSources = new DruidDataSource[result.size()]; - result.toArray(resultantDruidDataSources); + Set result = (Set) response.getEntity(); Assert.assertEquals(200, response.getStatus()); - Assert.assertEquals(1, resultantDruidDataSources.length); - Assert.assertArrayEquals(listDataSources.subList(0, 1).toArray(), resultantDruidDataSources); + Assert.assertEquals(1, result.size()); + Assert.assertEquals( + listDataSources.subList(0, 1).stream() + .map(DruidDataSource::toImmutableDruidDataSource) + .collect(Collectors.toSet()), + new HashSet<>(result) + ); response = datasourcesResource.getQueryableDataSources(null, null, request); List result1 = (List) response.getEntity(); @@ -267,7 +274,6 @@ public class DatasourcesResourceTest EasyMock.expect(server.getDataSource("datasource2")).andReturn( listDataSources.get(1) ).atLeastOnce(); - EasyMock.expect(server.getTier()).andReturn(null).atLeastOnce(); EasyMock.expect(inventoryView.getInventory()).andReturn( ImmutableList.of(server) ).atLeastOnce(); @@ -314,9 +320,9 @@ public class DatasourcesResourceTest EasyMock.replay(inventoryView, server); DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, null, new AuthConfig(), null); Response response = datasourcesResource.getTheDataSource("datasource1", "full"); - DruidDataSource result = (DruidDataSource) response.getEntity(); + ImmutableDruidDataSource result = (ImmutableDruidDataSource) response.getEntity(); Assert.assertEquals(200, response.getStatus()); - Assert.assertEquals(dataSource1, result); + Assert.assertEquals(dataSource1.toImmutableDruidDataSource(), result); EasyMock.verify(inventoryView, server); } @@ -337,9 +343,8 @@ public class DatasourcesResourceTest @Test public void testSimpleGetTheDataSource() throws Exception { - DruidDataSource dataSource1 = new DruidDataSource("datasource1", new HashMap()); + DruidDataSource dataSource1 = new DruidDataSource("datasource1", new HashMap<>()); dataSource1.addSegment( - "partition", new DataSegment("datasegment1", Intervals.of("2010-01-01/P1D"), null, null, null, null, null, 0x9, 10) ); EasyMock.expect(server.getDataSource("datasource1")).andReturn( @@ -410,9 +415,9 @@ public class DatasourcesResourceTest public void testGetSegmentDataSourceIntervals() { server = new DruidServer("who", "host", null, 1234, ServerType.HISTORICAL, "tier1", 0); - server.addDataSegment(dataSegmentList.get(0).getIdentifier(), dataSegmentList.get(0)); - server.addDataSegment(dataSegmentList.get(1).getIdentifier(), dataSegmentList.get(1)); - server.addDataSegment(dataSegmentList.get(2).getIdentifier(), dataSegmentList.get(2)); + server.addDataSegment(dataSegmentList.get(0)); + server.addDataSegment(dataSegmentList.get(1)); + server.addDataSegment(dataSegmentList.get(2)); EasyMock.expect(inventoryView.getInventory()).andReturn( ImmutableList.of(server) ).atLeastOnce(); @@ -460,9 +465,9 @@ public class DatasourcesResourceTest public void testGetSegmentDataSourceSpecificInterval() { server = new DruidServer("who", "host", null, 1234, ServerType.HISTORICAL, "tier1", 0); - server.addDataSegment(dataSegmentList.get(0).getIdentifier(), dataSegmentList.get(0)); - server.addDataSegment(dataSegmentList.get(1).getIdentifier(), dataSegmentList.get(1)); - server.addDataSegment(dataSegmentList.get(2).getIdentifier(), dataSegmentList.get(2)); + server.addDataSegment(dataSegmentList.get(0)); + server.addDataSegment(dataSegmentList.get(1)); + server.addDataSegment(dataSegmentList.get(2)); 
EasyMock.expect(inventoryView.getInventory()).andReturn( ImmutableList.of(server) ).atLeastOnce(); diff --git a/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java b/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java index 1a66b5926a4..067bf869d1b 100644 --- a/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java +++ b/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java @@ -97,9 +97,9 @@ public class IntervalsResourceTest ) ); server = new DruidServer("who", "host", null, 1234, ServerType.HISTORICAL, "tier1", 0); - server.addDataSegment(dataSegmentList.get(0).getIdentifier(), dataSegmentList.get(0)); - server.addDataSegment(dataSegmentList.get(1).getIdentifier(), dataSegmentList.get(1)); - server.addDataSegment(dataSegmentList.get(2).getIdentifier(), dataSegmentList.get(2)); + server.addDataSegment(dataSegmentList.get(0)); + server.addDataSegment(dataSegmentList.get(1)); + server.addDataSegment(dataSegmentList.get(2)); } @Test diff --git a/server/src/test/java/io/druid/server/http/ServersResourceTest.java b/server/src/test/java/io/druid/server/http/ServersResourceTest.java index 3d3431e39f7..95dc09a1281 100644 --- a/server/src/test/java/io/druid/server/http/ServersResourceTest.java +++ b/server/src/test/java/io/druid/server/http/ServersResourceTest.java @@ -51,7 +51,7 @@ public class ServersResourceTest .version("v0") .size(1L) .build(); - dummyServer.addDataSegment(segment.getIdentifier(), segment); + dummyServer.addDataSegment(segment); CoordinatorServerView inventoryView = EasyMock.createMock(CoordinatorServerView.class); EasyMock.expect(inventoryView.getInventory()).andReturn(ImmutableList.of(dummyServer)).anyTimes(); diff --git a/services/src/main/java/io/druid/cli/CliRealtimeExample.java b/services/src/main/java/io/druid/cli/CliRealtimeExample.java index 29c08235de6..b4ebfc3eeee 100644 --- a/services/src/main/java/io/druid/cli/CliRealtimeExample.java +++ b/services/src/main/java/io/druid/cli/CliRealtimeExample.java @@ -44,6 +44,7 @@ import io.druid.timeline.DataSegment; import java.io.File; import java.io.IOException; import java.net.URI; +import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Properties; @@ -119,7 +120,7 @@ public class CliRealtimeExample extends ServerRunnable } @Override - public Iterable getInventory() + public Collection getInventory() { return ImmutableList.of(); }
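The patch ends here. For reviewers, a minimal sketch (not part of the patch) of how the reworked DruidDataSource API reads after this change. It uses only constructors and methods that appear in the diff; the class name and main method are illustrative only, and the DataSegment arguments are copied from the test above.

import io.druid.client.DruidDataSource;
import io.druid.client.ImmutableDruidDataSource;
import io.druid.java.util.common.Intervals;
import io.druid.timeline.DataSegment;

import java.util.HashMap;

public class DruidDataSourceApiSketch
{
  public static void main(String[] args)
  {
    // Same 9-arg DataSegment constructor used in testSimpleGetTheDataSource().
    DataSegment segment = new DataSegment(
        "datasegment1",
        Intervals.of("2010-01-01/P1D"),
        null, null, null, null, null, 0x9, 10
    );

    // Segments are now keyed by their own identifier; no caller-supplied partition name.
    DruidDataSource dataSource = new DruidDataSource("datasource1", new HashMap<>());
    dataSource.addSegment(segment);                       // was: addSegment("part1", segment)
    DataSegment found = dataSource.getSegment(segment.getIdentifier());

    // Read-only consumers (coordinator endpoints, inventory views) get an immutable snapshot.
    ImmutableDruidDataSource snapshot = dataSource.toImmutableDruidDataSource();

    dataSource.removePartition(segment.getIdentifier());  // removal is keyed by segment id
  }
}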