diff --git a/server/src/main/java/io/druid/client/ImmutableDruidServer.java b/server/src/main/java/io/druid/client/ImmutableDruidServer.java
index 6846d0b89a7..447c9f3edef 100644
--- a/server/src/main/java/io/druid/client/ImmutableDruidServer.java
+++ b/server/src/main/java/io/druid/client/ImmutableDruidServer.java
@@ -101,29 +101,4 @@ public class ImmutableDruidServer
   {
     return segments;
   }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    ImmutableDruidServer that = (ImmutableDruidServer) o;
-
-    if (metadata != null ? !metadata.equals(that.metadata) : that.metadata != null) {
-      return false;
-    }
-
-    return true;
-  }
-
-  @Override
-  public int hashCode()
-  {
-    return metadata != null ? metadata.hashCode() : 0;
-  }
 }
diff --git a/server/src/main/java/io/druid/db/DatabaseRuleManager.java b/server/src/main/java/io/druid/db/DatabaseRuleManager.java
index 682e6fc8d8c..b8cb792b57f 100644
--- a/server/src/main/java/io/druid/db/DatabaseRuleManager.java
+++ b/server/src/main/java/io/druid/db/DatabaseRuleManager.java
@@ -51,7 +51,6 @@ import java.sql.SQLException;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.atomic.AtomicReference;
 
@@ -125,7 +124,7 @@ public class DatabaseRuleManager
   private final Supplier<DatabaseRuleManagerConfig> config;
   private final Supplier<DbTablesConfig> dbTables;
   private final IDBI dbi;
-  private final AtomicReference<ConcurrentHashMap<String, List<Rule>>> rules;
+  private final AtomicReference<ImmutableMap<String, List<Rule>>> rules;
 
   private volatile ScheduledExecutorService exec;
 
@@ -146,8 +145,8 @@ public class DatabaseRuleManager
     this.dbTables = dbTables;
     this.dbi = dbi;
 
-    this.rules = new AtomicReference<ConcurrentHashMap<String, List<Rule>>>(
-        new ConcurrentHashMap<String, List<Rule>>()
+    this.rules = new AtomicReference<>(
+        ImmutableMap.<String, List<Rule>>of()
     );
   }
 
@@ -188,7 +187,7 @@ public class DatabaseRuleManager
       return;
     }
 
-    rules.set(new ConcurrentHashMap<String, List<Rule>>());
+    rules.set(ImmutableMap.<String, List<Rule>>of());
     started = false;
     exec.shutdownNow();
 
@@ -199,7 +198,7 @@ public class DatabaseRuleManager
   public void poll()
   {
     try {
-      ConcurrentHashMap<String, List<Rule>> newRules = new ConcurrentHashMap<String, List<Rule>>(
+      ImmutableMap<String, List<Rule>> newRules = ImmutableMap.copyOf(
           dbi.withHandle(
               new HandleCallback<Map<String, List<Rule>>>()
               {
@@ -309,12 +308,6 @@ public class DatabaseRuleManager
             }
           }
       );
-
-      ConcurrentHashMap<String, List<Rule>> existingRules = rules.get();
-      if (existingRules == null) {
-        existingRules = new ConcurrentHashMap<String, List<Rule>>();
-      }
-      existingRules.put(dataSource, newRules);
     }
     catch (Exception e) {
       log.error(e, String.format("Exception while overriding rule for %s", dataSource));
diff --git a/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java b/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java
index 8aa0f88e5db..f57bd87ebd5 100644
--- a/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java
+++ b/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java
@@ -350,6 +350,10 @@ public class DruidCoordinator
   )
   {
     try {
+      if (fromServer.getMetadata().equals(toServer.getMetadata())) {
+        throw new IAE("Cannot move [%s] to and from the same server [%s]", segmentName, fromServer.getName());
+      }
+
       final DataSegment segment = fromServer.getSegment(segmentName);
       if (segment == null) {
         throw new IAE("Unable to find segment [%s] on server [%s]", segmentName, fromServer.getName());
diff --git a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorRuleRunner.java b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorRuleRunner.java
index 15edf27d428..e0cb457418b 100644
--- a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorRuleRunner.java
+++ b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorRuleRunner.java
@@ -76,8 +76,8 @@ public class DruidCoordinatorRuleRunner implements DruidCoordinatorHelper
     }
 
     DruidCoordinatorRuntimeParams paramsWithReplicationManager = params.buildFromExisting()
-        .withReplicationManager(replicatorThrottler)
-        .build();
+                                                                       .withReplicationManager(replicatorThrottler)
+                                                                       .build();
 
     // Run through all matched rules for available segments
     DateTime now = new DateTime();
@@ -94,10 +94,8 @@ public class DruidCoordinatorRuleRunner implements DruidCoordinatorHelper
     }
 
     if (!foundMatchingRule) {
-      log.makeAlert(
-          "Unable to find a matching rule for dataSource[%s]",
-          segment.getDataSource()
-      )
+      log.makeAlert("Unable to find a matching rule!")
+          .addData("dataSource", segment.getDataSource())
           .addData("segment", segment.getIdentifier())
           .emit();
     }