Miscellaneous cleanup in the supervisor API flow. (#17144)

Extracting a few miscellaneous non-functional changes from the batch supervisor branch:

- Replace anonymous inner classes with lambda expressions in the SQL supervisor manager layer (see the first sketch after this list)
- Add explicit @Nullable annotations in DynamicConfigProviderUtils to make the IDE happy
- Small variable renames (likely copy-paste errors) and typo fixes
- Add the table name to this exception message: Delete the supervisor from the table[%s] in the database...
- Prefer CollectionUtils.isNullOrEmpty() over list == null || list.isEmpty() (see the second sketch after this list). The Preconditions checks can be changed to throw DruidException separately, a batch of APIs at a time.
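A minimal, self-contained sketch of the anonymous-class-to-lambda rewrite applied in the diff below. Callback and withHandle here are hypothetical stand-ins that only mirror the single-abstract-method shape of JDBI's HandleCallback; they are not the real API.

```java
// Sketch only: Callback and withHandle are hypothetical stand-ins that mirror
// the shape of JDBI's HandleCallback; they are not the real API.
public class LambdaRewriteSketch
{
  @FunctionalInterface
  interface Callback<T>
  {
    T withHandle(String handle) throws Exception;
  }

  static <T> T withHandle(Callback<T> callback) throws Exception
  {
    return callback.withHandle("handle");
  }

  public static void main(String[] args) throws Exception
  {
    // Before: anonymous inner class implementing the callback interface.
    withHandle(new Callback<Void>()
    {
      @Override
      public Void withHandle(String handle)
      {
        System.out.println("before: " + handle);
        return null;
      }
    });

    // After: the same callback written as a lambda expression.
    withHandle(handle -> {
      System.out.println("after: " + handle);
      return null;
    });
  }
}
```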
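And a small sketch of the null-or-empty check consolidation. The isNullOrEmpty helper below is a local re-implementation assumed to match the behavior of org.apache.druid.utils.CollectionUtils.isNullOrEmpty, included only so the example compiles on its own.

```java
import java.util.Collection;
import java.util.List;

public class NullOrEmptyCheckSketch
{
  // Assumed to behave like org.apache.druid.utils.CollectionUtils.isNullOrEmpty;
  // re-implemented here only so the sketch is self-contained.
  static boolean isNullOrEmpty(Collection<?> collection)
  {
    return collection == null || collection.isEmpty();
  }

  public static void main(String[] args)
  {
    List<Integer> taskGroupIds = List.of();

    // Before: two-part check repeated at each call site.
    boolean before = taskGroupIds == null || taskGroupIds.isEmpty();

    // After: one helper call with the same semantics.
    boolean after = isNullOrEmpty(taskGroupIds);

    System.out.println(before + " " + after); // prints: true true
  }
}
```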
Abhishek Radhakrishnan 2024-09-24 13:06:23 -07:00 committed by GitHub
parent d1bfabbf4d
commit 83299e9882
4 changed files with 110 additions and 190 deletions

File: SupervisorResource.java

@@ -50,6 +50,7 @@ import org.apache.druid.server.security.ForbiddenException;
 import org.apache.druid.server.security.Resource;
 import org.apache.druid.server.security.ResourceAction;
 import org.apache.druid.server.security.ResourceType;
+import org.apache.druid.utils.CollectionUtils;
 
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
@@ -123,7 +124,7 @@ public class SupervisorResource
     return asLeaderWithSupervisorManager(
         manager -> {
           Preconditions.checkArgument(
-              spec.getDataSources() != null && spec.getDataSources().size() > 0,
+              !CollectionUtils.isNullOrEmpty(spec.getDataSources()),
               "No dataSources found to perform authorization checks"
           );
           final Set<ResourceAction> resourceActions;
@@ -412,7 +413,7 @@ public class SupervisorResource
   public Response handoffTaskGroups(@PathParam("id") final String id, @Nonnull final HandoffTaskGroupsRequest handoffTaskGroupsRequest)
   {
     List<Integer> taskGroupIds = handoffTaskGroupsRequest.getTaskGroupIds();
-    if (taskGroupIds == null || taskGroupIds.isEmpty()) {
+    if (CollectionUtils.isNullOrEmpty(taskGroupIds)) {
       return Response.status(Response.Status.BAD_REQUEST)
                      .entity(ImmutableMap.of("error", "List of task groups to handoff can't be empty"))
                      .build();
@@ -533,7 +534,7 @@ public class SupervisorResource
             authorizerMapper
         )
     );
-    if (authorizedHistoryForId.size() > 0) {
+    if (!authorizedHistoryForId.isEmpty()) {
       return Response.ok(authorizedHistoryForId).build();
     }
   }

File: DynamicConfigProviderUtils.java

@@ -22,13 +22,14 @@ package org.apache.druid.utils;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.druid.metadata.DynamicConfigProvider;
 
+import javax.annotation.Nullable;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
 public class DynamicConfigProviderUtils
 {
-  public static Map<String, String> extraConfigAndSetStringMap(Map<String, Object> config, String dynamicConfigProviderKey, ObjectMapper mapper)
+  public static Map<String, String> extraConfigAndSetStringMap(@Nullable Map<String, Object> config, String dynamicConfigProviderKey, ObjectMapper mapper)
   {
     HashMap<String, String> newConfig = new HashMap<>();
     if (config != null) {
@@ -43,7 +44,7 @@ public class DynamicConfigProviderUtils
     return newConfig;
   }
 
-  public static Map<String, Object> extraConfigAndSetObjectMap(Map<String, Object> config, String dynamicConfigProviderKey, ObjectMapper mapper)
+  public static Map<String, Object> extraConfigAndSetObjectMap(@Nullable Map<String, Object> config, String dynamicConfigProviderKey, ObjectMapper mapper)
   {
     HashMap<String, Object> newConfig = new HashMap<>();
     if (config != null) {
@@ -58,7 +59,7 @@ public class DynamicConfigProviderUtils
     return newConfig;
   }
 
-  private static Map<String, String> extraConfigFromProvider(Object dynamicConfigProviderJson, ObjectMapper mapper)
+  private static Map<String, String> extraConfigFromProvider(@Nullable Object dynamicConfigProviderJson, ObjectMapper mapper)
   {
     if (dynamicConfigProviderJson != null) {
       DynamicConfigProvider dynamicConfigProvider = mapper.convertValue(dynamicConfigProviderJson, DynamicConfigProvider.class);

File: SQLMetadataSupervisorManager.java

@@ -20,7 +20,6 @@
 package org.apache.druid.metadata;
 
 import com.fasterxml.jackson.core.JsonParseException;
-import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.JsonMappingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Supplier;
@@ -38,9 +37,7 @@ import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.common.lifecycle.LifecycleStart;
 import org.apache.druid.java.util.common.logger.Logger;
 import org.joda.time.DateTime;
-import org.skife.jdbi.v2.FoldController;
 import org.skife.jdbi.v2.Folder3;
-import org.skife.jdbi.v2.Handle;
 import org.skife.jdbi.v2.IDBI;
 import org.skife.jdbi.v2.PreparedBatch;
 import org.skife.jdbi.v2.StatementContext;
@@ -91,11 +88,7 @@ public class SQLMetadataSupervisorManager implements MetadataSupervisorManager
   public void insert(final String id, final SupervisorSpec spec)
   {
     dbi.withHandle(
-        new HandleCallback<Void>()
-        {
-          @Override
-          public Void withHandle(Handle handle) throws Exception
-          {
+        handle -> {
           handle.createStatement(
               StringUtils.format(
                   "INSERT INTO %s (spec_id, created_date, payload) VALUES (:spec_id, :created_date, :payload)",
@@ -109,7 +102,6 @@ public class SQLMetadataSupervisorManager implements MetadataSupervisorManager
           return null;
         }
-        }
     );
   }
@@ -118,41 +110,19 @@ public class SQLMetadataSupervisorManager implements MetadataSupervisorManager
   {
     return ImmutableMap.copyOf(
         dbi.withHandle(
-            new HandleCallback<Map<String, List<VersionedSupervisorSpec>>>()
-            {
-              @Override
-              public Map<String, List<VersionedSupervisorSpec>> withHandle(Handle handle)
-              {
-                return handle.createQuery(
+            (HandleCallback<Map<String, List<VersionedSupervisorSpec>>>) handle -> handle.createQuery(
                 StringUtils.format(
                     "SELECT id, spec_id, created_date, payload FROM %1$s ORDER BY id DESC",
                     getSupervisorsTable()
                 )
             ).map(
-                new ResultSetMapper<Pair<String, VersionedSupervisorSpec>>()
-                {
-                  @Override
-                  public Pair<String, VersionedSupervisorSpec> map(int index, ResultSet r, StatementContext ctx)
-                      throws SQLException
-                  {
-                    return Pair.of(
+                (index, r, ctx) -> Pair.of(
                     r.getString("spec_id"),
                     createVersionSupervisorSpecFromResponse(r)
-                    );
-                  }
-                }
+                )
             ).fold(
                 new HashMap<>(),
-                new Folder3<Map<String, List<VersionedSupervisorSpec>>, Pair<String, VersionedSupervisorSpec>>()
-                {
-                  @Override
-                  public Map<String, List<VersionedSupervisorSpec>> fold(
-                      Map<String, List<VersionedSupervisorSpec>> retVal,
-                      Pair<String, VersionedSupervisorSpec> pair,
-                      FoldController foldController,
-                      StatementContext statementContext
-                  )
-                  {
+                (Folder3<Map<String, List<VersionedSupervisorSpec>>, Pair<String, VersionedSupervisorSpec>>) (retVal, pair, foldController, statementContext) -> {
                   try {
                     String specId = pair.lhs;
                     retVal.computeIfAbsent(specId, sId -> new ArrayList<>()).add(pair.rhs);
@@ -162,10 +132,7 @@ public class SQLMetadataSupervisorManager implements MetadataSupervisorManager
                     throw new RuntimeException(e);
                   }
                 }
-                }
-            );
-              }
-            }
+            )
         )
     );
   }
@@ -175,30 +142,15 @@ public class SQLMetadataSupervisorManager implements MetadataSupervisorManager
   {
     return ImmutableList.copyOf(
         dbi.withHandle(
-            new HandleCallback<List<VersionedSupervisorSpec>>()
-            {
-              @Override
-              public List<VersionedSupervisorSpec> withHandle(Handle handle)
-              {
-                return handle.createQuery(
+            (HandleCallback<List<VersionedSupervisorSpec>>) handle -> handle.createQuery(
                 StringUtils.format(
                     "SELECT id, spec_id, created_date, payload FROM %1$s WHERE spec_id = :spec_id ORDER BY id DESC",
                     getSupervisorsTable()
                 )
-            ).bind("spec_id", id
-            ).map(
-                new ResultSetMapper<VersionedSupervisorSpec>()
-                {
-                  @Override
-                  public VersionedSupervisorSpec map(int index, ResultSet r, StatementContext ctx)
-                      throws SQLException
-                  {
-                    return createVersionSupervisorSpecFromResponse(r);
-                  }
-                }
-            ).list();
-          }
-        }
+            )
+                .bind("spec_id", id)
+                .map((index, r, ctx) -> createVersionSupervisorSpecFromResponse(r))
+                .list()
         )
     );
   }
@@ -207,12 +159,7 @@ public class SQLMetadataSupervisorManager implements MetadataSupervisorManager
   {
     SupervisorSpec payload;
     try {
-      payload = jsonMapper.readValue(
-          r.getBytes("payload"),
-          new TypeReference<SupervisorSpec>()
-          {
-          }
-      );
+      payload = jsonMapper.readValue(r.getBytes("payload"), SupervisorSpec.class);
     }
     catch (JsonParseException | JsonMappingException e) {
       log.warn("Failed to deserialize payload for spec_id[%s]", r.getString("spec_id"));
@@ -229,12 +176,7 @@ public class SQLMetadataSupervisorManager implements MetadataSupervisorManager
   {
     return ImmutableMap.copyOf(
         dbi.withHandle(
-            new HandleCallback<Map<String, SupervisorSpec>>()
-            {
-              @Override
-              public Map<String, SupervisorSpec> withHandle(Handle handle)
-              {
-                return handle.createQuery(
+            (HandleCallback<Map<String, SupervisorSpec>>) handle -> handle.createQuery(
                 StringUtils.format(
                     "SELECT r.spec_id, r.payload "
                     + "FROM %1$s r "
@@ -253,18 +195,15 @@ public class SQLMetadataSupervisorManager implements MetadataSupervisorManager
                   try {
                     return Pair.of(
                         r.getString("spec_id"),
-                        jsonMapper.readValue(
-                            r.getBytes("payload"), new TypeReference<SupervisorSpec>()
-                            {
-                            }
-                        )
+                        jsonMapper.readValue(r.getBytes("payload"), SupervisorSpec.class)
                     );
                   }
                   catch (IOException e) {
                     String exceptionMessage = StringUtils.format(
                         "Could not map json payload to a SupervisorSpec for spec_id: [%s]."
-                        + " Delete the supervisor from the database and re-submit it to the overlord.",
-                        r.getString("spec_id")
+                        + " Delete the supervisor from the table[%s] in the database and re-submit it to the overlord.",
+                        r.getString("spec_id"),
+                        getSupervisorsTable()
                     );
                     log.error(e, exceptionMessage);
                     return null;
@@ -273,16 +212,7 @@ public class SQLMetadataSupervisorManager implements MetadataSupervisorManager
                 }
             ).fold(
                 new HashMap<>(),
-                new Folder3<Map<String, SupervisorSpec>, Pair<String, SupervisorSpec>>()
-                {
-                  @Override
-                  public Map<String, SupervisorSpec> fold(
-                      Map<String, SupervisorSpec> retVal,
-                      Pair<String, SupervisorSpec> stringObjectMap,
-                      FoldController foldController,
-                      StatementContext statementContext
-                  )
-                  {
+                (Folder3<Map<String, SupervisorSpec>, Pair<String, SupervisorSpec>>) (retVal, stringObjectMap, foldController, statementContext) -> {
                   try {
                     if (null != stringObjectMap) {
                       retVal.put(stringObjectMap.lhs, stringObjectMap.rhs);
@@ -293,10 +223,7 @@ public class SQLMetadataSupervisorManager implements MetadataSupervisorManager
                     throw new RuntimeException(e);
                   }
                 }
-                }
-            );
-              }
-            }
+            )
         )
     );
   }
@@ -304,10 +231,10 @@ public class SQLMetadataSupervisorManager implements MetadataSupervisorManager
   @Override
   public Map<String, SupervisorSpec> getLatestActiveOnly()
   {
-    Map<String, SupervisorSpec> supervisors = getLatest();
-    Map<String, SupervisorSpec> activeSupervisors = new HashMap<>();
+    final Map<String, SupervisorSpec> supervisors = getLatest();
+    final Map<String, SupervisorSpec> activeSupervisors = new HashMap<>();
     for (Map.Entry<String, SupervisorSpec> entry : supervisors.entrySet()) {
-      // Terminated supervisor will have it's latest supervisorSpec as NoopSupervisorSpec
+      // Terminated supervisor will have its latest supervisorSpec as NoopSupervisorSpec
       // (NoopSupervisorSpec is used as a tombstone marker)
       if (!(entry.getValue() instanceof NoopSupervisorSpec)) {
         activeSupervisors.put(entry.getKey(), entry.getValue());
@@ -319,16 +246,16 @@ public class SQLMetadataSupervisorManager implements MetadataSupervisorManager
   @Override
   public Map<String, SupervisorSpec> getLatestTerminatedOnly()
   {
-    Map<String, SupervisorSpec> supervisors = getLatest();
-    Map<String, SupervisorSpec> activeSupervisors = new HashMap<>();
+    final Map<String, SupervisorSpec> supervisors = getLatest();
+    final Map<String, SupervisorSpec> terminatedSupervisors = new HashMap<>();
     for (Map.Entry<String, SupervisorSpec> entry : supervisors.entrySet()) {
-      // Terminated supervisor will have it's latest supervisorSpec as NoopSupervisorSpec
+      // Terminated supervisor will have its latest supervisorSpec as NoopSupervisorSpec
       // (NoopSupervisorSpec is used as a tombstone marker)
       if (entry.getValue() instanceof NoopSupervisorSpec) {
-        activeSupervisors.put(entry.getKey(), entry.getValue());
+        terminatedSupervisors.put(entry.getKey(), entry.getValue());
       }
     }
-    return ImmutableMap.copyOf(activeSupervisors);
+    return ImmutableMap.copyOf(terminatedSupervisors);
   }
 
   @Override

File: SQLMetadataSupervisorManagerTest.java

@@ -37,8 +37,6 @@ import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
-import org.skife.jdbi.v2.Handle;
-import org.skife.jdbi.v2.tweak.HandleCallback;
 
 import java.util.Collections;
 import java.util.List;
@@ -65,16 +63,9 @@ public class SQLMetadataSupervisorManagerTest
   public void cleanup()
   {
     connector.getDBI().withHandle(
-        new HandleCallback<Void>()
-        {
-          @Override
-          public Void withHandle(Handle handle)
-          {
-            handle.createStatement(StringUtils.format("DROP TABLE %s", tablesConfig.getSupervisorTable()))
-                  .execute();
-            return null;
-          }
-        }
+        handle -> handle.createStatement(StringUtils.format("DROP TABLE %s", tablesConfig.getSupervisorTable()))
+                        .execute()
     );
   }