mirror of https://github.com/apache/druid.git
add endpoint to fetch rule history for all datasources.
Address review comments; fix compilation.
parent dfe3be0f6d
commit 0835e12f2a
@@ -54,4 +54,12 @@ public interface AuditManager
    */
   public List<AuditEntry> fetchAuditHistory(String key, String type, Interval interval);
 
+  /**
+   * Provides audit history for the given type and interval.
+   * @param type type of the AuditEntry
+   * @param interval interval for which to fetch the audit history
+   * @return list of AuditEntries satisfying the passed parameters
+   */
+  public List<AuditEntry> fetchAuditHistory(String type, Interval interval);
+
 }

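For illustration only (not part of the patch): a minimal sketch of how a caller might use the new overload, assuming an already-constructed AuditManager; the AuditHistoryExample class and the one-week window are hypothetical.

import io.druid.audit.AuditEntry;
import io.druid.audit.AuditManager;
import org.joda.time.DateTime;
import org.joda.time.Interval;

import java.util.List;

public class AuditHistoryExample
{
  // Fetches every "rules" audit entry across all datasources for the past week.
  // Passing null instead of an Interval falls back to the window derived from
  // druid.audit.manager.auditHistoryMillis (see the SQLAuditManager hunk below).
  public static List<AuditEntry> lastWeekOfRuleChanges(AuditManager auditManager)
  {
    DateTime now = new DateTime();
    Interval lastWeek = new Interval(now.minusDays(7), now);
    return auditManager.fetchAuditHistory("rules", lastWeek);
  }
}
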
@@ -183,10 +183,13 @@ Returns all rules for a specified datasource.
 
 Returns all rules for a specified datasource and includes the default datasource.
 
+* `/druid/coordinator/v1/rules/history?interval=<interval>`
+
+Returns the audit history of rules for all datasources. The default value of `interval` can be specified by setting `druid.audit.manager.auditHistoryMillis` (1 week if not configured) in the Coordinator `runtime.properties`.
 
 * `/druid/coordinator/v1/rules/{dataSourceName}/history?interval=<interval>`
 
-Returns audit history of rules. default value of interval can be specified by setting `druid.audit.manager.auditHistoryMillis` (1 week if not configured) in coordinator runtime.properties
+Returns the audit history of rules for a specified datasource. The default value of `interval` can be specified by setting `druid.audit.manager.auditHistoryMillis` (1 week if not configured) in the Coordinator `runtime.properties`.
 
 
 ### POST

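Not part of the patch: a hedged sketch of calling the new all-datasources endpoint from Java; the Coordinator address (localhost:8081), the RuleHistoryRequestExample class, and the example interval are assumptions.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;

public class RuleHistoryRequestExample
{
  public static void main(String[] args) throws Exception
  {
    // ISO-8601 interval; omit the query parameter entirely to get the configured default window.
    String interval = URLEncoder.encode("2015-01-01/2015-02-01", "UTF-8");
    URL url = new URL("http://localhost:8081/druid/coordinator/v1/rules/history?interval=" + interval);

    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("GET");
    conn.setRequestProperty("Accept", "application/json");

    // The endpoint responds with a JSON array of audit entries for the "rules" type.
    BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream(), "UTF-8"));
    String line;
    while ((line = reader.readLine()) != null) {
      System.out.println(line);
    }
    reader.close();
  }
}
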
@@ -116,13 +116,7 @@ public class SQLAuditManager implements AuditManager
   @Override
   public List<AuditEntry> fetchAuditHistory(final String key, final String type, Interval interval)
   {
-    final Interval theInterval;
-    if (interval == null) {
-      DateTime now = new DateTime();
-      theInterval = new Interval(now.minus(config.getAuditHistoryMillis()), now);
-    } else {
-      theInterval = interval;
-    }
+    final Interval theInterval = getIntervalOrDefault(interval);
     return dbi.withHandle(
         new HandleCallback<List<AuditEntry>>()
         {

@@ -160,4 +154,57 @@ public class SQLAuditManager implements AuditManager
     );
   }
 
+  private Interval getIntervalOrDefault(Interval interval)
+  {
+    final Interval theInterval;
+    if (interval == null) {
+      DateTime now = new DateTime();
+      theInterval = new Interval(now.minus(config.getAuditHistoryMillis()), now);
+    } else {
+      theInterval = interval;
+    }
+    return theInterval;
+  }
+
+  @Override
+  public List<AuditEntry> fetchAuditHistory(final String type, Interval interval)
+  {
+    final Interval theInterval = getIntervalOrDefault(interval);
+    return dbi.withHandle(
+        new HandleCallback<List<AuditEntry>>()
+        {
+          @Override
+          public List<AuditEntry> withHandle(Handle handle) throws Exception
+          {
+            return handle.createQuery(
+                String.format(
+                    "SELECT payload FROM %s WHERE type = :type and created_date between :start_date and :end_date ORDER BY created_date",
+                    getAuditTable()
+                )
+            )
+                         .bind("type", type)
+                         .bind("start_date", theInterval.getStart().toString())
+                         .bind("end_date", theInterval.getEnd().toString())
+                         .map(
+                             new ResultSetMapper<AuditEntry>()
+                             {
+                               @Override
+                               public AuditEntry map(int index, ResultSet r, StatementContext ctx)
+                                   throws SQLException
+                               {
+                                 try {
+                                   return jsonMapper.readValue(r.getBytes("payload"), AuditEntry.class);
+                                 }
+                                 catch (IOException e) {
+                                   throw new SQLException(e);
+                                 }
+                               }
+                             }
+                         )
+                         .list();
+          }
+        }
+    );
+  }
+
 }

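As a hedged aside, a small sketch of the default-window behaviour that getIntervalOrDefault encodes; the DefaultAuditIntervalExample class and the hard-coded one-week value (the documented default for druid.audit.manager.auditHistoryMillis) are assumptions, not taken from the patch.

import org.joda.time.DateTime;
import org.joda.time.Interval;

public class DefaultAuditIntervalExample
{
  public static void main(String[] args)
  {
    // Assumed value: the documented default of one week, expressed in milliseconds.
    long auditHistoryMillis = 7L * 24 * 60 * 60 * 1000;

    // With a null interval, the query window becomes [now - auditHistoryMillis, now],
    // mirroring the null branch inside getIntervalOrDefault.
    DateTime now = new DateTime();
    Interval theInterval = new Interval(now.minus(auditHistoryMillis), now);
    System.out.println(theInterval);
  }
}
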
@@ -17,6 +17,7 @@
 
 package io.druid.server.http;
 
+import com.google.common.collect.ImmutableMap;
 import com.google.inject.Inject;
 import io.druid.audit.AuditInfo;
 import io.druid.audit.AuditManager;

@@ -79,6 +80,7 @@ public class RulesResource
     return Response.ok(databaseRuleManager.getRules(dataSourceName))
                    .build();
   }
 
+  // default value is used for backwards compatibility
   @POST
   @Path("/{dataSourceName}")

@@ -114,4 +116,21 @@ public class RulesResource
                    .build();
   }
 
+  @GET
+  @Path("/history")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getDatasourceRuleHistory(
+      @QueryParam("interval") final String interval
+  )
+  {
+    try {
+      Interval theInterval = interval == null ? null : new Interval(interval);
+      return Response.ok(auditManager.fetchAuditHistory("rules", theInterval))
+                     .build();
+    }
+    catch (IllegalArgumentException e) {
+      return Response.serverError().entity(ImmutableMap.<String, Object>of("error", e.getMessage())).build();
+    }
+  }
+
 }

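Not part of the patch: a hedged illustration of the failure mode the try/catch above guards against — Joda-Time rejects malformed interval strings with an IllegalArgumentException, which the resource maps to a JSON error body rather than an unhandled exception. The IntervalParsingExample class and the sample inputs are hypothetical.

import org.joda.time.Interval;

public class IntervalParsingExample
{
  public static void main(String[] args)
  {
    // Parses: the same ISO-8601 interval form used by the ?interval= query parameter.
    Interval ok = new Interval("2015-01-01/2015-02-01");
    System.out.println(ok);

    try {
      // Malformed input, as a client might send in the query string.
      new Interval("not-an-interval");
    }
    catch (IllegalArgumentException e) {
      // getDatasourceRuleHistory catches this and returns {"error": ...} instead.
      System.out.println("rejected: " + e.getMessage());
    }
  }
}
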
@@ -17,6 +17,8 @@
 
 package io.druid.metadata;
 
 
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Suppliers;
 import com.google.common.collect.ImmutableMap;

@@ -127,11 +129,61 @@ public class SQLMetadataRuleManagerTest
     List<AuditEntry> auditEntries = auditManager.fetchAuditHistory("test_dataSource", "rules", null);
     Assert.assertEquals(1, auditEntries.size());
     AuditEntry entry = auditEntries.get(0);
-    Assert.assertEquals(mapper.writeValueAsString(rules), entry.getPayload());
+
+    Assert.assertEquals(
+        rules, mapper.readValue(
+            entry.getPayload(), new TypeReference<List<Rule>>()
+            {
+            }
+        )
+    );
     Assert.assertEquals(auditInfo, entry.getAuditInfo());
     Assert.assertEquals("test_dataSource", entry.getKey());
   }
+
+  @Test
+  public void testFetchAuditEntriesForAllDataSources() throws Exception
+  {
+    List<Rule> rules = Arrays.<Rule>asList(
+        new IntervalLoadRule(
+            new Interval("2015-01-01/2015-02-01"), ImmutableMap.<String, Integer>of(
+                DruidServer.DEFAULT_TIER,
+                DruidServer.DEFAULT_NUM_REPLICANTS
+            )
+        )
+    );
+    AuditInfo auditInfo = new AuditInfo("test_author", "test_comment", "127.0.0.1");
+    ruleManager.overrideRule(
+        "test_dataSource",
+        rules,
+        auditInfo
+    );
+    ruleManager.overrideRule(
+        "test_dataSource2",
+        rules,
+        auditInfo
+    );
+    // fetch rules from metadata storage
+    ruleManager.poll();
+
+    Assert.assertEquals(rules, ruleManager.getRules("test_dataSource"));
+    Assert.assertEquals(rules, ruleManager.getRules("test_dataSource2"));
+
+    // test fetch audit entries
+    List<AuditEntry> auditEntries = auditManager.fetchAuditHistory("rules", null);
+    Assert.assertEquals(2, auditEntries.size());
+    for (AuditEntry entry : auditEntries) {
+      Assert.assertEquals(
+          rules, mapper.readValue(
+              entry.getPayload(), new TypeReference<List<Rule>>()
+              {
+              }
+          )
+      );
+      Assert.assertEquals(auditInfo, entry.getAuditInfo());
+    }
+  }
 
   @After
   public void cleanup()
   {