mirror of https://github.com/apache/druid.git
Use DefaultQueryConfig in SqlLifecycle to correctly populate request logs (#12613)
Fixes an issue where SQL query request logs did not include the default query context values set via `druid.query.default.context.xyz` runtime properties.

# Change summary

* Inject `DefaultQueryConfig` into `SqlLifecycleFactory`.
* Add the params from `DefaultQueryConfig` to the query context in `SqlLifecycle`.

# Description

- This change does not affect query execution, because `DefaultQueryConfig` was already applied in `QueryLifecycle`, which is initialized when the SQL is translated to a native query.
- It also covers any future case where a context parameter needs to be handled at the SQL stage itself.
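For illustration, here is a minimal sketch (not part of the commit) of the behavior this change produces, assuming a runtime property such as `druid.query.default.context.priority=1`; the property name, the `priority` key, and the `sqlTimeZone` value below are hypothetical examples. The sketch only uses calls that appear in the diff: `DefaultQueryConfig.getContext()`, `QueryContext.addDefaultParams()`, and `QueryContext.getMergedParams()`.

```java
import com.google.common.collect.ImmutableMap;
import org.apache.druid.query.DefaultQueryConfig;
import org.apache.druid.query.QueryContext;

public class DefaultContextSketch
{
  public static void main(String[] args)
  {
    // Stand-in for druid.query.default.context.priority=1 (hypothetical runtime property).
    final DefaultQueryConfig defaults = new DefaultQueryConfig(ImmutableMap.of("priority", 1));

    // Context supplied by the user along with the SQL query (key chosen for illustration).
    final QueryContext context = new QueryContext(ImmutableMap.of("sqlTimeZone", "America/Los_Angeles"));

    // What SqlLifecycle.initialize() now does before the query is planned and logged.
    context.addDefaultParams(defaults.getContext());

    // The merged params, which the request logger records, now contain both entries.
    System.out.println(context.getMergedParams());
  }
}
```

Before this change the defaults were only merged later, in `QueryLifecycle`, when the SQL was translated to a native query, so the request log written at the SQL layer never saw them.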
parent 8fbf92e047
commit a3603ad6b0
--- a/SqlLifecycle.java
+++ b/SqlLifecycle.java
@@ -35,6 +35,7 @@ import org.apache.druid.java.util.common.guava.Sequences;
 import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.java.util.emitter.service.ServiceEmitter;
 import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
+import org.apache.druid.query.DefaultQueryConfig;
 import org.apache.druid.query.QueryContext;
 import org.apache.druid.query.QueryContexts;
 import org.apache.druid.query.QueryInterruptedException;
@@ -93,6 +94,7 @@ public class SqlLifecycle
   private final RequestLogger requestLogger;
   private final QueryScheduler queryScheduler;
   private final AuthConfig authConfig;
+  private final DefaultQueryConfig defaultQueryConfig;
   private final long startMs;
   private final long startNs;

@@ -120,6 +122,7 @@ public class SqlLifecycle
       RequestLogger requestLogger,
       QueryScheduler queryScheduler,
       AuthConfig authConfig,
+      DefaultQueryConfig defaultQueryConfig,
       long startMs,
       long startNs
   )
@@ -129,6 +132,7 @@ public class SqlLifecycle
     this.requestLogger = requestLogger;
     this.queryScheduler = queryScheduler;
     this.authConfig = authConfig;
+    this.defaultQueryConfig = defaultQueryConfig;
     this.startMs = startMs;
     this.startNs = startNs;
     this.parameters = Collections.emptyList();
@@ -144,6 +148,7 @@ public class SqlLifecycle
     transition(State.NEW, State.INITIALIZED);
     this.sql = sql;
     this.queryContext = contextWithSqlId(queryContext);
+    this.queryContext.addDefaultParams(defaultQueryConfig.getContext());
     return sqlQueryId();
   }

--- a/SqlLifecycleFactory.java
+++ b/SqlLifecycleFactory.java
@@ -19,9 +19,11 @@

 package org.apache.druid.sql;

+import com.google.common.base.Supplier;
 import com.google.inject.Inject;
 import org.apache.druid.guice.LazySingleton;
 import org.apache.druid.java.util.emitter.service.ServiceEmitter;
+import org.apache.druid.query.DefaultQueryConfig;
 import org.apache.druid.server.QueryScheduler;
 import org.apache.druid.server.log.RequestLogger;
 import org.apache.druid.server.security.AuthConfig;
@@ -35,6 +37,7 @@ public class SqlLifecycleFactory
   private final RequestLogger requestLogger;
   private final QueryScheduler queryScheduler;
   private final AuthConfig authConfig;
+  private final DefaultQueryConfig defaultQueryConfig;

   @Inject
   public SqlLifecycleFactory(
@@ -42,7 +45,8 @@ public class SqlLifecycleFactory
       ServiceEmitter emitter,
       RequestLogger requestLogger,
       QueryScheduler queryScheduler,
-      AuthConfig authConfig
+      AuthConfig authConfig,
+      Supplier<DefaultQueryConfig> defaultQueryConfig
   )
   {
     this.plannerFactory = plannerFactory;
@@ -50,6 +54,7 @@ public class SqlLifecycleFactory
     this.requestLogger = requestLogger;
     this.queryScheduler = queryScheduler;
     this.authConfig = authConfig;
+    this.defaultQueryConfig = defaultQueryConfig.get();
   }

   public SqlLifecycle factorize()
@@ -60,6 +65,7 @@ public class SqlLifecycleFactory
         requestLogger,
         queryScheduler,
         authConfig,
+        defaultQueryConfig,
         System.currentTimeMillis(),
         System.nanoTime()
     );
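As a usage note, the factory now takes one extra constructor argument, a Guava `Supplier<DefaultQueryConfig>`. Below is a minimal sketch (not part of the commit) of constructing it directly; the four collaborators passed in are placeholders for whatever the caller already has, and the `priority` default is a hypothetical example. Accepting a `Supplier` rather than the config itself is presumably what lets Guice satisfy the dependency from its JSON-configuration bindings, which is why the Avatica test module further down binds `Supplier<DefaultQueryConfig>` explicitly.

```java
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.query.DefaultQueryConfig;
import org.apache.druid.server.QueryScheduler;
import org.apache.druid.server.log.RequestLogger;
import org.apache.druid.server.security.AuthConfig;
import org.apache.druid.sql.SqlLifecycle;
import org.apache.druid.sql.SqlLifecycleFactory;
import org.apache.druid.sql.calcite.planner.PlannerFactory;

public class SqlLifecycleFactorySketch
{
  // The first four arguments are assumed to come from the caller's existing setup.
  static SqlLifecycle newLifecycle(
      PlannerFactory plannerFactory,
      ServiceEmitter emitter,
      RequestLogger requestLogger,
      QueryScheduler scheduler
  )
  {
    final SqlLifecycleFactory factory = new SqlLifecycleFactory(
        plannerFactory,
        emitter,
        requestLogger,
        scheduler,
        new AuthConfig(),
        // Hypothetical default: every SQL query gets priority=1 unless the caller overrides it.
        Suppliers.ofInstance(new DefaultQueryConfig(ImmutableMap.of("priority", 1)))
    );
    // Each lifecycle produced by the factory folds these defaults into its query context
    // when initialize() is called, which is what makes them visible to the request log.
    return factory.factorize();
  }
}
```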
--- a/SqlLifecycleTest.java
+++ b/SqlLifecycleTest.java
@@ -19,6 +19,7 @@

 package org.apache.druid.sql;

+import com.google.common.base.Suppliers;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import org.apache.calcite.avatica.SqlType;
@@ -29,6 +30,7 @@ import org.apache.calcite.tools.ValidationException;
 import org.apache.druid.java.util.common.guava.Sequences;
 import org.apache.druid.java.util.emitter.service.ServiceEmitter;
 import org.apache.druid.java.util.emitter.service.ServiceEventBuilder;
+import org.apache.druid.query.DefaultQueryConfig;
 import org.apache.druid.query.QueryContext;
 import org.apache.druid.query.QueryContexts;
 import org.apache.druid.server.QueryStackTests;
@@ -61,6 +63,7 @@ public class SqlLifecycleTest
   private ServiceEmitter serviceEmitter;
   private RequestLogger requestLogger;
   private SqlLifecycleFactory sqlLifecycleFactory;
+  private DefaultQueryConfig defaultQueryConfig;

   @Before
   public void setup()
@@ -68,12 +71,15 @@ public class SqlLifecycleTest
     this.plannerFactory = EasyMock.createMock(PlannerFactory.class);
     this.serviceEmitter = EasyMock.createMock(ServiceEmitter.class);
     this.requestLogger = EasyMock.createMock(RequestLogger.class);
+    this.defaultQueryConfig = new DefaultQueryConfig(ImmutableMap.of("DEFAULT_KEY", "DEFAULT_VALUE"));
+
     this.sqlLifecycleFactory = new SqlLifecycleFactory(
         plannerFactory,
         serviceEmitter,
         requestLogger,
         QueryStackTests.DEFAULT_NOOP_SCHEDULER,
-        new AuthConfig()
+        new AuthConfig(),
+        Suppliers.ofInstance(defaultQueryConfig)
     );
   }

@@ -85,11 +91,33 @@ public class SqlLifecycleTest
     final Map<String, Object> queryContext = ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, "true");
     lifecycle.initialize(sql, new QueryContext(queryContext));
     Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState());
-    Assert.assertEquals(1, lifecycle.getQueryContext().getMergedParams().size());
+    Assert.assertEquals(2, lifecycle.getQueryContext().getMergedParams().size());
     // should contain the sql query id, but not bySegment since it is not valid for SQL
     Assert.assertTrue(lifecycle.getQueryContext().getMergedParams().containsKey(PlannerContext.CTX_SQL_QUERY_ID));
   }
+
+  @Test
+  public void testDefaultQueryContextIsApplied()
+  {
+    SqlLifecycle lifecycle = sqlLifecycleFactory.factorize();
+    // the lifecycle should not have a query context on it when created/factorized
+    Assert.assertNull(lifecycle.getQueryContext());
+    final String sql = "select 1 + ?";
+    final Map<String, Object> queryContext = ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, "true");
+    QueryContext testQueryContext = new QueryContext(queryContext);
+    // the default query context isn't applied until the lifecycle is initialized
+    for (String defaultContextKey : defaultQueryConfig.getContext().keySet()) {
+      Assert.assertFalse(testQueryContext.getMergedParams().containsKey(defaultContextKey));
+    }
+    lifecycle.initialize(sql, testQueryContext);
+    Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState());
+    Assert.assertEquals(2, lifecycle.getQueryContext().getMergedParams().size());
+    // the lifecycle should contain the default query context values after initialization
+    for (String defaultContextKey : defaultQueryConfig.getContext().keySet()) {
+      Assert.assertTrue(lifecycle.getQueryContext().getMergedParams().containsKey(defaultContextKey));
+    }
+  }

   @Test
   public void testStateTransition()
       throws ValidationException, SqlParseException, RelConversionException, IOException
--- a/DruidAvaticaHandlerTest.java
+++ b/DruidAvaticaHandlerTest.java
@@ -19,6 +19,8 @@

 package org.apache.druid.sql.avatica;

+import com.google.common.base.Supplier;
+import com.google.common.base.Suppliers;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
@@ -30,6 +32,7 @@ import com.google.common.util.concurrent.MoreExecutors;
 import com.google.inject.Binder;
 import com.google.inject.Injector;
 import com.google.inject.Module;
+import com.google.inject.TypeLiteral;
 import com.google.inject.multibindings.Multibinder;
 import com.google.inject.name.Names;
 import org.apache.calcite.avatica.AvaticaClientRuntimeException;
@@ -49,6 +52,7 @@ import org.apache.druid.java.util.common.io.Closer;
 import org.apache.druid.java.util.emitter.service.ServiceEmitter;
 import org.apache.druid.math.expr.ExprMacroTable;
 import org.apache.druid.query.BaseQuery;
+import org.apache.druid.query.DefaultQueryConfig;
 import org.apache.druid.query.QueryRunnerFactoryConglomerate;
 import org.apache.druid.server.QueryLifecycleFactory;
 import org.apache.druid.server.QueryScheduler;
@@ -214,6 +218,7 @@ public abstract class DruidAvaticaHandlerTest extends CalciteTestBase
               .toProvider(QuerySchedulerProvider.class)
               .in(LazySingleton.class);
           binder.bind(QueryMakerFactory.class).to(NativeQueryMakerFactory.class);
+          binder.bind(new TypeLiteral<Supplier<DefaultQueryConfig>>(){}).toInstance(Suppliers.ofInstance(new DefaultQueryConfig(ImmutableMap.of())));
         }
       }
     )
@@ -237,6 +242,8 @@ public abstract class DruidAvaticaHandlerTest extends CalciteTestBase
     clientLosAngeles = DriverManager.getConnection(url, propertiesLosAngeles);
   }

+
+
   @After
   public void tearDown() throws Exception
   {
--- a/CalciteTests.java
+++ b/CalciteTests.java
@@ -845,7 +845,8 @@ public class CalciteTests
         new ServiceEmitter("dummy", "dummy", new NoopEmitter()),
         new NoopRequestLogger(),
         QueryStackTests.DEFAULT_NOOP_SCHEDULER,
-        authConfig
+        authConfig,
+        Suppliers.ofInstance(new DefaultQueryConfig(ImmutableMap.of()))
     );
   }

--- a/SqlResourceTest.java
+++ b/SqlResourceTest.java
@@ -23,6 +23,7 @@ import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Splitter;
 import com.google.common.base.Strings;
+import com.google.common.base.Suppliers;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Maps;
@@ -46,6 +47,7 @@ import org.apache.druid.java.util.common.io.Closer;
 import org.apache.druid.java.util.emitter.service.ServiceEmitter;
 import org.apache.druid.math.expr.ExprMacroTable;
 import org.apache.druid.query.BaseQuery;
+import org.apache.druid.query.DefaultQueryConfig;
 import org.apache.druid.query.Query;
 import org.apache.druid.query.QueryCapacityExceededException;
 import org.apache.druid.query.QueryContexts;
@@ -255,12 +257,14 @@ public class SqlResourceTest extends CalciteTestBase
     };
     final ServiceEmitter emitter = new NoopServiceEmitter();
     final AuthConfig authConfig = new AuthConfig();
+    final DefaultQueryConfig defaultQueryConfig = new DefaultQueryConfig(ImmutableMap.of());
     sqlLifecycleFactory = new SqlLifecycleFactory(
         plannerFactory,
         emitter,
         testRequestLogger,
         scheduler,
-        authConfig
+        authConfig,
+        Suppliers.ofInstance(defaultQueryConfig)
     )
     {
       @Override
@@ -1776,7 +1780,7 @@ public class SqlResourceTest extends CalciteTestBase
        SettableSupplier<Function<Sequence<Object[]>, Sequence<Object[]>>> sequenceMapFnSupplier
    )
    {
-     super(plannerFactory, emitter, requestLogger, queryScheduler, authConfig, startMs, startNs);
+     super(plannerFactory, emitter, requestLogger, queryScheduler, authConfig, new DefaultQueryConfig(ImmutableMap.of()), startMs, startNs);
      this.validateAndAuthorizeLatchSupplier = validateAndAuthorizeLatchSupplier;
      this.planLatchSupplier = planLatchSupplier;
      this.executeLatchSupplier = executeLatchSupplier;