Addressing review comments part 1

Soumyava Das 2022-10-02 12:16:44 -07:00
parent 557081a11e
commit 1b75daf811
8 changed files with 35 additions and 72 deletions

View File

@@ -157,11 +157,12 @@ public abstract class BaseLeafFrameProcessor implements FrameProcessor<Long>
     final boolean retVal = broadcastJoinHelper.buildBroadcastTablesIncrementally(readableInputs);
     if (retVal) {
       InputNumberDataSource inputNumberDataSource = (InputNumberDataSource) query.getDataSource();
-      // The InputNumberData source was going through the broadcastJoinHelper which
-      // was using the JoinableFactoryWrapper to create segment map function.
-      // After refactoring, the segment map function creation is moved to data source
-      // Hence for InputNumberDataSource we are setting the broadcast join helper for the data source
-      // and moving the segment map function creation there
+      // The InputNumberDataSource requires a BroadcastJoinHelper to be able to create its
+      // segment map function. It would be a lot better if the InputNumberDataSource actually
+      // had a way to get that injected into it on its own, but the relationship between these objects
+      // was figured out during a refactor and using a setter here seemed like the least-bad way to
+      // make progress on the refactor without breaking functionality. Hopefully, some future
+      // developer will move this away from a setter.
       inputNumberDataSource.setBroadcastJoinHelper(broadcastJoinHelper);
       segmentMapFn = inputNumberDataSource.createSegmentMapFunction(query, new AtomicLong());
     }
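
The rewritten comment above describes the interim wiring: the BroadcastJoinHelper is handed to the InputNumberDataSource through a setter, and the data source then builds the segment map function itself. A minimal sketch of that flow, using only the names that appear in this hunk (setBroadcastJoinHelper, createSegmentMapFunction) and standing in for the surrounding FrameProcessor plumbing rather than quoting it:

    // Sketch only: mirrors the setter-based wiring described in the comment above.
    // broadcastJoinHelper, readableInputs, query and segmentMapFn are assumed to be the
    // fields/locals of BaseLeafFrameProcessor; this is not a verbatim excerpt of that class.
    if (broadcastJoinHelper.buildBroadcastTablesIncrementally(readableInputs)) {
      final InputNumberDataSource dataSource = (InputNumberDataSource) query.getDataSource();
      // Hand the helper to the data source so it can build its own segment map function;
      // the comment hopes a later refactor replaces this setter with proper injection.
      dataSource.setBroadcastJoinHelper(broadcastJoinHelper);
      segmentMapFn = dataSource.createSegmentMapFunction(query, new AtomicLong());
    }

Once the data source can obtain the helper on its own (for example through injection at construction time), both the cast and the setter call can go away, which is the future cleanup the comment asks for.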

View File

@@ -573,16 +573,6 @@ public class MSQTestBase extends BaseCalciteQueryTest
         )
     ).registerSubtypes(ExternalDataSource.class));
     DruidSecondaryModule.setupJackson(injector, mapper);
-    /*
-    mapper.setInjectableValues(
-        new InjectableValues.Std()
-            .addValue(ObjectMapper.class, mapper)
-            .addValue(Injector.class, injector)
-            .addValue(DataSegment.PruneSpecsHolder.class, DataSegment.PruneSpecsHolder.DEFAULT)
-            .addValue(LocalDataSegmentPuller.class, new LocalDataSegmentPuller())
-            .addValue(ExprMacroTable.class, CalciteTests.createExprMacroTable())
-    );
-    */
     mapper.registerSubtypes(new NamedType(LocalLoadSpec.class, "local"));

View File

@@ -156,7 +156,7 @@ public class JoinDataSource implements DataSource
       final JoinConditionAnalysis conditionAnalysis,
       final JoinType joinType,
       final DimFilter leftFilter,
-      @Nullable @JacksonInject final JoinableFactoryWrapper joinableFactoryWrapper
+      @Nullable final JoinableFactoryWrapper joinableFactoryWrapper
   )
   {
     return new JoinDataSource(

View File

@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package org.apache.druid.segment.join;
 
 import org.apache.druid.query.DataSource;

View File

@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.segment.join;
-
-import org.apache.druid.query.DataSource;
-
-import java.util.Optional;
-
-public class NoopJoinableFactory implements JoinableFactory
-{
-  public static final NoopJoinableFactory INSTANCE = new NoopJoinableFactory();
-
-  protected NoopJoinableFactory()
-  {
-    // Singleton.
-  }
-
-  @Override
-  public boolean isDirectlyJoinable(DataSource dataSource)
-  {
-    return false;
-  }
-
-  @Override
-  public Optional<Joinable> build(DataSource dataSource, JoinConditionAnalysis condition)
-  {
-    return Optional.empty();
-  }
-}

View File

@@ -108,9 +108,9 @@ public class CacheUtil
       ServerType serverType
   )
   {
-    return isQueryCacheable(query, cacheStrategy, cacheConfig, serverType)
+    return cacheConfig.isUseCache()
         && QueryContexts.isUseCache(query)
-        && cacheConfig.isUseCache();
+        && isQueryCacheable(query, cacheStrategy, cacheConfig, serverType);
   }
 
   /**
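
The CacheUtil hunk above only reorders the conjunction: the inexpensive flags on the cache config and the query context are now evaluated first, so short-circuiting skips isQueryCacheable entirely whenever caching is switched off. A small sketch of the resulting check, keeping the call names from the hunk; the method name and generic bounds here are placeholders, not CacheUtil's actual signature:

    // Illustrative only: shows the short-circuit ordering after the change.
    // "shouldUseCache" is a placeholder name; the real CacheUtil method differs.
    static <T> boolean shouldUseCache(
        final Query<T> query,
        final CacheStrategy<T, ?, Query<T>> cacheStrategy,
        final CacheConfig cacheConfig,
        final ServerType serverType
    )
    {
      return cacheConfig.isUseCache()             // node-level config flag, cheapest check
          && QueryContexts.isUseCache(query)      // per-query context flag
          && isQueryCacheable(query, cacheStrategy, cacheConfig, serverType);  // full cacheability inspection
    }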

View File

@@ -93,11 +93,12 @@ public class LocalQuerySegmentWalker implements QuerySegmentWalker
     final AtomicLong cpuAccumulator = new AtomicLong(0L);
 
-    Function<SegmentReference, SegmentReference> segmentMapFn = analysis.getDataSource()
-                                                                        .createSegmentMapFunction(
-                                                                            query,
-                                                                            cpuAccumulator
-                                                                        );
+    Function<SegmentReference, SegmentReference> segmentMapFn = analysis
+        .getDataSource()
+        .createSegmentMapFunction(
+            query,
+            cpuAccumulator
+        );
 
     final QueryRunnerFactory<T, Query<T>> queryRunnerFactory = conglomerate.findFactory(query);

View File

@@ -26,7 +26,6 @@ import com.google.inject.Key;
 import com.google.inject.Module;
 import com.google.inject.TypeLiteral;
 import com.google.inject.name.Names;
-import com.google.inject.util.Providers;
 import org.apache.druid.curator.discovery.DiscoveryModule;
 import org.apache.druid.discovery.NodeRole;
 import org.apache.druid.guice.Jerseys;
@@ -43,7+42,6 @@ import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.query.QuerySegmentWalker;
 import org.apache.druid.query.lookup.LookupSerdeModule;
 import org.apache.druid.segment.join.JoinableFactory;
-import org.apache.druid.segment.join.JoinableFactoryWrapper;
 import org.apache.druid.segment.join.NoopJoinableFactory;
 import org.apache.druid.server.AsyncQueryForwardingServlet;
 import org.apache.druid.server.NoopQuerySegmentWalker;