
Commit

Merge remote-tracking branch 'upstream/master' into Unnest_MSQ
somu-imply committed Aug 24, 2023
2 parents eb024d0 + 388d5ec, commit d3c1e76
Showing 145 changed files with 1,069 additions and 4,376 deletions.
@@ -52,13 +52,10 @@
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.groupby.GroupByQuery;
import org.apache.druid.query.groupby.GroupByQueryConfig;
import org.apache.druid.query.groupby.GroupByQueryEngine;
import org.apache.druid.query.groupby.GroupByQueryQueryToolChest;
import org.apache.druid.query.groupby.GroupByQueryRunnerFactory;
import org.apache.druid.query.groupby.GroupingEngine;
import org.apache.druid.query.groupby.ResultRow;
import org.apache.druid.query.groupby.strategy.GroupByStrategySelector;
import org.apache.druid.query.groupby.strategy.GroupByStrategyV1;
import org.apache.druid.query.groupby.strategy.GroupByStrategyV2;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
import org.apache.druid.query.spec.QuerySegmentSpec;
import org.apache.druid.segment.IndexIO;
@@ -128,9 +125,6 @@ public class GroupByTypeInterfaceBenchmark
@Param({"100000"})
private int rowsPerSegment;

@Param({"v2"})
private String defaultStrategy;

@Param({"all"})
private String queryGranularity;

@@ -346,11 +340,6 @@ public void setup() throws IOException
);
final GroupByQueryConfig config = new GroupByQueryConfig()
{
@Override
public String getDefaultStrategy()
{
return defaultStrategy;
}

@Override
public int getBufferGrouperInitialBuckets()
@@ -365,8 +354,6 @@ public HumanReadableBytes getMaxOnDiskStorage()
}
};
config.setSingleThreaded(false);
config.setMaxIntermediateRows(Integer.MAX_VALUE);
config.setMaxResults(Integer.MAX_VALUE);

DruidProcessingConfig druidProcessingConfig = new DruidProcessingConfig()
{
@@ -385,27 +372,19 @@ public String getFormatString()
};

final Supplier<GroupByQueryConfig> configSupplier = Suppliers.ofInstance(config);
final GroupByStrategySelector strategySelector = new GroupByStrategySelector(
final GroupingEngine groupingEngine = new GroupingEngine(
druidProcessingConfig,
configSupplier,
new GroupByStrategyV1(
configSupplier,
new GroupByQueryEngine(configSupplier, bufferPool),
QueryBenchmarkUtil.NOOP_QUERYWATCHER
),
new GroupByStrategyV2(
druidProcessingConfig,
configSupplier,
bufferPool,
mergePool,
TestHelper.makeJsonMapper(),
new ObjectMapper(new SmileFactory()),
QueryBenchmarkUtil.NOOP_QUERYWATCHER
)
bufferPool,
mergePool,
TestHelper.makeJsonMapper(),
new ObjectMapper(new SmileFactory()),
QueryBenchmarkUtil.NOOP_QUERYWATCHER
);

factory = new GroupByQueryRunnerFactory(
strategySelector,
new GroupByQueryQueryToolChest(strategySelector)
groupingEngine,
new GroupByQueryQueryToolChest(groupingEngine)
);
}

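For orientation, here is a minimal sketch of the wiring these benchmarks settle on after the merge: a single GroupingEngine replaces the former GroupByStrategySelector with its v1/v2 strategies, and the same engine instance is handed to both the tool chest and the runner factory. The sketch reuses the benchmark-local fields shown above (druidProcessingConfig, configSupplier, bufferPool, mergePool) and is illustrative rather than a verbatim excerpt.

final GroupingEngine groupingEngine = new GroupingEngine(
    druidProcessingConfig,                 // DruidProcessingConfig from setup()
    configSupplier,                        // Supplier<GroupByQueryConfig>
    bufferPool,                            // processing buffer pool
    mergePool,                             // merge buffer pool
    TestHelper.makeJsonMapper(),           // JSON ObjectMapper
    new ObjectMapper(new SmileFactory()),  // Smile ObjectMapper
    QueryBenchmarkUtil.NOOP_QUERYWATCHER
);

// One engine now backs both the tool chest and the factory.
factory = new GroupByQueryRunnerFactory(
    groupingEngine,
    new GroupByQueryQueryToolChest(groupingEngine)
);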
@@ -77,14 +77,11 @@
import org.apache.druid.query.expression.TestExprMacroTable;
import org.apache.druid.query.groupby.GroupByQuery;
import org.apache.druid.query.groupby.GroupByQueryConfig;
import org.apache.druid.query.groupby.GroupByQueryEngine;
import org.apache.druid.query.groupby.GroupByQueryQueryToolChest;
import org.apache.druid.query.groupby.GroupByQueryRunnerFactory;
import org.apache.druid.query.groupby.GroupByQueryRunnerTest;
import org.apache.druid.query.groupby.GroupingEngine;
import org.apache.druid.query.groupby.ResultRow;
import org.apache.druid.query.groupby.strategy.GroupByStrategySelector;
import org.apache.druid.query.groupby.strategy.GroupByStrategyV1;
import org.apache.druid.query.groupby.strategy.GroupByStrategyV2;
import org.apache.druid.query.planning.DataSourceAnalysis;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
import org.apache.druid.query.spec.QuerySegmentSpec;
@@ -287,11 +284,6 @@ public int getNumThreads()
GroupByQueryRunnerTest.DEFAULT_MAPPER,
new GroupByQueryConfig()
{
@Override
public String getDefaultStrategy()
{
return GroupByStrategySelector.STRATEGY_V2;
}
},
processingConfig
)
@@ -364,25 +356,17 @@ private static GroupByQueryRunnerFactory makeGroupByQueryRunnerFactory(
bufferSupplier,
processingConfig.getNumMergeBuffers()
);
final GroupByStrategySelector strategySelector = new GroupByStrategySelector(
final GroupingEngine groupingEngine = new GroupingEngine(
processingConfig,
configSupplier,
new GroupByStrategyV1(
configSupplier,
new GroupByQueryEngine(configSupplier, bufferPool),
QueryRunnerTestHelper.NOOP_QUERYWATCHER
),
new GroupByStrategyV2(
processingConfig,
configSupplier,
bufferPool,
mergeBufferPool,
mapper,
mapper,
QueryRunnerTestHelper.NOOP_QUERYWATCHER
)
bufferPool,
mergeBufferPool,
mapper,
mapper,
QueryRunnerTestHelper.NOOP_QUERYWATCHER
);
final GroupByQueryQueryToolChest toolChest = new GroupByQueryQueryToolChest(strategySelector);
return new GroupByQueryRunnerFactory(strategySelector, toolChest);
final GroupByQueryQueryToolChest toolChest = new GroupByQueryQueryToolChest(groupingEngine);
return new GroupByQueryRunnerFactory(groupingEngine, toolChest);
}

@TearDown(Level.Trial)
@@ -63,15 +63,12 @@
import org.apache.druid.query.filter.BoundDimFilter;
import org.apache.druid.query.groupby.GroupByQuery;
import org.apache.druid.query.groupby.GroupByQueryConfig;
import org.apache.druid.query.groupby.GroupByQueryEngine;
import org.apache.druid.query.groupby.GroupByQueryQueryToolChest;
import org.apache.druid.query.groupby.GroupByQueryRunnerFactory;
import org.apache.druid.query.groupby.GroupingEngine;
import org.apache.druid.query.groupby.ResultRow;
import org.apache.druid.query.groupby.orderby.DefaultLimitSpec;
import org.apache.druid.query.groupby.orderby.OrderByColumnSpec;
import org.apache.druid.query.groupby.strategy.GroupByStrategySelector;
import org.apache.druid.query.groupby.strategy.GroupByStrategyV1;
import org.apache.druid.query.groupby.strategy.GroupByStrategyV2;
import org.apache.druid.query.ordering.StringComparators;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
import org.apache.druid.query.spec.QuerySegmentSpec;
@@ -139,9 +136,6 @@ public class GroupByBenchmark
@Param({"basic.A", "basic.nested"})
private String schemaAndQuery;

@Param({"v1", "v2"})
private String defaultStrategy;

@Param({"all", "day"})
private String queryGranularity;

@@ -461,11 +455,6 @@ public void setup()
);
final GroupByQueryConfig config = new GroupByQueryConfig()
{
@Override
public String getDefaultStrategy()
{
return defaultStrategy;
}

@Override
public int getBufferGrouperInitialBuckets()
@@ -480,8 +469,6 @@ public HumanReadableBytes getMaxOnDiskStorage()
}
};
config.setSingleThreaded(false);
config.setMaxIntermediateRows(Integer.MAX_VALUE);
config.setMaxResults(Integer.MAX_VALUE);

DruidProcessingConfig druidProcessingConfig = new DruidProcessingConfig()
{
@@ -500,27 +487,19 @@ public String getFormatString()
};

final Supplier<GroupByQueryConfig> configSupplier = Suppliers.ofInstance(config);
final GroupByStrategySelector strategySelector = new GroupByStrategySelector(
final GroupingEngine groupingEngine = new GroupingEngine(
druidProcessingConfig,
configSupplier,
new GroupByStrategyV1(
configSupplier,
new GroupByQueryEngine(configSupplier, bufferPool),
QueryBenchmarkUtil.NOOP_QUERYWATCHER
),
new GroupByStrategyV2(
druidProcessingConfig,
configSupplier,
bufferPool,
mergePool,
TestHelper.makeJsonMapper(),
new ObjectMapper(new SmileFactory()),
QueryBenchmarkUtil.NOOP_QUERYWATCHER
)
bufferPool,
mergePool,
TestHelper.makeJsonMapper(),
new ObjectMapper(new SmileFactory()),
QueryBenchmarkUtil.NOOP_QUERYWATCHER
);

factory = new GroupByQueryRunnerFactory(
strategySelector,
new GroupByQueryQueryToolChest(strategySelector)
groupingEngine,
new GroupByQueryQueryToolChest(groupingEngine)
);
}

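A related simplification runs through these benchmarks: the anonymous GroupByQueryConfig no longer overrides getDefaultStrategy(), and the setMaxIntermediateRows / setMaxResults calls are gone, since those limits applied to the removed v1 strategy. A hedged sketch of the trimmed setup follows; the override bodies and values are placeholders, not taken from the diff.

final GroupByQueryConfig config = new GroupByQueryConfig()
{
  @Override
  public int getBufferGrouperInitialBuckets()
  {
    return 1024;  // placeholder; the real benchmark value sits outside these hunks
  }

  @Override
  public HumanReadableBytes getMaxOnDiskStorage()
  {
    return HumanReadableBytes.valueOf(1_000_000_000);  // placeholder value
  }
};
// setSingleThreaded(false) is kept as in the benchmarks; the strategy selection
// and the v1 row/result caps are no longer configured.
config.setSingleThreaded(false);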
(Diffs for the remaining changed files are not shown.)
