Fix StringLastAggregatorFactory equals/toString (#14907)

* update test

* update test

* format

* test

* fix0

* Revert "fix0"

This reverts commit 44992cb393.

* ok resultset

* add plan

* update test

* before rewind

* test

* fix toString/compare/test

* move test

* add timeColumn to hashCode
Zoltan Haindrich, 2023-09-08 18:20:54 +02:00, committed by GitHub
parent 647686aee2
commit 699893bcff
2 changed files with 58 additions and 3 deletions


@@ -286,13 +286,14 @@ public class StringLastAggregatorFactory extends AggregatorFactory
StringLastAggregatorFactory that = (StringLastAggregatorFactory) o;
return maxStringBytes == that.maxStringBytes &&
Objects.equals(fieldName, that.fieldName) &&
+ Objects.equals(timeColumn, that.timeColumn) &&
Objects.equals(name, that.name);
}
@Override
public int hashCode()
{
- return Objects.hash(fieldName, name, maxStringBytes);
+ return Objects.hash(fieldName, name, maxStringBytes, timeColumn);
}
@Override
@@ -302,6 +303,7 @@ public class StringLastAggregatorFactory extends AggregatorFactory
"fieldName='" + fieldName + '\'' +
", name='" + name + '\'' +
", maxStringBytes=" + maxStringBytes +
+ ", timeColumn=" + timeColumn +
'}';
}
}
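
For illustration only (not part of this commit): before this change, two StringLastAggregatorFactory instances that differed only in timeColumn compared as equal, because equals(), hashCode(), and toString() ignored the time column. A minimal sketch of the behavior difference, assuming the (name, fieldName, timeColumn, maxStringBytes) constructor order used in the test diff further down; the class and variable names here are hypothetical.

// Hypothetical snippet, not from the patch: two factories that differ only in timeColumn.
import org.apache.druid.query.aggregation.last.StringLastAggregatorFactory;

public class TimeColumnEqualitySketch
{
  public static void main(String[] args)
  {
    StringLastAggregatorFactory onTime =
        new StringLastAggregatorFactory("a0", "cityName", "__time", 1024);
    StringLastAggregatorFactory onVirtualColumn =
        new StringLastAggregatorFactory("a0", "cityName", "v0", 1024);

    // Before this commit both checks treated the factories as interchangeable;
    // with the patched equals()/hashCode() they no longer do.
    System.out.println(onTime.equals(onVirtualColumn));                   // now false
    System.out.println(onTime.hashCode() == onVirtualColumn.hashCode());  // now (almost certainly) false
  }
}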


@@ -24,6 +24,8 @@ import org.apache.druid.common.config.NullHandling;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
+ import org.apache.druid.query.aggregation.first.StringFirstAggregatorFactory;
+ import org.apache.druid.query.aggregation.last.StringLastAggregatorFactory;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.filter.LikeDimFilter;
import org.apache.druid.query.groupby.GroupByQuery;
@@ -626,4 +628,55 @@ public class CalciteSimpleQueryTest extends BaseCalciteQueryTest
)
);
}
@Test
public void testEarliestByLatestByWithExpression()
{
cannotVectorize();
testBuilder()
.sql("SELECT\n"
+ " channel\n"
+ " ,cityName\n"
+ " ,EARLIEST_BY(\"cityName\", MILLIS_TO_TIMESTAMP(17), 125) as latest_by_time_page\n"
+ " ,LATEST_BY(\"cityName\", MILLIS_TO_TIMESTAMP(17), 126) as latest_by_time_page\n"
+ " ,EARLIEST_BY(\"cityName\", TIMESTAMPADD(HOUR, 1, \"__time\"), 127) as latest_by_time_page\n"
+ " ,LATEST_BY(\"cityName\", TIMESTAMPADD(HOUR, 1, \"__time\"), 128) as latest_by_time_page\n"
+ "FROM druid.wikipedia\n"
+ "where channel < '#b' and cityName < 'B'\n"
+ "GROUP BY 1,2"
)
.expectedQueries(
ImmutableList.of(
GroupByQuery.builder()
.setDataSource("wikipedia")
.setInterval(querySegmentSpec(Filtration.eternity()))
.setGranularity(Granularities.ALL)
.setVirtualColumns(
expressionVirtualColumn("v0", "17", ColumnType.LONG),
expressionVirtualColumn("v1", "(\"__time\" + 3600000)", ColumnType.LONG)
)
.setDimensions(dimensions(new DefaultDimensionSpec("channel", "d0"),
new DefaultDimensionSpec("cityName", "d1")))
.setDimFilter(
and(range("channel", ColumnType.STRING, null, "#b", false, true),
range("cityName", ColumnType.STRING, null, "B", false, true)))
.setAggregatorSpecs(
ImmutableList.of(
new StringFirstAggregatorFactory("a0", "cityName", "v0", 125),
new StringLastAggregatorFactory("a1", "cityName", "v0", 126),
new StringFirstAggregatorFactory("a2", "cityName", "v1", 127),
new StringLastAggregatorFactory("a3", "cityName", "v1", 128)
)
)
.setContext(QUERY_CONTEXT_DEFAULT)
.build()))
.expectedResults(
useDefault ? ImmutableList.of(
new Object[]{"#ar.wikipedia", "", "", "", "", ""},
new Object[] {"#ar.wikipedia", "Amman", "Amman", "Amman", "Amman", "Amman"})
: ImmutableList.of(
new Object[] {"#ar.wikipedia", "Amman", "Amman", "Amman", "Amman", "Amman"})
)
.run();
}
}
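
A possible follow-up, not included in this commit: the equals()/hashCode() contract could also be pinned down directly with EqualsVerifier, which is used in other Druid tests. A sketch, assuming the nl.jqno.equalsverifier dependency is on the test classpath; the test class name and the exact EqualsVerifier configuration (warning suppressions, non-null fields) are assumptions and may need tuning for this class.

// Sketch of a dedicated contract test; not part of this patch.
import nl.jqno.equalsverifier.EqualsVerifier;
import org.apache.druid.query.aggregation.last.StringLastAggregatorFactory;
import org.junit.Test;

public class StringLastAggregatorFactoryEqualsContractTest
{
  @Test
  public void testEqualsContract()
  {
    // Verifies that every field (including timeColumn) participates consistently
    // in equals() and hashCode().
    EqualsVerifier.forClass(StringLastAggregatorFactory.class)
                  .usingGetClass()
                  .withNonnullFields("name", "fieldName")
                  .verify();
  }
}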