Catalog granularity accepts query format (#16680)

Previously, the segment granularity for tables in the catalog had to be defined in period format, i.e. `'PT1H'`, `'P1D'`, etc. This disallowed a user from defining a segment granularity of `'ALL'` for a table in the catalog, which may be a valid use case. This change makes it so that a user may define the segment granularity of a table in the catalog as any string that results in a valid granularity using either the `Granularity.fromString(str)` method or `new PeriodGranularity(new Period(value), null, null)`, provided that the granularity maps to a standard supported granularity, i.e. one for which `GranularityType.isStandard(granularity)` returns true. As a result, a user who wants to assign a catalog table's segment granularity to be hourly may assign the segment granularity property of the table to be either `PT1H` or `HOUR`. These are the same formats accepted at query time.
This commit is contained in:
zachjsh 2024-07-02 12:14:28 -04:00 committed by GitHub
parent bd49ecfd29
commit 5e05858ff7
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
3 changed files with 20 additions and 14 deletions

View File

@ -109,7 +109,7 @@ public abstract class ITCatalogIngestAndQueryTest
{ {
String queryFile = "/catalog/implicitCast_select.sql"; String queryFile = "/catalog/implicitCast_select.sql";
String tableName = "testImplicitCast" + operationName; String tableName = "testImplicitCast" + operationName;
TableMetadata table = TableBuilder.datasource(tableName, "P1D") TableMetadata table = TableBuilder.datasource(tableName, "DAY")
.column(Columns.TIME_COLUMN, Columns.LONG) .column(Columns.TIME_COLUMN, Columns.LONG)
.column("double_col1", "DOUBLE") .column("double_col1", "DOUBLE")
.build(); .build();
@ -179,7 +179,7 @@ public abstract class ITCatalogIngestAndQueryTest
{ {
String queryFile = "/catalog/clustering_select.sql"; String queryFile = "/catalog/clustering_select.sql";
String tableName = "testWithClusteringFromCatalog" + operationName; String tableName = "testWithClusteringFromCatalog" + operationName;
TableMetadata table = TableBuilder.datasource(tableName, "P1D") TableMetadata table = TableBuilder.datasource(tableName, "ALL")
.column(Columns.TIME_COLUMN, Columns.LONG) .column(Columns.TIME_COLUMN, Columns.LONG)
.column("bigint_col1", "BIGINT") .column("bigint_col1", "BIGINT")
.property( .property(

View File

@ -33,6 +33,7 @@ import org.apache.druid.java.util.common.granularity.GranularityType;
import org.apache.druid.java.util.common.granularity.PeriodGranularity; import org.apache.druid.java.util.common.granularity.PeriodGranularity;
import org.joda.time.Period; import org.joda.time.Period;
import javax.annotation.Nonnull;
import javax.annotation.Nullable; import javax.annotation.Nullable;
import java.net.URI; import java.net.URI;
@ -63,17 +64,25 @@ public class CatalogUtils
* For the odd interval, the interval name is also accepted (for the other * For the odd interval, the interval name is also accepted (for the other
* intervals, the interval name is the descriptive string). * intervals, the interval name is the descriptive string).
*/ */
public static Granularity asDruidGranularity(String value) public static Granularity asDruidGranularity(@Nonnull String value)
{ {
if (Strings.isNullOrEmpty(value) || value.equalsIgnoreCase(DatasourceDefn.ALL_GRANULARITY)) { if (value.equalsIgnoreCase(DatasourceDefn.ALL_GRANULARITY)) {
return Granularities.ALL; return Granularities.ALL;
} }
Granularity granularity;
try { try {
return new PeriodGranularity(new Period(value), null, null); granularity = Granularity.fromString(value);
} }
catch (IllegalArgumentException e) { catch (IllegalArgumentException e) {
throw new IAE(StringUtils.format("'%s' is an invalid period string", value)); try {
granularity = new PeriodGranularity(new Period(value), null, null);
}
catch (IllegalArgumentException e2) {
throw new IAE("[%s] is an invalid granularity string.", value);
}
} }
return granularity;
} }
/** /**
@ -275,18 +284,12 @@ public class CatalogUtils
return merged; return merged;
} }
public static void validateGranularity(String value) public static void validateGranularity(final String value)
{ {
if (value == null) { if (value == null) {
return; return;
} }
Granularity granularity; final Granularity granularity = asDruidGranularity(value);
try {
granularity = new PeriodGranularity(new Period(value), null, null);
}
catch (IllegalArgumentException e) {
throw new IAE(StringUtils.format("[%s] is an invalid granularity string", value));
}
if (!GranularityType.isStandard(granularity)) { if (!GranularityType.isStandard(granularity)) {
throw new IAE( throw new IAE(
"Unsupported segment graularity. " "Unsupported segment graularity. "

View File

@ -30,6 +30,8 @@ import org.apache.druid.java.util.common.granularity.Granularity;
import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.column.ColumnType;
import javax.annotation.Nullable;
import java.util.Collections; import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -122,6 +124,7 @@ public class DatasourceFacade extends TableFacade
return stringProperty(DatasourceDefn.SEGMENT_GRANULARITY_PROPERTY); return stringProperty(DatasourceDefn.SEGMENT_GRANULARITY_PROPERTY);
} }
@Nullable
public Granularity segmentGranularity() public Granularity segmentGranularity()
{ {
String definedGranularity = segmentGranularityString(); String definedGranularity = segmentGranularityString();