Add @ExtensionPoint and @PublicApi annotations. (#4433)

* Add @ExtensionPoint and @PublicApi annotations.

* Clean up wording.

* Remove unused import.

* Remove unused imports.

* Only types can be extension points.

* Adjust annotations some more.

* Remove unused import.

* Make ServletFilterHolder an extension point.

* Add a couple extension points, and update docs.
This commit is contained in:
Gian Merlino 2017-08-28 14:50:58 -07:00 committed by GitHub
parent b04261e7a2
commit 9fbfc1be32
104 changed files with 326 additions and 73 deletions

View File

@ -18,15 +18,19 @@
*/
package io.druid.data.input;
import io.druid.guice.annotations.ExtensionPoint;
/**
* Committer includes a Runnable and a Jackson-serialized metadata object containing the offset
*/
@ExtensionPoint
public interface Committer extends Runnable
{
/**
* @return A json serialized representation of commit metadata,
* which needs to be serialized and deserialized by Jackson.
* Commit metadata can be a complex type, but we recommend keeping it to List/Map/"Primitive JSON" types
* */
public Object getMetadata();
/**
* @return A json serialized representation of commit metadata,
* which needs to be serialized and deserialized by Jackson.
* Commit metadata can be a complex type, but we recommend keeping it to List/Map/"Primitive JSON" types
*/
public Object getMetadata();
}

View File

@ -19,6 +19,8 @@
package io.druid.data.input;
import io.druid.guice.annotations.ExtensionPoint;
import javax.annotation.Nullable;
import java.io.Closeable;
@ -36,6 +38,7 @@ import java.io.Closeable;
* which will be called on another thread, so the operations inside of that callback must be thread-safe.
* </p>
*/
@ExtensionPoint
public interface Firehose extends Closeable
{
/**

View File

@ -22,6 +22,7 @@ package io.druid.data.input;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import io.druid.data.input.impl.InputRowParser;
import io.druid.data.input.impl.PrefetchableTextFilesFirehoseFactory;
import io.druid.guice.annotations.ExtensionPoint;
import io.druid.java.util.common.parsers.ParseException;
import java.io.File;
@ -32,6 +33,7 @@ import java.io.IOException;
* It currently provides two methods for creating a {@link Firehose} and their default implementations call each other
* for the backward compatibility. Implementations of this interface must implement one of these methods.
*/
@ExtensionPoint
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
public interface FirehoseFactory<T extends InputRowParser>
{

View File

@ -20,8 +20,8 @@
package io.druid.data.input;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import io.druid.data.input.impl.InputRowParser;
import io.druid.guice.annotations.ExtensionPoint;
import io.druid.java.util.common.parsers.ParseException;
import java.io.IOException;
@ -37,6 +37,7 @@ import java.io.IOException;
* value will throw a surprising NPE. Throwing IOException on connection failure or runtime exception on
* invalid configuration is preferred over returning null.
*/
@ExtensionPoint
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
public interface FirehoseFactoryV2<T extends InputRowParser>
{

View File

@ -19,6 +19,8 @@
package io.druid.data.input;
import io.druid.guice.annotations.ExtensionPoint;
import java.io.Closeable;
/**
* This is an interface that holds onto the stream of incoming data. Realtime data ingestion is built around this
@ -44,6 +46,7 @@ import java.io.Closeable;
* which will be called on another thread, so the operations inside of that callback must be thread-safe.
* </p>
*/
@ExtensionPoint
public interface FirehoseV2 extends Closeable
{
/**

View File

@ -19,6 +19,8 @@
package io.druid.data.input;
import io.druid.guice.annotations.ExtensionPoint;
import java.util.List;
/**
@ -28,8 +30,8 @@ import java.util.List;
* implement "schema-less" data ingestion that allows the system to add new dimensions as they appear.
*
*/
public interface
InputRow extends Row
@ExtensionPoint
public interface InputRow extends Row
{
/**
* Returns the dimensions that exist in this row.

View File

@ -19,6 +19,7 @@
package io.druid.data.input;
import io.druid.guice.annotations.PublicApi;
import io.druid.java.util.common.DateTimes;
import org.joda.time.DateTime;
@ -27,6 +28,7 @@ import java.util.Map;
/**
*/
@PublicApi
public class MapBasedInputRow extends MapBasedRow implements InputRow
{
private final List<String> dimensions;

View File

@ -22,6 +22,7 @@ package io.druid.data.input;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.Lists;
import io.druid.guice.annotations.PublicApi;
import io.druid.java.util.common.DateTimes;
import io.druid.java.util.common.parsers.ParseException;
import org.joda.time.DateTime;
@ -33,6 +34,7 @@ import java.util.regex.Pattern;
/**
*/
@PublicApi
public class MapBasedRow implements Row
{
private static final Pattern LONG_PAT = Pattern.compile("[-|+]?\\d+");

View File

@ -21,6 +21,7 @@ package io.druid.data.input;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import io.druid.guice.annotations.PublicApi;
import org.joda.time.DateTime;
import java.util.List;
@ -29,6 +30,7 @@ import java.util.List;
* A Row of data. This can be used for both input and output into various parts of the system. It assumes
* that the user already knows the schema of the row and can query for the parts that they care about.
*/
@PublicApi
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "version", defaultImpl = MapBasedRow.class)
@JsonSubTypes(value = {
@JsonSubTypes.Type(name = "v1", value = MapBasedRow.class)

View File

@ -23,33 +23,14 @@ import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Maps;
import io.druid.java.util.common.ISE;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
/**
*/
public class Rows
{
public static InputRow toCaseInsensitiveInputRow(final Row row, final List<String> dimensions)
{
if (row instanceof MapBasedRow) {
MapBasedRow mapBasedRow = (MapBasedRow) row;
TreeMap<String, Object> caseInsensitiveMap = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
caseInsensitiveMap.putAll(mapBasedRow.getEvent());
return new MapBasedInputRow(
mapBasedRow.getTimestamp(),
dimensions,
caseInsensitiveMap
);
}
throw new ISE("Can only convert MapBasedRow objects because we are ghetto like that.");
}
/**
* @param timeStamp rollup up timestamp to be used to create group key
* @param inputRow input row

View File

@ -26,10 +26,12 @@ import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonValue;
import com.google.common.base.Preconditions;
import io.druid.guice.annotations.PublicApi;
import io.druid.java.util.common.StringUtils;
/**
*/
@PublicApi
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = StringDimensionSchema.class)
@JsonSubTypes(value = {
@JsonSubTypes.Type(name = DimensionSchema.STRING_TYPE_NAME, value = StringDimensionSchema.class),

View File

@ -28,7 +28,7 @@ import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import io.druid.guice.annotations.PublicApi;
import io.druid.java.util.common.parsers.ParserUtils;
import javax.annotation.Nullable;
@ -37,7 +37,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
@PublicApi
public class DimensionsSpec
{
private final List<DimensionSchema> dimensions;

View File

@ -22,7 +22,9 @@ package io.druid.data.input.impl;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import io.druid.data.input.InputRow;
import io.druid.guice.annotations.ExtensionPoint;
@ExtensionPoint
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = StringInputRowParser.class)
@JsonSubTypes(value = {
@JsonSubTypes.Type(name = "string", value = StringInputRowParser.class),

View File

@ -22,7 +22,6 @@ package io.druid.data.input.impl;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.druid.java.util.common.parsers.JSONToLowerParser;
import io.druid.java.util.common.parsers.Parser;

View File

@ -23,7 +23,6 @@ import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonParser.Feature;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.druid.java.util.common.parsers.JSONPathParser;
import io.druid.java.util.common.parsers.Parser;

View File

@ -22,7 +22,6 @@ package io.druid.data.input.impl;
import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.druid.java.util.common.ISE;
import io.druid.java.util.common.parsers.JavaScriptParser;
import io.druid.java.util.common.parsers.Parser;

View File

@ -22,13 +22,12 @@ package io.druid.data.input.impl;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import io.druid.guice.annotations.ExtensionPoint;
import io.druid.java.util.common.parsers.Parser;
import java.util.List;
/**
*/
@ExtensionPoint
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "format", defaultImpl = DelimitedParseSpec.class)
@JsonSubTypes(value = {
@JsonSubTypes.Type(name = "json", value = JSONParseSpec.class),

View File

@ -23,7 +23,6 @@ import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import io.druid.java.util.common.parsers.Parser;
import io.druid.java.util.common.parsers.RegexParser;

View File

@ -22,6 +22,7 @@ package io.druid.data.input.impl;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Function;
import io.druid.guice.annotations.PublicApi;
import io.druid.java.util.common.parsers.TimestampParser;
import org.joda.time.DateTime;
@ -31,6 +32,7 @@ import java.util.Objects;
/**
*/
@PublicApi
public class TimestampSpec
{
private static class ParseCtx

View File

@ -22,16 +22,18 @@ package io.druid.guice;
import com.google.inject.Binder;
import com.google.inject.Key;
import com.google.inject.multibindings.MapBinder;
import io.druid.guice.annotations.PublicApi;
import io.druid.segment.loading.DataSegmentArchiver;
import io.druid.segment.loading.DataSegmentFinder;
import io.druid.segment.loading.DataSegmentMover;
import io.druid.segment.loading.DataSegmentKiller;
import io.druid.segment.loading.DataSegmentMover;
import io.druid.segment.loading.DataSegmentPuller;
import io.druid.segment.loading.DataSegmentPusher;
import io.druid.tasklogs.TaskLogs;
/**
*/
@PublicApi
public class Binders
{
public static MapBinder<String, DataSegmentPuller> dataSegmentPullerBinder(Binder binder)

View File

@ -23,6 +23,7 @@ import com.google.common.base.Predicate;
import com.google.inject.Binder;
import com.google.inject.TypeLiteral;
import com.google.inject.multibindings.Multibinder;
import io.druid.guice.annotations.PublicApi;
import java.lang.annotation.Annotation;
import java.util.Properties;
@ -43,6 +44,7 @@ import java.util.Properties;
* At injection time, you will get the items that satisfy their corresponding predicates by calling
* injector.getInstance(Key.get(new TypeLiteral<Set<Animal>>(){}))
*/
@PublicApi
public class ConditionalMultibind<T>
{

View File

@ -21,9 +21,11 @@ package io.druid.guice;
import com.google.inject.Binder;
import com.google.inject.Module;
import io.druid.guice.annotations.PublicApi;
/**
*/
@PublicApi
public class DruidGuiceExtensions implements Module
{
@Override

View File

@ -23,9 +23,11 @@ import com.google.inject.Key;
import com.google.inject.Provider;
import com.google.inject.Scope;
import com.google.inject.Scopes;
import io.druid.guice.annotations.PublicApi;
/**
*/
@PublicApi
public class DruidScopes
{
public static final Scope SINGLETON = new Scope()

View File

@ -23,9 +23,11 @@ import com.google.inject.Binder;
import com.google.inject.TypeLiteral;
import com.google.inject.multibindings.Multibinder;
import io.druid.guice.annotations.JSR311Resource;
import io.druid.guice.annotations.PublicApi;
/**
*/
@PublicApi
public class Jerseys
{
public static void addResource(Binder binder, Class<?> resourceClazz)

View File

@ -26,6 +26,7 @@ import com.google.inject.Inject;
import com.google.inject.Key;
import com.google.inject.Provider;
import com.google.inject.util.Types;
import io.druid.guice.annotations.PublicApi;
import java.lang.annotation.Annotation;
import java.lang.reflect.ParameterizedType;
@ -76,6 +77,7 @@ import java.util.Properties;
*
* @param <T> type of config object to provide.
*/
@PublicApi
public class JsonConfigProvider<T> implements Provider<Supplier<T>>
{
@SuppressWarnings("unchecked")

View File

@ -20,6 +20,7 @@
package io.druid.guice;
import com.google.inject.ScopeAnnotation;
import io.druid.guice.annotations.PublicApi;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
@ -31,6 +32,7 @@ import java.lang.annotation.Target;
@Target({ElementType.TYPE, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@ScopeAnnotation
@PublicApi
public @interface LazySingleton
{
}

View File

@ -27,7 +27,6 @@ import com.google.inject.Provides;
import com.google.inject.TypeLiteral;
import com.google.inject.multibindings.Multibinder;
import com.google.inject.name.Names;
import io.druid.java.util.common.lifecycle.Lifecycle;
import java.lang.annotation.Annotation;

View File

@ -23,7 +23,6 @@ import com.google.common.collect.Lists;
import com.google.inject.Key;
import com.google.inject.Provider;
import com.google.inject.Scope;
import io.druid.java.util.common.lifecycle.Lifecycle;
import io.druid.java.util.common.logger.Logger;

View File

@ -20,6 +20,7 @@
package io.druid.guice;
import com.google.inject.ScopeAnnotation;
import io.druid.guice.annotations.PublicApi;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
@ -34,6 +35,7 @@ import java.lang.annotation.Target;
@Target({ ElementType.TYPE, ElementType.METHOD })
@Retention(RetentionPolicy.RUNTIME)
@ScopeAnnotation
@PublicApi
public @interface ManageLifecycle
{
}

View File

@ -20,6 +20,7 @@
package io.druid.guice;
import com.google.inject.ScopeAnnotation;
import io.druid.guice.annotations.PublicApi;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
@ -34,6 +35,7 @@ import java.lang.annotation.Target;
@Target({ ElementType.TYPE, ElementType.METHOD })
@Retention(RetentionPolicy.RUNTIME)
@ScopeAnnotation
@PublicApi
public @interface ManageLifecycleLast
{
}

View File

@ -30,6 +30,7 @@ import com.google.inject.TypeLiteral;
import com.google.inject.binder.ScopedBindingBuilder;
import com.google.inject.multibindings.MapBinder;
import com.google.inject.util.Types;
import io.druid.guice.annotations.PublicApi;
import io.druid.java.util.common.StringUtils;
import javax.annotation.Nullable;
@ -45,6 +46,7 @@ import java.util.Properties;
* returned by the optionBinder() method. Multiple different modules can call optionBinder and all options will be
* reflected at injection time as long as equivalent interface Key objects are passed into the various methods.
*/
@PublicApi
public class PolyBind
{
/**

View File

@ -0,0 +1,50 @@
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.guice.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Signifies that the annotated type is an extension point. Extension points are interfaces or non-final classes that
 * may be subclassed in extensions in order to add functionality to Druid. Extension points may change in breaking ways
 * only between major Druid release lines (e.g. 0.10.x -> 0.11.0), but otherwise must remain stable. Extension points
 * may change at any time in non-breaking ways, however, such as by adding new default methods to an interface.
 *
 * All public and protected fields, methods, and constructors of annotated classes and interfaces are considered
 * stable in this sense. If a class is not annotated, but an individual field, method, or constructor is
 * annotated, then only that particular field, method, or constructor is considered an extension API.
 *
 * Extension points are all considered public APIs in the sense of {@link PublicApi}, even if not explicitly annotated
 * as such.
 *
 * Note that there are a number of injectable interfaces that are not annotated with {@code ExtensionPoint}. You may
 * still extend these interfaces in extensions, but your extension may need to be recompiled even for a minor
 * update of Druid.
 *
 * @see PublicApi
 */
@Target({ElementType.TYPE})
@Retention(RetentionPolicy.SOURCE)
public @interface ExtensionPoint
{
}

View File

@ -31,6 +31,7 @@ import java.lang.annotation.Target;
@BindingAnnotation
@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@PublicApi
public @interface Global
{
}

View File

@ -31,6 +31,7 @@ import java.lang.annotation.Target;
@BindingAnnotation
@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@PublicApi
public @interface JSR311Resource
{
}

View File

@ -31,6 +31,7 @@ import java.lang.annotation.Target;
@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@BindingAnnotation
@PublicApi
public @interface Json
{
}

View File

@ -0,0 +1,51 @@
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.guice.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Signifies that the annotated entity is a public API for extension authors. Public APIs may change in breaking ways
 * only between major Druid release lines (e.g. 0.10.x -> 0.11.0), but otherwise must remain stable. Public APIs may
 * change at any time in non-breaking ways, however, such as by adding new fields, methods, or constructors.
 *
 * Note that interfaces annotated with {@code PublicApi} but not with {@link ExtensionPoint} are not meant to be
 * subclassed in extensions. In this case, the annotation simply signifies that the interface is stable for callers.
 * In particular, since it is not meant to be subclassed, new non-default methods may be added to an interface and
 * new abstract methods may be added to a class.
 *
 * If a class or interface is annotated, then all public and protected fields, methods, and constructors of that class
 * or interface are considered stable in this sense. If a class is not annotated, but an individual field, method, or
 * constructor is annotated, then only that particular field, method, or constructor is considered a public API.
 *
 * Classes, fields, methods, and constructors _not_ annotated with {@code @PublicApi} may be modified or removed
 * in any Druid release, unless they are annotated with {@link ExtensionPoint} (which implies they are a public API
 * as well).
 *
 * @see ExtensionPoint
 */
@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD, ElementType.CONSTRUCTOR})
@Retention(RetentionPolicy.SOURCE)
public @interface PublicApi
{
}

View File

@ -31,6 +31,7 @@ import java.lang.annotation.Target;
@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@BindingAnnotation
@PublicApi
public @interface Self
{
}

View File

@ -31,6 +31,7 @@ import java.lang.annotation.Target;
@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@BindingAnnotation
@PublicApi
public @interface Smile
{
}

View File

@ -20,11 +20,13 @@
package io.druid.initialization;
import com.fasterxml.jackson.databind.Module;
import io.druid.guice.annotations.ExtensionPoint;
import java.util.List;
/**
*/
@ExtensionPoint
public interface DruidModule extends com.google.inject.Module
{
public List<? extends Module> getJacksonModules();

View File

@ -21,7 +21,13 @@ package io.druid.js;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.druid.guice.annotations.PublicApi;
/**
* Should be used by extension filters, aggregators, etc, that use JavaScript to determine if JavaScript is enabled
* or not.
*/
@PublicApi
public class JavaScriptConfig
{
public static final int DEFAULT_OPTIMIZATION_LEVEL = 9;

View File

@ -21,6 +21,7 @@ package io.druid.segment;
import com.google.common.io.Files;
import com.google.common.primitives.Ints;
import io.druid.guice.annotations.PublicApi;
import io.druid.java.util.common.IOE;
import java.io.File;
@ -29,7 +30,9 @@ import java.io.IOException;
import java.io.InputStream;
/**
* Utility methods useful for implementing deep storage extensions.
*/
@PublicApi
public class SegmentUtils
{
public static int getVersionFromDir(File inDir) throws IOException

View File

@ -19,10 +19,12 @@
package io.druid.segment.loading;
import io.druid.guice.annotations.ExtensionPoint;
import io.druid.timeline.DataSegment;
import javax.annotation.Nullable;
@ExtensionPoint
public interface DataSegmentArchiver
{
/**

View File

@ -19,6 +19,7 @@
package io.druid.segment.loading;
import io.druid.guice.annotations.ExtensionPoint;
import io.druid.timeline.DataSegment;
import java.util.Set;
@ -27,6 +28,7 @@ import java.util.Set;
* A DataSegmentFinder is responsible for finding Druid segments underneath a specified directory and optionally updates
* all descriptor.json files on deep storage with correct loadSpec.
*/
@ExtensionPoint
public interface DataSegmentFinder
{
/**

View File

@ -19,12 +19,14 @@
package io.druid.segment.loading;
import io.druid.guice.annotations.ExtensionPoint;
import io.druid.timeline.DataSegment;
import java.io.IOException;
/**
*/
@ExtensionPoint
public interface DataSegmentKiller
{
void kill(DataSegment segments) throws SegmentLoadingException;

View File

@ -19,10 +19,12 @@
package io.druid.segment.loading;
import io.druid.guice.annotations.ExtensionPoint;
import io.druid.timeline.DataSegment;
import java.util.Map;
@ExtensionPoint
public interface DataSegmentMover
{
public DataSegment move(DataSegment segment, Map<String, Object> targetLoadSpec) throws SegmentLoadingException;

View File

@ -19,6 +19,7 @@
package io.druid.segment.loading;
import io.druid.guice.annotations.ExtensionPoint;
import io.druid.timeline.DataSegment;
import java.io.File;
@ -26,6 +27,7 @@ import java.io.File;
/**
* A DataSegmentPuller is responsible for pulling data for a particular segment into a particular directory
*/
@ExtensionPoint
public interface DataSegmentPuller
{
/**

View File

@ -20,6 +20,7 @@
package io.druid.segment.loading;
import com.google.common.base.Joiner;
import io.druid.guice.annotations.ExtensionPoint;
import io.druid.java.util.common.StringUtils;
import io.druid.timeline.DataSegment;
@ -30,6 +31,7 @@ import java.util.Collections;
import java.util.List;
import java.util.Map;
@ExtensionPoint
public interface DataSegmentPusher
{
Joiner JOINER = Joiner.on("/").skipNulls();

View File

@ -20,12 +20,14 @@
package io.druid.segment.loading;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import io.druid.guice.annotations.ExtensionPoint;
import java.io.File;
/**
* A means of pulling segment files into a destination directory
*/
@ExtensionPoint
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
public interface LoadSpec
{

View File

@ -19,10 +19,12 @@
package io.druid.segment.loading;
import io.druid.guice.annotations.PublicApi;
import io.druid.java.util.common.StringUtils;
/**
*/
@PublicApi
public class SegmentLoadingException extends Exception
{
public SegmentLoadingException(

View File

@ -20,6 +20,7 @@
package io.druid.segment.loading;
import com.google.common.base.Predicate;
import io.druid.guice.annotations.ExtensionPoint;
import java.io.IOException;
import java.io.InputStream;
@ -28,6 +29,7 @@ import java.net.URI;
/**
* A URIDataPuller has handlings for URI based data
*/
@ExtensionPoint
public interface URIDataPuller
{
/**

View File

@ -21,7 +21,6 @@ package io.druid.tasklogs;
import com.google.common.base.Optional;
import com.google.common.io.ByteSource;
import io.druid.java.util.common.logger.Logger;
import java.io.File;

View File

@ -19,10 +19,13 @@
package io.druid.tasklogs;
import io.druid.guice.annotations.ExtensionPoint;
import java.io.IOException;
/**
*/
@ExtensionPoint
public interface TaskLogKiller
{
void killAll() throws IOException;

View File

@ -19,12 +19,15 @@
package io.druid.tasklogs;
import io.druid.guice.annotations.ExtensionPoint;
import java.io.File;
import java.io.IOException;
/**
* Something that knows how to persist local task logs to some form of long-term storage.
*/
@ExtensionPoint
public interface TaskLogPusher
{
public void pushTaskLog(String taskid, File logFile) throws IOException;

View File

@ -21,12 +21,14 @@ package io.druid.tasklogs;
import com.google.common.base.Optional;
import com.google.common.io.ByteSource;
import io.druid.guice.annotations.ExtensionPoint;
import java.io.IOException;
/**
* Something that knows how to stream logs for tasks.
*/
@ExtensionPoint
public interface TaskLogStreamer
{
/**

View File

@ -19,6 +19,9 @@
package io.druid.tasklogs;
import io.druid.guice.annotations.ExtensionPoint;
/**
 * Aggregate extension point combining {@link TaskLogStreamer}, {@link TaskLogPusher}, and {@link TaskLogKiller};
 * declares no members of its own. Implement this to provide a single task-log backend that can stream, push,
 * and delete logs.
 */
@ExtensionPoint
public interface TaskLogs extends TaskLogStreamer, TaskLogPusher, TaskLogKiller
{
}

View File

@ -31,6 +31,7 @@ import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Interner;
import com.google.common.collect.Interners;
import com.google.common.collect.Iterables;
import io.druid.guice.annotations.PublicApi;
import io.druid.jackson.CommaListJoinDeserializer;
import io.druid.jackson.CommaListJoinSerializer;
import io.druid.java.util.common.granularity.Granularities;
@ -46,6 +47,7 @@ import java.util.Map;
/**
*/
@PublicApi
public class DataSegment implements Comparable<DataSegment>
{
public static String delimiter = "_";

View File

@ -20,6 +20,7 @@
package io.druid.timeline;
import com.google.common.base.Function;
import io.druid.guice.annotations.PublicApi;
import io.druid.java.util.common.IAE;
import io.druid.java.util.common.StringUtils;
import io.druid.java.util.common.logger.Logger;
@ -33,6 +34,7 @@ import java.util.Objects;
/**
* identifier to DataSegment.
*/
@PublicApi
public class DataSegmentUtils
{
private static final Logger LOGGER = new Logger(DataSegmentUtils.class);

View File

@ -28,7 +28,8 @@ import java.util.List;
import java.util.Map;
/**
* A Marker interface that exists to combine ShardSpec objects together for Jackson
* A Marker interface that exists to combine ShardSpec objects together for Jackson. Note that this is not an
* extension API. Extensions are not expected to create new kinds of ShardSpecs.
*/
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
@JsonSubTypes({

View File

@ -20,15 +20,17 @@
package io.druid.utils;
import io.druid.guice.annotations.PublicApi;
import io.druid.java.util.common.logger.Logger;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import io.druid.java.util.common.logger.Logger;
/**
*/
@PublicApi
public class CompressionUtils
{
private static final Logger log = new Logger(CompressionUtils.class);

View File

@ -19,8 +19,11 @@
package io.druid.utils;
import io.druid.guice.annotations.PublicApi;
/**
*/
@PublicApi
public class Runnables
{
public static Runnable getNoopRunnable()

View File

@ -20,7 +20,6 @@
package io.druid.data.input.impl;
import com.google.common.collect.Lists;
import io.druid.java.util.common.parsers.Parser;
import junit.framework.Assert;
import org.junit.Test;

View File

@ -20,9 +20,7 @@
package io.druid.data.input.impl;
import com.google.common.collect.Lists;
import io.druid.java.util.common.parsers.ParseException;
import org.junit.Test;
import java.util.Arrays;

View File

@ -21,11 +21,13 @@ package io.druid.metadata;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import io.druid.guice.annotations.ExtensionPoint;
/**
* Implement this for different ways to (optionally securely) access secrets.
*/
@ExtensionPoint
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DefaultPasswordProvider.class)
@JsonSubTypes(value = {
@JsonSubTypes.Type(name = "default", value = DefaultPasswordProvider.class),

View File

@ -19,8 +19,10 @@
package io.druid.timeline;
import io.druid.guice.annotations.PublicApi;
import org.joda.time.Interval;
@PublicApi
public interface LogicalSegment
{
public Interval getInterval();

View File

@ -10,12 +10,21 @@ Druid uses a module system that allows for the addition of extensions at runtime
Druid's extensions leverage Guice in order to add things at runtime. Basically, Guice is a framework for Dependency Injection, but we use it to hold the expected object graph of the Druid process. Extensions can make any changes they want/need to the object graph via adding Guice bindings. While the extensions actually give you the capability to change almost anything however you want, in general, we expect people to want to extend one of the things listed below. This means that we honor our [versioning strategy](./versioning.html) for changes that affect the interfaces called out on this page, but other interfaces are deemed "internal" and can be changed in an incompatible manner even between patch releases.
1. Add a new deep storage implementation
1. Add a new Firehose
1. Add Aggregators
1. Add Complex metrics
1. Add new Query types
1. Add new Jersey resources
1. Add a new deep storage implementation by extending the `io.druid.segment.loading.DataSegment*` and
`io.druid.tasklogs.TaskLog*` classes.
1. Add a new Firehose by extending `io.druid.data.input.FirehoseFactory`.
1. Add a new input parser by extending `io.druid.data.input.impl.InputRowParser`.
1. Add a new string-based input format by extending `io.druid.data.input.impl.ParseSpec`.
1. Add Aggregators by extending `io.druid.query.aggregation.AggregatorFactory`, `io.druid.query.aggregation.Aggregator`,
and `io.druid.query.aggregation.BufferAggregator`.
1. Add PostAggregators by extending `io.druid.query.aggregation.PostAggregator`.
1. Add ExtractionFns by extending `io.druid.query.extraction.ExtractionFn`.
1. Add Complex metrics by extending `io.druid.segment.serde.ComplexMetricsSerde`.
1. Add new Query types by extending `io.druid.query.QueryRunnerFactory`, `io.druid.query.QueryToolChest`, and
`io.druid.query.Query`.
1. Add new Jersey resources by calling `Jerseys.addResource(binder, clazz)`.
1. Add new Jetty filters by extending `io.druid.server.initialization.jetty.ServletFilterHolder`.
1. Add new secret providers by extending `io.druid.metadata.PasswordProvider`.
1. Bundle your extension with all the other Druid extensions
Extensions are added to the system via an implementation of `io.druid.initialization.DruidModule`.

View File

@ -24,6 +24,7 @@ import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.common.collect.Ordering;
import io.druid.guice.annotations.ExtensionPoint;
import io.druid.query.spec.QuerySegmentSpec;
import org.joda.time.Duration;
import org.joda.time.Interval;
@ -33,6 +34,7 @@ import java.util.Map;
/**
*/
@ExtensionPoint
public abstract class BaseQuery<T extends Comparable<T>> implements Query<T>
{
public static void checkInterrupted()

View File

@ -19,12 +19,14 @@
package io.druid.query;
import io.druid.guice.annotations.PublicApi;
import org.joda.time.Interval;
import java.util.List;
/**
*/
@PublicApi
public interface BySegmentResultValue<T>
{
public List<T> getResults();

View File

@ -21,11 +21,13 @@ package io.druid.query;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.base.Function;
import io.druid.guice.annotations.ExtensionPoint;
import java.util.concurrent.ExecutorService;
/**
*/
@ExtensionPoint
public interface CacheStrategy<T, CacheType, QueryType extends Query<T>>
{
/**

View File

@ -19,6 +19,8 @@
package io.druid.query;
import io.druid.guice.annotations.PublicApi;
/**
* This factory is used for DI of custom {@link QueryMetrics} implementations for all query types, which don't (yet)
 * need to emit custom dimensions and/or metrics, i.e., they are good with the generic {@link QueryMetrics} interface.
@ -32,7 +34,11 @@ package io.druid.query;
*
* And then setting property:
* druid.query.generic.queryMetricsFactory=myCustomGenericQueryMetricsFactory
*
* Unlike {@link QueryMetrics} itself, this interface is considered stable and is expected to be injected into custom
* Query extensions that do not want to worry about the potential instability of {@link QueryMetrics}.
*/
@PublicApi
public interface GenericQueryMetricsFactory
{
/**

View File

@ -22,6 +22,7 @@ package io.druid.query;
import com.google.inject.Inject;
import com.metamx.emitter.service.ServiceEmitter;
import io.druid.guice.annotations.Processing;
import io.druid.guice.annotations.PublicApi;
import java.util.concurrent.ExecutorService;
@ -40,6 +41,7 @@ public class IntervalChunkingQueryRunnerDecorator
this.emitter = emitter;
}
@PublicApi
public <T> QueryRunner<T> decorate(QueryRunner<T> delegate, QueryToolChest<T, ? extends Query<T>> toolChest)
{
return new IntervalChunkingQueryRunner<T>(delegate, (QueryToolChest<T, Query<T>>) toolChest,

View File

@ -22,6 +22,7 @@ package io.druid.query;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import io.druid.guice.annotations.PublicApi;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.PostAggregator;
@ -34,6 +35,7 @@ import java.util.Set;
/**
*/
@PublicApi
public class Queries
{
public static List<PostAggregator> decoratePostAggregators(

View File

@ -22,6 +22,7 @@ package io.druid.query;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.google.common.collect.Ordering;
import io.druid.guice.annotations.ExtensionPoint;
import io.druid.query.datasourcemetadata.DataSourceMetadataQuery;
import io.druid.query.filter.DimFilter;
import io.druid.query.groupby.GroupByQuery;
@ -38,6 +39,7 @@ import org.joda.time.Interval;
import java.util.List;
import java.util.Map;
@ExtensionPoint
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "queryType")
@JsonSubTypes(value = {
@JsonSubTypes.Type(name = Query.TIMESERIES, value = TimeseriesQuery.class),

View File

@ -21,9 +21,11 @@ package io.druid.query;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import io.druid.guice.annotations.PublicApi;
import io.druid.java.util.common.IAE;
import io.druid.java.util.common.ISE;
@PublicApi
public class QueryContexts
{
public static final String PRIORITY_KEY = "priority";

View File

@ -78,6 +78,9 @@ import java.util.List;
* dimension or metric is useful and not very expensive to process and store then emit, skip (see above Goals, 1.)
* otherwise.
*
* <p>This interface can be extended, but is not marked as an {@code ExtensionPoint}, because it may change in breaking
* ways even in minor releases.
*
 * <p>If implementors of custom QueryMetrics don't want to fix builds on every Druid release (e.g., if they want to add
* a single dimension to emitted events and don't want to alter other dimensions and emitted metrics), they could
* inherit their custom QueryMetrics from {@link DefaultQueryMetrics} or query-specific default implementation class,

View File

@ -20,6 +20,7 @@
package io.druid.query;
import com.google.common.base.Preconditions;
import io.druid.guice.annotations.PublicApi;
import io.druid.java.util.common.guava.Sequence;
import io.druid.query.spec.QuerySegmentSpec;
@ -30,6 +31,7 @@ import java.util.Map;
* An immutable composite object of {@link Query} + extra stuff needed in {@link QueryRunner}s. This "extra stuff"
* is only {@link QueryMetrics} yet.
*/
@PublicApi
public final class QueryPlus<T>
{
/**

View File

@ -19,10 +19,12 @@
package io.druid.query;
import io.druid.guice.annotations.ExtensionPoint;
import io.druid.java.util.common.guava.Sequence;
import java.util.Map;
@ExtensionPoint
public interface QueryRunner<T>
{
/**

View File

@ -19,6 +19,7 @@
package io.druid.query;
import io.druid.guice.annotations.ExtensionPoint;
import io.druid.segment.Segment;
import java.util.concurrent.ExecutorService;
@ -26,6 +27,7 @@ import java.util.concurrent.ExecutorService;
/**
 * An interface that defines the nitty-gritty implementation details of a Query on a Segment
*/
@ExtensionPoint
public interface QueryRunnerFactory<T, QueryType extends Query<T>>
{
/**

View File

@ -21,6 +21,7 @@ package io.druid.query;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.base.Function;
import io.druid.guice.annotations.ExtensionPoint;
import io.druid.query.aggregation.MetricManipulationFn;
import io.druid.timeline.LogicalSegment;
@ -28,10 +29,9 @@ import javax.annotation.Nullable;
import java.util.List;
/**
* The broker-side (also used by server in some cases) API for a specific Query type. This API is still undergoing
* evolution and is only semi-stable, so proprietary Query implementations should be ready for the potential
* maintenance burden when upgrading versions.
* The broker-side (also used by server in some cases) API for a specific Query type.
*/
@ExtensionPoint
public abstract class QueryToolChest<ResultType, QueryType extends Query<ResultType>>
{
/**

View File

@ -21,12 +21,14 @@ package io.druid.query;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.druid.guice.annotations.PublicApi;
import org.joda.time.DateTime;
import java.util.function.Function;
/**
*/
@PublicApi
public class Result<T> implements Comparable<Result<T>>
{
public static String MISSING_SEGMENTS_KEY = "missingSegments";

View File

@ -21,12 +21,14 @@ package io.druid.query;
import com.google.common.collect.Ordering;
import com.google.common.primitives.Longs;
import io.druid.guice.annotations.PublicApi;
import io.druid.java.util.common.granularity.Granularity;
import java.util.Comparator;
/**
*/
@PublicApi
public class ResultGranularTimestampComparator<T> implements Comparator<Result<T>>
{
private final Granularity gran;

View File

@ -21,6 +21,7 @@ package io.druid.query;
import com.google.common.collect.Ordering;
import io.druid.common.guava.CombiningSequence;
import io.druid.guice.annotations.PublicApi;
import io.druid.java.util.common.guava.Sequence;
import io.druid.java.util.common.guava.nary.BinaryFn;
@ -28,6 +29,7 @@ import java.util.Map;
/**
*/
@PublicApi
public abstract class ResultMergeQueryRunner<T> extends BySegmentSkippingQueryRunner<T>
{
public ResultMergeQueryRunner(

View File

@ -19,6 +19,8 @@
package io.druid.query.aggregation;
import io.druid.guice.annotations.ExtensionPoint;
import java.io.Closeable;
/**
@ -30,6 +32,7 @@ import java.io.Closeable;
* to aggregate(). This is currently (as of this documentation) implemented through the use of {@link
* io.druid.segment.ColumnValueSelector} objects.
*/
@ExtensionPoint
public interface Aggregator extends Closeable
{
void aggregate();

View File

@ -19,6 +19,7 @@
package io.druid.query.aggregation;
import io.druid.guice.annotations.ExtensionPoint;
import io.druid.java.util.common.Cacheable;
import io.druid.java.util.common.UOE;
import io.druid.java.util.common.logger.Logger;
@ -35,6 +36,7 @@ import java.util.Map;
* max, sum of metric columns, or cardinality of dimension columns (see {@link
* io.druid.query.aggregation.cardinality.CardinalityAggregatorFactory}).
*/
@ExtensionPoint
public abstract class AggregatorFactory implements Cacheable
{
private static final Logger log = new Logger(AggregatorFactory.class);

View File

@ -20,6 +20,7 @@
package io.druid.query.aggregation;
import com.google.common.collect.Lists;
import io.druid.guice.annotations.PublicApi;
import io.druid.java.util.common.Pair;
import io.druid.math.expr.ExprMacroTable;
import io.druid.math.expr.Parser;
@ -34,6 +35,7 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Set;
@PublicApi
public class AggregatorUtil
{
public static final byte STRING_SEPARATOR = (byte) 0xFF;

View File

@ -19,6 +19,7 @@
package io.druid.query.aggregation;
import io.druid.guice.annotations.ExtensionPoint;
import io.druid.query.monomorphicprocessing.CalledFromHotLoop;
import io.druid.query.monomorphicprocessing.HotLoopCallee;
import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
@ -33,6 +34,7 @@ import java.nio.ByteBuffer;
* Thus, an Aggregator can be thought of as a closure over some other thing that is stateful and changes between calls
* to aggregate(...).
*/
@ExtensionPoint
public interface BufferAggregator extends HotLoopCallee
{
/**

View File

@ -19,8 +19,11 @@
package io.druid.query.aggregation;
import io.druid.guice.annotations.PublicApi;
/**
*/
@PublicApi
public interface MetricManipulationFn
{
public Object manipulate(AggregatorFactory factory, Object object);

View File

@ -19,6 +19,7 @@
package io.druid.query.aggregation;
import io.druid.guice.annotations.ExtensionPoint;
import io.druid.java.util.common.Cacheable;
import java.util.Comparator;
@ -28,6 +29,7 @@ import java.util.Set;
/**
* Functionally similar to an Aggregator. See the Aggregator interface for more comments.
*/
@ExtensionPoint
public interface PostAggregator extends Cacheable
{
Set<String> getDependentFields();

View File

@ -26,6 +26,7 @@ import com.google.common.primitives.Doubles;
import com.google.common.primitives.Floats;
import com.google.common.primitives.Ints;
import com.google.common.primitives.UnsignedBytes;
import io.druid.guice.annotations.PublicApi;
import io.druid.java.util.common.StringUtils;
import io.druid.java.util.common.Cacheable;
@ -50,6 +51,7 @@ import java.util.List;
* +--------------------------------------------------------+
*
*/
@PublicApi
public class CacheKeyBuilder
{
static final byte BYTE_KEY = 0;

View File

@ -27,6 +27,8 @@ import io.druid.segment.DimensionSelector;
import io.druid.segment.column.ValueType;
/**
* Provides information about a dimension for a grouping query, like topN or groupBy. Note that this is not annotated
* with {@code PublicApi}, since it is not meant to be stable for usage by non-built-in queries.
*/
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = LegacyDimensionSpec.class)
@JsonSubTypes(value = {

View File

@ -21,6 +21,7 @@ package io.druid.query.extraction;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import io.druid.guice.annotations.ExtensionPoint;
import io.druid.query.lookup.LookupExtractionFn;
import io.druid.query.lookup.RegisteredLookupExtractionFn;
@ -28,6 +29,7 @@ import javax.annotation.Nullable;
/**
*/
@ExtensionPoint
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
@JsonSubTypes(value = {
@JsonSubTypes.Type(name = "time", value = TimeDimExtractionFn.class),

View File

@ -19,12 +19,14 @@
package io.druid.segment;
import io.druid.guice.annotations.ExtensionPoint;
import io.druid.java.util.common.io.smoosh.FileSmoosher;
import java.io.Closeable;
import java.io.IOException;
import java.nio.channels.WritableByteChannel;
@ExtensionPoint
public interface GenericColumnSerializer extends Closeable
{
public void open() throws IOException;

View File

@ -21,6 +21,7 @@ package io.druid.segment;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.druid.data.input.impl.TimestampSpec;
import io.druid.guice.annotations.PublicApi;
import io.druid.java.util.common.granularity.Granularity;
import io.druid.query.aggregation.AggregatorFactory;
@ -33,6 +34,7 @@ import java.util.concurrent.ConcurrentHashMap;
/**
*/
@PublicApi
public class Metadata
{
// container is used for arbitrary key-value pairs in segment metadata e.g.

View File

@ -28,6 +28,11 @@ import java.io.IOException;
import java.util.Map;
/**
 * Direct interface to memory mapped segments. Not a public API for extensions; site-specific queries should be
* using {@link StorageAdapter}.
*
* @see QueryableIndexStorageAdapter for query path adapter
* @see QueryableIndexIndexableAdapter for indexing path adapter
*/
public interface QueryableIndex extends ColumnSelector, Closeable
{

View File

@ -173,12 +173,6 @@ public class QueryableIndexStorageAdapter implements StorageAdapter
return getColumnCapabilites(index, column);
}
@Override
public Map<String, DimensionHandler> getDimensionHandlers()
{
return index.getDimensionHandlers();
}
@Override
public String getColumnTypeName(String columnName)
{

View File

@ -19,12 +19,14 @@
package io.druid.segment;
import io.druid.guice.annotations.PublicApi;
import org.joda.time.Interval;
import java.io.Closeable;
/**
*/
@PublicApi
public interface Segment extends Closeable
{
public String getIdentifier();

View File

@ -19,16 +19,17 @@
package io.druid.segment;
import io.druid.guice.annotations.PublicApi;
import io.druid.segment.column.ColumnCapabilities;
import io.druid.segment.data.Indexed;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import javax.annotation.Nullable;
import java.util.Map;
/**
*/
@PublicApi
public interface StorageAdapter extends CursorFactory
{
public String getSegmentIdentifier();
@ -63,8 +64,6 @@ public interface StorageAdapter extends CursorFactory
@Nullable
public ColumnCapabilities getColumnCapabilities(String column);
public Map<String, DimensionHandler> getDimensionHandlers();
/**
* Like {@link ColumnCapabilities#getType()}, but may return a more descriptive string for complex columns.
* @param column column name

View File

@ -19,9 +19,11 @@
package io.druid.segment.data;
import io.druid.guice.annotations.PublicApi;
import io.druid.query.monomorphicprocessing.CalledFromHotLoop;
import io.druid.query.monomorphicprocessing.HotLoopCallee;
@PublicApi
public interface Indexed<T> extends Iterable<T>, HotLoopCallee
{
Class<? extends T> getClazz();

View File

@ -19,9 +19,12 @@
package io.druid.segment.data;
import io.druid.guice.annotations.ExtensionPoint;
import java.nio.ByteBuffer;
import java.util.Comparator;
@ExtensionPoint
public interface ObjectStrategy<T> extends Comparator<T>
{
public Class<? extends T> getClazz();

View File

@ -35,7 +35,6 @@ import io.druid.query.filter.ValueMatcher;
import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
import io.druid.segment.Capabilities;
import io.druid.segment.Cursor;
import io.druid.segment.DimensionHandler;
import io.druid.segment.DimensionIndexer;
import io.druid.segment.DimensionSelector;
import io.druid.segment.DimensionSelectorUtils;
@ -64,7 +63,6 @@ import org.joda.time.Interval;
import javax.annotation.Nullable;
import java.util.Iterator;
import java.util.Map;
/**
*/
@ -173,12 +171,6 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter
return index.getCapabilities(column);
}
@Override
public Map<String, DimensionHandler> getDimensionHandlers()
{
return index.getDimensionHandlers();
}
@Override
public String getColumnTypeName(String column)
{

View File

@ -19,6 +19,7 @@
package io.druid.segment.serde;
import io.druid.guice.annotations.PublicApi;
import io.druid.java.util.common.StringUtils;
import io.druid.java.util.common.io.smoosh.FileSmoosher;
import io.druid.segment.GenericColumnSerializer;
@ -46,6 +47,7 @@ public class ComplexColumnSerializer implements GenericColumnSerializer
this.strategy = strategy;
}
@PublicApi
public static ComplexColumnSerializer create(
IOPeon ioPeon,
String filenameBase,

Some files were not shown because too many files have changed in this diff Show More