mirror of https://github.com/apache/druid.git

fix tests

parent 6dba81a046
commit ea2c8f9db6
@@ -34,7 +34,6 @@ import org.apache.druid.data.input.SplitHintSpec;
 import javax.annotation.Nullable;
 import java.io.File;
-import java.io.FileNotFoundException;
 import java.util.Iterator;
 import java.util.Objects;
 import java.util.Spliterator;
@@ -117,14 +116,7 @@ public class LocalInputSource extends AbstractInputSource implements SplittableI
     // reader() is supposed to be called in each task that creates segments.
     // The task should already have only one split in parallel indexing,
     // while there's no need to make splits using splitHintSpec in sequential indexing.
-        createSplits(inputFormat, null).map(split -> {
-          try {
-            return new FileSource(split.get());
-          }
-          catch (FileNotFoundException e) {
-            throw new RuntimeException(e);
-          }
-        }),
+        createSplits(inputFormat, null).map(split -> new FileSource(split.get())),
         temporaryDirectory
     );
   }
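The removed try/catch existed only because Stream.map takes a java.util.function.Function, whose apply method cannot throw checked exceptions; once the FileSource constructor no longer declares FileNotFoundException, the lambda reduces to a direct constructor call. A minimal sketch of that general constraint, using a hypothetical Source class rather than Druid's FileSource:

import java.io.FileNotFoundException;
import java.util.stream.Stream;

public class CheckedExceptionInMapSketch
{
  // Hypothetical type whose constructor declares a checked exception,
  // mirroring the old FileSource signature implied by the removed wrapper.
  static class Source
  {
    Source(String path) throws FileNotFoundException
    {
    }
  }

  public static void main(String[] args)
  {
    // Function.apply cannot throw checked exceptions, so the lambda has to
    // catch FileNotFoundException and rethrow it unchecked...
    Stream.of("a.json", "b.json")
          .map(path -> {
            try {
              return new Source(path);
            }
            catch (FileNotFoundException e) {
              throw new RuntimeException(e);
            }
          })
          .forEach(source -> System.out.println(source));
    // ...whereas a constructor without the checked exception lets map() take
    // the constructor call directly, as in the hunk above.
  }
}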
@@ -287,16 +287,23 @@ public class FileUtils
       String messageOnRetry
   ) throws IOException
   {
-    try (InputStream inputStream = objectOpenFunction.open(object)) {
-      return copyLarge(
-          inputStream,
-          outFile,
-          fetchBuffer,
+    try {
+      return RetryUtils.retry(
+          () -> {
+            try (InputStream inputStream = objectOpenFunction.open(object);
+                 OutputStream out = new FileOutputStream(outFile)) {
+              return IOUtils.copyLarge(inputStream, out, fetchBuffer);
+            }
+          },
           retryCondition,
+          outFile::delete,
           numRetries,
           messageOnRetry
       );
     }
+    catch (Exception e) {
+      throw new IOException(e);
+    }
   }
 
   public static long copyLarge(
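With this hunk the stream is opened inside the retried closure rather than once up front, so every attempt reopens the object and, via outFile::delete between attempts, rewrites the target file from scratch; whatever ultimately escapes the retries is rethrown as an IOException. A minimal, self-contained sketch of that retry-with-cleanup shape (illustrative names, not the actual RetryUtils API):

import java.io.IOException;
import java.util.function.Predicate;

public final class RetryWithCleanupSketch
{
  @FunctionalInterface
  public interface ThrowingSupplier<T>
  {
    T get() throws Exception;
  }

  // Runs the task up to maxTries times, invoking cleanup before each retry so
  // the next attempt starts from a clean slate (e.g. outFile::delete above).
  public static <T> T retry(
      ThrowingSupplier<T> task,
      Predicate<Throwable> retryCondition,
      Runnable cleanup,
      int maxTries
  ) throws IOException
  {
    Throwable last = null;
    for (int attempt = 1; attempt <= maxTries; attempt++) {
      try {
        return task.get();
      }
      catch (Throwable t) {
        last = t;
        if (attempt == maxTries || !retryCondition.test(t)) {
          break;
        }
        cleanup.run();
      }
    }
    // Mirror the hunk's outer catch: surface the final failure as an IOException.
    throw new IOException(last);
  }
}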
@@ -72,14 +72,7 @@ public class ObjectIteratingReaderTest
             false,
             0
         ),
-        files.stream().flatMap(file -> {
-          try {
-            return ImmutableList.of(new FileSource(file)).stream();
-          }
-          catch (IOException e) {
-            throw new RuntimeException(e);
-          }
-        }),
+        files.stream().flatMap(file -> ImmutableList.of(new FileSource(file)).stream()),
         temporaryFolder.newFolder()
     );
@@ -198,7 +198,7 @@ public class DataSchema
   }
 
   @Nullable
-  @JsonProperty
+  @JsonProperty("timestampSpec")
   private TimestampSpec getGivenTimestampSpec()
   {
     return timestampSpec;
@@ -213,7 +213,7 @@ public class DataSchema
   }
 
   @Nullable
-  @JsonProperty
+  @JsonProperty("dimensionsSpec")
   private DimensionsSpec getGivenDimensionsSpec()
   {
     return dimensionsSpec;
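Why the explicit names matter: with a bare @JsonProperty, Jackson derives the property name from the getter ("givenTimestampSpec" / "givenDimensionsSpec"), which does not match the "timestampSpec" / "dimensionsSpec" names the constructor presumably binds, so a serialize/deserialize round trip would drop the values. A toy illustration of the naming behavior (hypothetical class, not the real DataSchema):

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonPropertyNamingSketch
{
  static class Spec
  {
    private final String timestampSpec;

    @JsonCreator
    Spec(@JsonProperty("timestampSpec") String timestampSpec)
    {
      this.timestampSpec = timestampSpec;
    }

    // Named explicitly: serializes as "timestampSpec", matching the creator parameter.
    // A bare @JsonProperty would derive "givenTimestampSpec" from the method name.
    @JsonProperty("timestampSpec")
    private String getGivenTimestampSpec()
    {
      return timestampSpec;
    }
  }

  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new ObjectMapper();
    String json = mapper.writeValueAsString(new Spec("auto"));
    System.out.println(json);  // {"timestampSpec":"auto"}
    Spec roundTripped = mapper.readValue(json, Spec.class);
    System.out.println(roundTripped.getGivenTimestampSpec());  // auto
  }
}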