Merge remote-tracking branch 'origin/master' into index-lifecycle

@@ -0,0 +1,19 @@
#!/bin/bash

SCRIPT="$0"

# SCRIPT might be an arbitrarily deep series of symbolic links; loop until we
# have the concrete path
while [ -h "$SCRIPT" ] ; do
  ls=$(ls -ld "$SCRIPT")
  # Drop everything prior to ->
  link=$(expr "$ls" : '.*-> \(.*\)$')
  if expr "$link" : '/.*' > /dev/null; then
    SCRIPT="$link"
  else
    SCRIPT=$(dirname "$SCRIPT")/"$link"
  fi
done

source $(dirname "${SCRIPT}")/java-versions.properties
JAVA_HOME="${HOME}"/.java/${ES_BUILD_JAVA} ./gradlew resolveAllDependencies --parallel

@@ -628,3 +628,11 @@ if (System.properties.get("build.compare") != null) {
    }
  }
}

allprojects {
  task resolveAllDependencies {
    doLast {
      configurations.findAll { it.isCanBeResolved() }.each { it.resolve() }
    }
  }
}

@@ -28,6 +28,7 @@ import org.apache.http.entity.ByteArrayEntity;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.client.RequestConverters.EndpointBuilder;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.DeleteDatafeedRequest;
import org.elasticsearch.client.ml.DeleteForecastRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.FlushJobRequest;

@@ -41,6 +42,7 @@ import org.elasticsearch.client.ml.GetOverallBucketsRequest;
import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutDatafeedRequest;
import org.elasticsearch.client.ml.PutJobRequest;
import org.elasticsearch.client.ml.UpdateJobRequest;

@@ -195,6 +197,19 @@ final class MLRequestConverters {
        return request;
    }

    static Request deleteDatafeed(DeleteDatafeedRequest deleteDatafeedRequest) {
        String endpoint = new EndpointBuilder()
                .addPathPartAsIs("_xpack")
                .addPathPartAsIs("ml")
                .addPathPartAsIs("datafeeds")
                .addPathPart(deleteDatafeedRequest.getDatafeedId())
                .build();
        Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
        RequestConverters.Params params = new RequestConverters.Params(request);
        params.putParam("force", Boolean.toString(deleteDatafeedRequest.isForce()));
        return request;
    }

    static Request deleteForecast(DeleteForecastRequest deleteForecastRequest) throws IOException {
        String endpoint = new EndpointBuilder()
                .addPathPartAsIs("_xpack")

@@ -313,4 +328,16 @@ final class MLRequestConverters {
        request.setEntity(createEntity(getInfluencersRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request putCalendar(PutCalendarRequest putCalendarRequest) throws IOException {
        String endpoint = new EndpointBuilder()
                .addPathPartAsIs("_xpack")
                .addPathPartAsIs("ml")
                .addPathPartAsIs("calendars")
                .addPathPart(putCalendarRequest.getCalendar().getId())
                .build();
        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
        request.setEntity(createEntity(putCalendarRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }
}

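For reference, a minimal sketch (not part of this diff) of how the new deleteDatafeed converter above could be exercised from the same org.elasticsearch.client package; the class name MLRequestConvertersSketch is hypothetical, plain assert statements stand in for the project's test framework, and the datafeed id is a placeholder.

package org.elasticsearch.client;

import org.apache.http.client.methods.HttpDelete;
import org.elasticsearch.client.ml.DeleteDatafeedRequest;

final class MLRequestConvertersSketch {

    static void checkDeleteDatafeed() {
        DeleteDatafeedRequest deleteDatafeedRequest = new DeleteDatafeedRequest("my-datafeed");
        deleteDatafeedRequest.setForce(true);

        Request request = MLRequestConverters.deleteDatafeed(deleteDatafeedRequest);

        // The converter targets DELETE /_xpack/ml/datafeeds/{datafeedId} and
        // copies the force flag into a query parameter.
        assert HttpDelete.METHOD_NAME.equals(request.getMethod());
        assert "/_xpack/ml/datafeeds/my-datafeed".equals(request.getEndpoint());
        assert "true".equals(request.getParameters().get("force"));
    }
}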
@@ -22,9 +22,9 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.CloseJobResponse;
import org.elasticsearch.client.ml.DeleteDatafeedRequest;
import org.elasticsearch.client.ml.DeleteForecastRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.DeleteJobResponse;
import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.FlushJobResponse;
import org.elasticsearch.client.ml.ForecastJobRequest;

@@ -47,6 +47,8 @@ import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.OpenJobResponse;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PostDataResponse;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutCalendarResponse;
import org.elasticsearch.client.ml.PutDatafeedRequest;
import org.elasticsearch.client.ml.PutDatafeedResponse;
import org.elasticsearch.client.ml.PutJobRequest;

@@ -60,7 +62,6 @@ import java.util.Collections;

/**
 * Machine Learning API client wrapper for the {@link RestHighLevelClient}
 *
 * <p>
 * See the <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-apis.html">
 * X-Pack Machine Learning APIs </a> for additional information.

@@ -86,10 +87,10 @@ public final class MachineLearningClient {
     */
    public PutJobResponse putJob(PutJobRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request,
                MLRequestConverters::putJob,
                options,
                PutJobResponse::fromXContent,
                Collections.emptySet());
            MLRequestConverters::putJob,
            options,
            PutJobResponse::fromXContent,
            Collections.emptySet());
    }

    /**

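As a usage illustration (not part of the diff), the wrapper above is reached through RestHighLevelClient#machineLearning(); the host, port, and job id below are placeholders.

import java.io.IOException;

import org.apache.http.HttpHost;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobResponse;

public class MachineLearningClientSketch {
    public static void main(String[] args) throws IOException {
        try (RestHighLevelClient client =
                 new RestHighLevelClient(RestClient.builder(new HttpHost("localhost", 9200, "http")))) {
            // machineLearning() exposes the MachineLearningClient shown in this diff.
            GetJobRequest request = new GetJobRequest("my-job");
            GetJobResponse response = client.machineLearning().getJob(request, RequestOptions.DEFAULT);
            System.out.println("jobs found: " + response.count());
        }
    }
}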
@@ -104,63 +105,60 @@ public final class MachineLearningClient {
|
|||
*/
|
||||
public void putJobAsync(PutJobRequest request, RequestOptions options, ActionListener<PutJobResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(request,
|
||||
MLRequestConverters::putJob,
|
||||
options,
|
||||
PutJobResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
MLRequestConverters::putJob,
|
||||
options,
|
||||
PutJobResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets one or more Machine Learning job configuration info.
|
||||
*
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job.html"></a>
|
||||
* </p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job.html">ML GET job documentation</a>
|
||||
*
|
||||
* @param request {@link GetJobRequest} Request containing a list of jobId(s) and additional options
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return {@link GetJobResponse} response object containing
|
||||
* the {@link org.elasticsearch.client.ml.job.config.Job} objects and the number of jobs found
|
||||
* @throws IOException when there is a serialization issue sending the request or receiving the response
|
||||
*/
|
||||
public GetJobResponse getJob(GetJobRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
MLRequestConverters::getJob,
|
||||
options,
|
||||
GetJobResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
MLRequestConverters::getJob,
|
||||
options,
|
||||
GetJobResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
/**
|
||||
* Gets one or more Machine Learning job configuration info, asynchronously.
|
||||
*
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job.html"></a>
|
||||
* </p>
|
||||
* @param request {@link GetJobRequest} Request containing a list of jobId(s) and additional options
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job.html">ML GET job documentation</a>
|
||||
*
|
||||
* @param request {@link GetJobRequest} Request containing a list of jobId(s) and additional options
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener Listener to be notified with {@link GetJobResponse} upon request completion
|
||||
*/
|
||||
public void getJobAsync(GetJobRequest request, RequestOptions options, ActionListener<GetJobResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(request,
|
||||
MLRequestConverters::getJob,
|
||||
options,
|
||||
GetJobResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
MLRequestConverters::getJob,
|
||||
options,
|
||||
GetJobResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets usage statistics for one or more Machine Learning jobs
|
||||
*
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job-stats.html">Get Job stats docs</a>
|
||||
* </p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job-stats.html">Get job stats docs</a>
|
||||
*
|
||||
* @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return {@link GetJobStatsResponse} response object containing
|
||||
* the {@link JobStats} objects and the number of jobs found
|
||||
* @throws IOException when there is a serialization issue sending the request or receiving the response
|
||||
|
@@ -175,12 +173,11 @@ public final class MachineLearningClient {
|
|||
|
||||
/**
|
||||
* Gets one or more Machine Learning job configuration info, asynchronously.
|
||||
*
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job-stats.html">Get Job stats docs</a>
|
||||
* </p>
|
||||
* @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job-stats.html">Get job stats docs</a>
|
||||
*
|
||||
* @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener Listener to be notified with {@link GetJobStatsResponse} upon request completion
|
||||
*/
|
||||
|
@@ -196,37 +193,37 @@ public final class MachineLearningClient {
    /**
     * Deletes the given Machine Learning Job
     * <p>
     * For additional info
     * see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-job.html">ML Delete Job documentation</a>
     * </p>
     * For additional info
     * see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-job.html">ML Delete job documentation</a>
     *
     * @param request The request to delete the job
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param options  Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return action acknowledgement
     * @throws IOException when there is a serialization issue sending the request or receiving the response
     */
    public DeleteJobResponse deleteJob(DeleteJobRequest request, RequestOptions options) throws IOException {
    public AcknowledgedResponse deleteJob(DeleteJobRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request,
            MLRequestConverters::deleteJob,
            options,
            DeleteJobResponse::fromXContent,
            AcknowledgedResponse::fromXContent,
            Collections.emptySet());
    }

    /**
     * Deletes the given Machine Learning Job asynchronously and notifies the listener on completion
     * <p>
     * For additional info
     * see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-job.html">ML Delete Job documentation</a>
     * </p>
     * @param request The request to delete the job
     * For additional info
     * see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-job.html">ML Delete Job documentation</a>
     *
     * @param request The request to delete the job
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener Listener to be notified upon request completion
     */
    public void deleteJobAsync(DeleteJobRequest request, RequestOptions options, ActionListener<DeleteJobResponse> listener) {
    public void deleteJobAsync(DeleteJobRequest request, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request,
            MLRequestConverters::deleteJob,
            options,
            DeleteJobResponse::fromXContent,
            AcknowledgedResponse::fromXContent,
            listener,
            Collections.emptySet());
    }

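A short caller-side sketch (not in the diff) of the signature change above: deleteJob now resolves to the common AcknowledgedResponse instead of the removed DeleteJobResponse. The client and job id are illustrative.

import java.io.IOException;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.DeleteJobRequest;

class DeleteJobSketch {

    // Synchronous form: the acknowledged flag is all the API reports back.
    static boolean deleteJob(RestHighLevelClient client, String jobId) throws IOException {
        AcknowledgedResponse response =
            client.machineLearning().deleteJob(new DeleteJobRequest(jobId), RequestOptions.DEFAULT);
        return response.isAcknowledged();
    }

    // Asynchronous form: the listener is now typed on AcknowledgedResponse as well.
    static void deleteJobAsync(RestHighLevelClient client, String jobId) {
        client.machineLearning().deleteJobAsync(new DeleteJobRequest(jobId), RequestOptions.DEFAULT,
            new ActionListener<AcknowledgedResponse>() {
                @Override
                public void onResponse(AcknowledgedResponse response) {
                    // deletion acknowledged
                }

                @Override
                public void onFailure(Exception e) {
                    // handle the failure
                }
            });
    }
}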
@@ -234,103 +231,101 @@ public final class MachineLearningClient {
|
|||
/**
|
||||
* Opens a Machine Learning Job.
|
||||
* When you open a new job, it starts with an empty model.
|
||||
*
|
||||
* When you open an existing job, the most recent model state is automatically loaded.
|
||||
* The job is ready to resume its analysis from where it left off, once new data is received.
|
||||
*
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-open-job.html"></a>
|
||||
* </p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-open-job.html">ML Open Job documentation</a>
|
||||
*
|
||||
* @param request Request containing job_id and additional optional options
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return response containing if the job was successfully opened or not.
|
||||
* @throws IOException when there is a serialization issue sending the request or receiving the response
|
||||
*/
|
||||
public OpenJobResponse openJob(OpenJobRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
MLRequestConverters::openJob,
|
||||
options,
|
||||
OpenJobResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
MLRequestConverters::openJob,
|
||||
options,
|
||||
OpenJobResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens a Machine Learning Job asynchronously, notifies listener on completion.
|
||||
* When you open a new job, it starts with an empty model.
|
||||
*
|
||||
* When you open an existing job, the most recent model state is automatically loaded.
|
||||
* The job is ready to resume its analysis from where it left off, once new data is received.
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-open-job.html"></a>
|
||||
* </p>
|
||||
* @param request Request containing job_id and additional optional options
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-open-job.html">ML Open Job documentation</a>
|
||||
*
|
||||
* @param request Request containing job_id and additional optional options
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener Listener to be notified upon request completion
|
||||
*/
|
||||
public void openJobAsync(OpenJobRequest request, RequestOptions options, ActionListener<OpenJobResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(request,
|
||||
MLRequestConverters::openJob,
|
||||
options,
|
||||
OpenJobResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
MLRequestConverters::openJob,
|
||||
options,
|
||||
OpenJobResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes one or more Machine Learning Jobs. A job can be opened and closed multiple times throughout its lifecycle.
|
||||
*
|
||||
* A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results.
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-close-job.html">ML Close Job documentation</a>
|
||||
*
|
||||
* @param request Request containing job_ids and additional options. See {@link CloseJobRequest}
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return response containing if the job was successfully closed or not.
|
||||
* @throws IOException when there is a serialization issue sending the request or receiving the response
|
||||
*/
|
||||
public CloseJobResponse closeJob(CloseJobRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
MLRequestConverters::closeJob,
|
||||
options,
|
||||
CloseJobResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
MLRequestConverters::closeJob,
|
||||
options,
|
||||
CloseJobResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes one or more Machine Learning Jobs asynchronously, notifies listener on completion
|
||||
*
|
||||
* A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results.
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-close-job.html">ML Close Job documentation</a>
|
||||
*
|
||||
* @param request Request containing job_ids and additional options. See {@link CloseJobRequest}
|
||||
* @param request Request containing job_ids and additional options. See {@link CloseJobRequest}
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener Listener to be notified upon request completion
|
||||
*/
|
||||
public void closeJobAsync(CloseJobRequest request, RequestOptions options, ActionListener<CloseJobResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(request,
|
||||
MLRequestConverters::closeJob,
|
||||
options,
|
||||
CloseJobResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
MLRequestConverters::closeJob,
|
||||
options,
|
||||
CloseJobResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Flushes internally buffered data for the given Machine Learning Job ensuring all data sent to the job has been processed.
|
||||
* This may cause new results to be calculated depending on the contents of the buffer
|
||||
*
|
||||
* Both flush and close operations are similar,
|
||||
* however the flush is more efficient if you are expecting to send more data for analysis.
|
||||
*
|
||||
* When flushing, the job remains open and is available to continue analyzing data.
|
||||
* A close operation additionally prunes and persists the model state to disk and the
|
||||
* job must be opened again before analyzing further data.
|
||||
*
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-flush-job.html">Flush ML job documentation</a>
|
||||
*
|
||||
* @param request The {@link FlushJobRequest} object enclosing the `jobId` and additional request options
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param request The {@link FlushJobRequest} object enclosing the `jobId` and additional request options
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @throws IOException when there is a serialization issue sending the request or receiving the response
|
||||
*/
|
||||
public FlushJobResponse flushJob(FlushJobRequest request, RequestOptions options) throws IOException {
|
||||
|
@@ -344,14 +339,11 @@ public final class MachineLearningClient {
|
|||
/**
|
||||
* Flushes internally buffered data for the given Machine Learning Job asynchronously ensuring all data sent to the job has been processed.
|
||||
* This may cause new results to be calculated depending on the contents of the buffer
|
||||
*
|
||||
* Both flush and close operations are similar,
|
||||
* however the flush is more efficient if you are expecting to send more data for analysis.
|
||||
*
|
||||
* When flushing, the job remains open and is available to continue analyzing data.
|
||||
* A close operation additionally prunes and persists the model state to disk and the
|
||||
* job must be opened again before analyzing further data.
|
||||
*
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-flush-job.html">Flush ML job documentation</a>
|
||||
|
@@ -371,87 +363,82 @@ public final class MachineLearningClient {
|
|||
|
||||
/**
|
||||
* Creates a forecast of an existing, opened Machine Learning Job
|
||||
*
|
||||
* This predicts the future behavior of a time series by using its historical behavior.
|
||||
*
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-forecast.html">Forecast ML Job Documentation</a>
|
||||
* </p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-forecast.html">Forecast ML Job Documentation</a>
|
||||
*
|
||||
* @param request ForecastJobRequest with forecasting options
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return response containing forecast acknowledgement and new forecast's ID
|
||||
* @throws IOException when there is a serialization issue sending the request or receiving the response
|
||||
*/
|
||||
public ForecastJobResponse forecastJob(ForecastJobRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
MLRequestConverters::forecastJob,
|
||||
options,
|
||||
ForecastJobResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates a Machine Learning {@link org.elasticsearch.client.ml.job.config.Job}
|
||||
*
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-job.html"></a>
|
||||
* </p>
|
||||
*
|
||||
* @param request the {@link UpdateJobRequest} object enclosing the desired updates
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return a PutJobResponse object containing the updated job object
|
||||
* @throws IOException when there is a serialization issue sending the request or receiving the response
|
||||
*/
|
||||
public PutJobResponse updateJob(UpdateJobRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
MLRequestConverters::updateJob,
|
||||
options,
|
||||
PutJobResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
MLRequestConverters::forecastJob,
|
||||
options,
|
||||
ForecastJobResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a forecast of an existing, opened Machine Learning Job asynchronously
|
||||
*
|
||||
* This predicts the future behavior of a time series by using its historical behavior.
|
||||
*
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-forecast.html">Forecast ML Job Documentation</a>
|
||||
* </p>
|
||||
* @param request ForecastJobRequest with forecasting options
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-forecast.html">Forecast ML Job Documentation</a>
|
||||
*
|
||||
* @param request ForecastJobRequest with forecasting options
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener Listener to be notified upon request completion
|
||||
*/
|
||||
public void forecastJobAsync(ForecastJobRequest request, RequestOptions options, ActionListener<ForecastJobResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(request,
|
||||
MLRequestConverters::forecastJob,
|
||||
options,
|
||||
ForecastJobResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
MLRequestConverters::forecastJob,
|
||||
options,
|
||||
ForecastJobResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates a Machine Learning {@link org.elasticsearch.client.ml.job.config.Job} asynchronously
|
||||
*
|
||||
* Deletes Machine Learning Job Forecasts
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-job.html"></a>
|
||||
* </p>
|
||||
* @param request the {@link UpdateJobRequest} object enclosing the desired updates
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-forecast.html">Delete Job Forecast
|
||||
* Documentation</a>
|
||||
*
|
||||
* @param request the {@link DeleteForecastRequest} object enclosing the desired jobId, forecastIDs, and other options
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return a AcknowledgedResponse object indicating request success
|
||||
* @throws IOException when there is a serialization issue sending the request or receiving the response
|
||||
*/
|
||||
public AcknowledgedResponse deleteForecast(DeleteForecastRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
MLRequestConverters::deleteForecast,
|
||||
options,
|
||||
AcknowledgedResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes Machine Learning Job Forecasts asynchronously
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-forecast.html">Delete Job Forecast
|
||||
* Documentation</a>
|
||||
*
|
||||
* @param request the {@link DeleteForecastRequest} object enclosing the desired jobId, forecastIDs, and other options
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener Listener to be notified upon request completion
|
||||
*/
|
||||
public void updateJobAsync(UpdateJobRequest request, RequestOptions options, ActionListener<PutJobResponse> listener) {
|
||||
public void deleteForecastAsync(DeleteForecastRequest request, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(request,
|
||||
MLRequestConverters::updateJob,
|
||||
options,
|
||||
PutJobResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
MLRequestConverters::deleteForecast,
|
||||
options,
|
||||
AcknowledgedResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -493,45 +480,81 @@ public final class MachineLearningClient {
|
|||
}
|
||||
|
||||
/**
|
||||
* Deletes Machine Learning Job Forecasts
|
||||
*
|
||||
* Deletes the given Machine Learning Datafeed
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-forecast.html"></a>
|
||||
* </p>
|
||||
* For additional info
|
||||
* see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-datafeed.html">
|
||||
* ML Delete Datafeed documentation</a>
|
||||
*
|
||||
* @param request the {@link DeleteForecastRequest} object enclosing the desired jobId, forecastIDs, and other options
|
||||
* @param request The request to delete the datafeed
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return a AcknowledgedResponse object indicating request success
|
||||
* @return action acknowledgement
|
||||
* @throws IOException when there is a serialization issue sending the request or receiving the response
|
||||
*/
|
||||
public AcknowledgedResponse deleteForecast(DeleteForecastRequest request, RequestOptions options) throws IOException {
|
||||
public AcknowledgedResponse deleteDatafeed(DeleteDatafeedRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
MLRequestConverters::deleteForecast,
|
||||
options,
|
||||
AcknowledgedResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
MLRequestConverters::deleteDatafeed,
|
||||
options,
|
||||
AcknowledgedResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes Machine Learning Job Forecasts asynchronously
|
||||
*
|
||||
* Deletes the given Machine Learning Datafeed asynchronously and notifies the listener on completion
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-forecast.html"></a>
|
||||
* </p>
|
||||
* For additional info
|
||||
* see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-datafeed.html">
|
||||
* ML Delete Datafeed documentation</a>
|
||||
*
|
||||
* @param request the {@link DeleteForecastRequest} object enclosing the desired jobId, forecastIDs, and other options
|
||||
* @param request The request to delete the datafeed
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener Listener to be notified upon request completion
|
||||
*/
|
||||
public void deleteForecastAsync(DeleteForecastRequest request, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
|
||||
public void deleteDatafeedAsync(DeleteDatafeedRequest request, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(request,
|
||||
MLRequestConverters::deleteForecast,
|
||||
options,
|
||||
AcknowledgedResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
MLRequestConverters::deleteDatafeed,
|
||||
options,
|
||||
AcknowledgedResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates a Machine Learning {@link org.elasticsearch.client.ml.job.config.Job}
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-job.html">ML Update Job Documentation</a>
|
||||
*
|
||||
* @param request the {@link UpdateJobRequest} object enclosing the desired updates
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return a PutJobResponse object containing the updated job object
|
||||
* @throws IOException when there is a serialization issue sending the request or receiving the response
|
||||
*/
|
||||
public PutJobResponse updateJob(UpdateJobRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
MLRequestConverters::updateJob,
|
||||
options,
|
||||
PutJobResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates a Machine Learning {@link org.elasticsearch.client.ml.job.config.Job} asynchronously
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-job.html">ML Update Job Documentation</a>
|
||||
*
|
||||
* @param request the {@link UpdateJobRequest} object enclosing the desired updates
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener Listener to be notified upon request completion
|
||||
*/
|
||||
public void updateJobAsync(UpdateJobRequest request, RequestOptions options, ActionListener<PutJobResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(request,
|
||||
MLRequestConverters::updateJob,
|
||||
options,
|
||||
PutJobResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -540,8 +563,8 @@ public final class MachineLearningClient {
|
|||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-bucket.html">ML GET buckets documentation</a>
|
||||
*
|
||||
* @param request The request
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param request The request
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
*/
|
||||
public GetBucketsResponse getBuckets(GetBucketsRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
|
@@ -568,25 +591,25 @@ public final class MachineLearningClient {
|
|||
GetBucketsResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the categories for a Machine Learning Job.
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-category.html">
|
||||
* ML GET categories documentation</a>
|
||||
* ML GET categories documentation</a>
|
||||
*
|
||||
* @param request The request
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param request The request
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @throws IOException when there is a serialization issue sending the request or receiving the response
|
||||
*/
|
||||
public GetCategoriesResponse getCategories(GetCategoriesRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
MLRequestConverters::getCategories,
|
||||
options,
|
||||
GetCategoriesResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
MLRequestConverters::getCategories,
|
||||
options,
|
||||
GetCategoriesResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -594,7 +617,7 @@ public final class MachineLearningClient {
|
|||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-category.html">
|
||||
* ML GET categories documentation</a>
|
||||
* ML GET categories documentation</a>
|
||||
*
|
||||
* @param request The request
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
|
@@ -602,11 +625,11 @@ public final class MachineLearningClient {
|
|||
*/
|
||||
public void getCategoriesAsync(GetCategoriesRequest request, RequestOptions options, ActionListener<GetCategoriesResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(request,
|
||||
MLRequestConverters::getCategories,
|
||||
options,
|
||||
GetCategoriesResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
MLRequestConverters::getCategories,
|
||||
options,
|
||||
GetCategoriesResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -614,10 +637,10 @@ public final class MachineLearningClient {
|
|||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-overall-buckets.html">
|
||||
* ML GET overall buckets documentation</a>
|
||||
* ML GET overall buckets documentation</a>
|
||||
*
|
||||
* @param request The request
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param request The request
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
*/
|
||||
public GetOverallBucketsResponse getOverallBuckets(GetOverallBucketsRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
|
@@ -632,7 +655,7 @@ public final class MachineLearningClient {
|
|||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-overall-buckets.html">
|
||||
* ML GET overall buckets documentation</a>
|
||||
* ML GET overall buckets documentation</a>
|
||||
*
|
||||
* @param request The request
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
|
@@ -654,8 +677,8 @@ public final class MachineLearningClient {
|
|||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-record.html">ML GET records documentation</a>
|
||||
*
|
||||
* @param request the request
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param request the request
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
*/
|
||||
public GetRecordsResponse getRecords(GetRecordsRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
|
@@ -686,48 +709,44 @@ public final class MachineLearningClient {
|
|||
|
||||
/**
|
||||
* Sends data to an anomaly detection job for analysis.
|
||||
*
|
||||
* NOTE: The job must have a state of open to receive and process the data.
|
||||
*
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-post-data.html">ML POST Data documentation</a>
|
||||
* </p>
|
||||
* NOTE: The job must have a state of open to receive and process the data.
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-post-data.html">ML POST Data documentation</a>
|
||||
*
|
||||
* @param request PostDataRequest containing the data to post and some additional options
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return response containing operational progress about the job
|
||||
* @throws IOException when there is a serialization issue sending the request or receiving the response
|
||||
*/
|
||||
public PostDataResponse postData(PostDataRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
MLRequestConverters::postData,
|
||||
options,
|
||||
PostDataResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
MLRequestConverters::postData,
|
||||
options,
|
||||
PostDataResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Sends data to an anomaly detection job for analysis, asynchronously
|
||||
*
|
||||
* NOTE: The job must have a state of open to receive and process the data.
|
||||
*
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-post-data.html">ML POST Data documentation</a>
|
||||
* </p>
|
||||
* NOTE: The job must have a state of open to receive and process the data.
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-post-data.html">ML POST Data documentation</a>
|
||||
*
|
||||
* @param request PostDataRequest containing the data to post and some additional options
|
||||
* @param request PostDataRequest containing the data to post and some additional options
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener Listener to be notified upon request completion
|
||||
*/
|
||||
public void postDataAsync(PostDataRequest request, RequestOptions options, ActionListener<PostDataResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(request,
|
||||
MLRequestConverters::postData,
|
||||
options,
|
||||
PostDataResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
MLRequestConverters::postData,
|
||||
options,
|
||||
PostDataResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -735,10 +754,10 @@ public final class MachineLearningClient {
|
|||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-influencer.html">
|
||||
* ML GET influencers documentation</a>
|
||||
* ML GET influencers documentation</a>
|
||||
*
|
||||
* @param request the request
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param request the request
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
*/
|
||||
public GetInfluencersResponse getInfluencers(GetInfluencersRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
|
@@ -753,7 +772,7 @@ public final class MachineLearningClient {
|
|||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-influencer.html">
|
||||
* ML GET influencers documentation</a>
|
||||
* ML GET influencers documentation</a>
|
||||
*
|
||||
* @param request the request
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
|
@@ -768,4 +787,44 @@ public final class MachineLearningClient {
            listener,
            Collections.emptySet());
    }

    /**
     * Create a new machine learning calendar
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-calendar.html">
     * ML create calendar documentation</a>
     *
     * @param request The request
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return The {@link PutCalendarResponse} containing the calendar
     * @throws IOException when there is a serialization issue sending the request or receiving the response
     */
    public PutCalendarResponse putCalendar(PutCalendarRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request,
            MLRequestConverters::putCalendar,
            options,
            PutCalendarResponse::fromXContent,
            Collections.emptySet());
    }

    /**
     * Create a new machine learning calendar, notifies listener with the created calendar
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-calendar.html">
     * ML create calendar documentation</a>
     *
     * @param request The request
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener Listener to be notified upon request completion
     */
    public void putCalendarAsync(PutCalendarRequest request, RequestOptions options, ActionListener<PutCalendarResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request,
            MLRequestConverters::putCalendar,
            options,
            PutCalendarResponse::fromXContent,
            listener,
            Collections.emptySet());
    }
}

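An illustrative caller of the new calendar API above (not part of the diff); the calendar id, job list, and description are placeholders.

import java.io.IOException;
import java.util.Arrays;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutCalendarResponse;
import org.elasticsearch.client.ml.calendars.Calendar;

class PutCalendarSketch {

    static Calendar putHolidayCalendar(RestHighLevelClient client) throws IOException {
        // A calendar groups jobs (or job groups) so scheduled events can be applied to all of them.
        Calendar calendar = new Calendar("holidays", Arrays.asList("job-1", "job-group-1"), "2018 holidays");
        PutCalendarResponse response =
            client.machineLearning().putCalendar(new PutCalendarRequest(calendar), RequestOptions.DEFAULT);
        // The response echoes the calendar that was stored.
        return response.getCalendar();
    }
}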
@@ -0,0 +1,80 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;

import java.util.Objects;

/**
 * Request to delete a Machine Learning Datafeed via its ID
 */
public class DeleteDatafeedRequest extends ActionRequest {

    private String datafeedId;
    private boolean force;

    public DeleteDatafeedRequest(String datafeedId) {
        this.datafeedId = Objects.requireNonNull(datafeedId, "[datafeed_id] must not be null");
    }

    public String getDatafeedId() {
        return datafeedId;
    }

    public boolean isForce() {
        return force;
    }

    /**
     * Used to forcefully delete a started datafeed.
     * This method is quicker than stopping and deleting the datafeed.
     *
     * @param force When {@code true} forcefully delete a started datafeed. Defaults to {@code false}
     */
    public void setForce(boolean force) {
        this.force = force;
    }

    @Override
    public ActionRequestValidationException validate() {
        return null;
    }

    @Override
    public int hashCode() {
        return Objects.hash(datafeedId, force);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }

        if (obj == null || obj.getClass() != getClass()) {
            return false;
        }

        DeleteDatafeedRequest other = (DeleteDatafeedRequest) obj;
        return Objects.equals(datafeedId, other.datafeedId) && Objects.equals(force, other.force);
    }

}

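A brief sketch (not in the diff) of how the request class above is used through the high-level client; the datafeed id is a placeholder.

import java.io.IOException;

import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.DeleteDatafeedRequest;

class DeleteDatafeedSketch {

    static boolean forceDeleteDatafeed(RestHighLevelClient client, String datafeedId) throws IOException {
        DeleteDatafeedRequest request = new DeleteDatafeedRequest(datafeedId);
        // force=true removes a datafeed that is still started without stopping it first.
        request.setForce(true);
        AcknowledgedResponse response = client.machineLearning().deleteDatafeed(request, RequestOptions.DEFAULT);
        return response.isAcknowledged();
    }
}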
@@ -1,63 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml;
|
||||
|
||||
import org.elasticsearch.action.support.master.AcknowledgedResponse;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Response acknowledging the Machine Learning Job request
|
||||
*/
|
||||
public class DeleteJobResponse extends AcknowledgedResponse {
|
||||
|
||||
public DeleteJobResponse(boolean acknowledged) {
|
||||
super(acknowledged);
|
||||
}
|
||||
|
||||
public DeleteJobResponse() {
|
||||
}
|
||||
|
||||
public static DeleteJobResponse fromXContent(XContentParser parser) throws IOException {
|
||||
AcknowledgedResponse response = AcknowledgedResponse.fromXContent(parser);
|
||||
return new DeleteJobResponse(response.isAcknowledged());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (other == null || getClass() != other.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
DeleteJobResponse that = (DeleteJobResponse) other;
|
||||
return isAcknowledged() == that.isAcknowledged();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(isAcknowledged());
|
||||
}
|
||||
|
||||
}
|
|
@@ -0,0 +1,73 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.ml;
|
||||
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.client.ml.calendars.Calendar;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Request to create a new Machine Learning calendar
|
||||
*/
|
||||
public class PutCalendarRequest extends ActionRequest implements ToXContentObject {
|
||||
|
||||
private final Calendar calendar;
|
||||
|
||||
public PutCalendarRequest(Calendar calendar) {
|
||||
this.calendar = calendar;
|
||||
}
|
||||
|
||||
public Calendar getCalendar() {
|
||||
return calendar;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ActionRequestValidationException validate() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
calendar.toXContent(builder, params);
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(calendar);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
PutCalendarRequest other = (PutCalendarRequest) obj;
|
||||
return Objects.equals(calendar, other.calendar);
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,76 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.ml;
|
||||
|
||||
import org.elasticsearch.client.ml.calendars.Calendar;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
public class PutCalendarResponse implements ToXContentObject {
|
||||
|
||||
public static PutCalendarResponse fromXContent(XContentParser parser) throws IOException {
|
||||
return new PutCalendarResponse(Calendar.PARSER.parse(parser, null));
|
||||
}
|
||||
|
||||
private final Calendar calendar;
|
||||
|
||||
PutCalendarResponse(Calendar calendar) {
|
||||
this.calendar = calendar;
|
||||
}
|
||||
|
||||
public Calendar getCalendar() {
|
||||
return calendar;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
calendar.toXContent(builder, params);
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(calendar);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (obj == null || getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
PutCalendarResponse other = (PutCalendarResponse) obj;
|
||||
return Objects.equals(calendar, other.calendar);
|
||||
}
|
||||
|
||||
@Override
|
||||
public final String toString() {
|
||||
return Strings.toString(this);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,115 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml.calendars;
|
||||
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* A simple calendar object for scheduled (special) events.
|
||||
* The calendar consists of a name and a list of job Ids or job groups;
|
||||
* the events are stored separately and reference the calendar.
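 * <p>
 * For illustration only (mirroring {@code toXContent} below), a calendar with a single job serializes as:
 * <pre>
 * { "calendar_id": "holidays", "job_ids": ["job_1"], "description": "bank holidays" }
 * </pre>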
|
||||
*/
|
||||
public class Calendar implements ToXContentObject {
|
||||
|
||||
public static final String CALENDAR_TYPE = "calendar";
|
||||
|
||||
public static final ParseField JOB_IDS = new ParseField("job_ids");
|
||||
public static final ParseField ID = new ParseField("calendar_id");
|
||||
public static final ParseField DESCRIPTION = new ParseField("description");
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static final ConstructingObjectParser<Calendar, Void> PARSER =
|
||||
new ConstructingObjectParser<>(CALENDAR_TYPE, true, a ->
|
||||
new Calendar((String) a[0], (List<String>) a[1], (String) a[2]));
|
||||
|
||||
static {
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), ID);
|
||||
PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), JOB_IDS);
|
||||
PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), DESCRIPTION);
|
||||
}
|
||||
|
||||
private final String id;
|
||||
private final List<String> jobIds;
|
||||
private final String description;
|
||||
|
||||
/**
|
||||
* {@code jobIds} can be a mix of job groups and job Ids
|
||||
* @param id The calendar Id
|
||||
* @param jobIds List of job Ids or job groups
|
||||
* @param description An optional description
|
||||
*/
|
||||
public Calendar(String id, List<String> jobIds, @Nullable String description) {
|
||||
this.id = Objects.requireNonNull(id, ID.getPreferredName() + " must not be null");
|
||||
this.jobIds = Collections.unmodifiableList(Objects.requireNonNull(jobIds, JOB_IDS.getPreferredName() + " must not be null"));
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public List<String> getJobIds() {
|
||||
return jobIds;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(ID.getPreferredName(), id);
|
||||
builder.field(JOB_IDS.getPreferredName(), jobIds);
|
||||
if (description != null) {
|
||||
builder.field(DESCRIPTION.getPreferredName(), description);
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == this) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (obj == null || getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Calendar other = (Calendar) obj;
|
||||
return id.equals(other.id) && jobIds.equals(other.jobIds) && Objects.equals(description, other.description);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(id, jobIds, description);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,125 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml.calendars;
|
||||
|
||||
import org.elasticsearch.client.ml.job.util.TimeUtil;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Date;
|
||||
import java.util.Objects;
|
||||
|
||||
public class ScheduledEvent implements ToXContentObject {
|
||||
|
||||
public static final ParseField DESCRIPTION = new ParseField("description");
|
||||
public static final ParseField START_TIME = new ParseField("start_time");
|
||||
public static final ParseField END_TIME = new ParseField("end_time");
|
||||
public static final ParseField EVENT_ID = new ParseField("event_id");
|
||||
public static final String SCHEDULED_EVENT_TYPE = "scheduled_event";
|
||||
|
||||
public static final ConstructingObjectParser<ScheduledEvent, Void> PARSER =
|
||||
new ConstructingObjectParser<>(SCHEDULED_EVENT_TYPE, true, a ->
|
||||
new ScheduledEvent((String) a[0], (Date) a[1], (Date) a[2], (String) a[3], (String) a[4]));
|
||||
|
||||
static {
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), DESCRIPTION);
|
||||
PARSER.declareField(ConstructingObjectParser.constructorArg(),(p) -> TimeUtil.parseTimeField(p, START_TIME.getPreferredName()),
|
||||
START_TIME, ObjectParser.ValueType.VALUE);
|
||||
PARSER.declareField(ConstructingObjectParser.constructorArg(),(p) -> TimeUtil.parseTimeField(p, END_TIME.getPreferredName()),
|
||||
END_TIME, ObjectParser.ValueType.VALUE);
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), Calendar.ID);
|
||||
PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), EVENT_ID);
|
||||
}
|
||||
|
||||
private final String description;
|
||||
private final Date startTime;
|
||||
private final Date endTime;
|
||||
private final String calendarId;
|
||||
private final String eventId;
|
||||
|
||||
ScheduledEvent(String description, Date startTime, Date endTime, String calendarId, @Nullable String eventId) {
|
||||
this.description = Objects.requireNonNull(description);
|
||||
this.startTime = Objects.requireNonNull(startTime);
|
||||
this.endTime = Objects.requireNonNull(endTime);
|
||||
this.calendarId = Objects.requireNonNull(calendarId);
|
||||
this.eventId = eventId;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
public Date getStartTime() {
|
||||
return startTime;
|
||||
}
|
||||
|
||||
public Date getEndTime() {
|
||||
return endTime;
|
||||
}
|
||||
|
||||
public String getCalendarId() {
|
||||
return calendarId;
|
||||
}
|
||||
|
||||
public String getEventId() {
|
||||
return eventId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(DESCRIPTION.getPreferredName(), description);
|
||||
builder.timeField(START_TIME.getPreferredName(), START_TIME.getPreferredName() + "_string", startTime.getTime());
|
||||
builder.timeField(END_TIME.getPreferredName(), END_TIME.getPreferredName() + "_string", endTime.getTime());
|
||||
builder.field(Calendar.ID.getPreferredName(), calendarId);
|
||||
if (eventId != null) {
|
||||
builder.field(EVENT_ID.getPreferredName(), eventId);
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == this) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (obj == null || getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
ScheduledEvent other = (ScheduledEvent) obj;
|
||||
return Objects.equals(this.description, other.description)
|
||||
&& Objects.equals(this.startTime, other.startTime)
|
||||
&& Objects.equals(this.endTime, other.endTime)
|
||||
&& Objects.equals(this.calendarId, other.calendarId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(description, startTime, endTime, calendarId);
|
||||
}
|
||||
}
|
|
@ -24,6 +24,7 @@ import org.apache.http.client.methods.HttpGet;
|
|||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.client.methods.HttpPut;
|
||||
import org.elasticsearch.client.ml.CloseJobRequest;
|
||||
import org.elasticsearch.client.ml.DeleteDatafeedRequest;
|
||||
import org.elasticsearch.client.ml.DeleteForecastRequest;
|
||||
import org.elasticsearch.client.ml.DeleteJobRequest;
|
||||
import org.elasticsearch.client.ml.FlushJobRequest;
|
||||
|
@ -37,9 +38,12 @@ import org.elasticsearch.client.ml.GetOverallBucketsRequest;
|
|||
import org.elasticsearch.client.ml.GetRecordsRequest;
|
||||
import org.elasticsearch.client.ml.OpenJobRequest;
|
||||
import org.elasticsearch.client.ml.PostDataRequest;
|
||||
import org.elasticsearch.client.ml.PutCalendarRequest;
|
||||
import org.elasticsearch.client.ml.PutDatafeedRequest;
|
||||
import org.elasticsearch.client.ml.PutJobRequest;
|
||||
import org.elasticsearch.client.ml.UpdateJobRequest;
|
||||
import org.elasticsearch.client.ml.calendars.Calendar;
|
||||
import org.elasticsearch.client.ml.calendars.CalendarTests;
|
||||
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
|
||||
import org.elasticsearch.client.ml.datafeed.DatafeedConfigTests;
|
||||
import org.elasticsearch.client.ml.job.config.AnalysisConfig;
|
||||
|
@ -223,6 +227,20 @@ public class MLRequestConvertersTests extends ESTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testDeleteDatafeed() {
|
||||
String datafeedId = randomAlphaOfLength(10);
|
||||
DeleteDatafeedRequest deleteDatafeedRequest = new DeleteDatafeedRequest(datafeedId);
|
||||
|
||||
Request request = MLRequestConverters.deleteDatafeed(deleteDatafeedRequest);
|
||||
assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
|
||||
assertEquals("/_xpack/ml/datafeeds/" + datafeedId, request.getEndpoint());
|
||||
assertEquals(Boolean.toString(false), request.getParameters().get("force"));
|
||||
|
||||
deleteDatafeedRequest.setForce(true);
|
||||
request = MLRequestConverters.deleteDatafeed(deleteDatafeedRequest);
|
||||
assertEquals(Boolean.toString(true), request.getParameters().get("force"));
|
||||
}
|
||||
|
||||
public void testDeleteForecast() throws Exception {
|
||||
String jobId = randomAlphaOfLength(10);
|
||||
DeleteForecastRequest deleteForecastRequest = new DeleteForecastRequest(jobId);
|
||||
|
@ -368,6 +386,17 @@ public class MLRequestConvertersTests extends ESTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testPutCalendar() throws IOException {
|
||||
PutCalendarRequest putCalendarRequest = new PutCalendarRequest(CalendarTests.testInstance());
|
||||
Request request = MLRequestConverters.putCalendar(putCalendarRequest);
|
||||
assertEquals(HttpPut.METHOD_NAME, request.getMethod());
|
||||
assertEquals("/_xpack/ml/calendars/" + putCalendarRequest.getCalendar().getId(), request.getEndpoint());
|
||||
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
|
||||
Calendar parsedCalendar = Calendar.PARSER.apply(parser, null);
|
||||
assertThat(parsedCalendar, equalTo(putCalendarRequest.getCalendar()));
|
||||
}
|
||||
}
|
||||
|
||||
private static Job createValidJob(String jobId) {
|
||||
AnalysisConfig.Builder analysisConfig = AnalysisConfig.builder(Collections.singletonList(
|
||||
Detector.builder().setFunction("count").build()));
|
||||
|
|
|
@ -25,9 +25,9 @@ import org.elasticsearch.action.get.GetResponse;
|
|||
import org.elasticsearch.action.support.master.AcknowledgedResponse;
|
||||
import org.elasticsearch.client.ml.CloseJobRequest;
|
||||
import org.elasticsearch.client.ml.CloseJobResponse;
|
||||
import org.elasticsearch.client.ml.DeleteDatafeedRequest;
|
||||
import org.elasticsearch.client.ml.DeleteForecastRequest;
|
||||
import org.elasticsearch.client.ml.DeleteJobRequest;
|
||||
import org.elasticsearch.client.ml.DeleteJobResponse;
|
||||
import org.elasticsearch.client.ml.FlushJobRequest;
|
||||
import org.elasticsearch.client.ml.FlushJobResponse;
|
||||
import org.elasticsearch.client.ml.ForecastJobRequest;
|
||||
|
@ -40,11 +40,15 @@ import org.elasticsearch.client.ml.OpenJobRequest;
|
|||
import org.elasticsearch.client.ml.OpenJobResponse;
|
||||
import org.elasticsearch.client.ml.PostDataRequest;
|
||||
import org.elasticsearch.client.ml.PostDataResponse;
|
||||
import org.elasticsearch.client.ml.PutCalendarRequest;
|
||||
import org.elasticsearch.client.ml.PutCalendarResponse;
|
||||
import org.elasticsearch.client.ml.PutDatafeedRequest;
|
||||
import org.elasticsearch.client.ml.PutDatafeedResponse;
|
||||
import org.elasticsearch.client.ml.PutJobRequest;
|
||||
import org.elasticsearch.client.ml.PutJobResponse;
|
||||
import org.elasticsearch.client.ml.UpdateJobRequest;
|
||||
import org.elasticsearch.client.ml.calendars.Calendar;
|
||||
import org.elasticsearch.client.ml.calendars.CalendarTests;
|
||||
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
|
||||
import org.elasticsearch.client.ml.job.config.AnalysisConfig;
|
||||
import org.elasticsearch.client.ml.job.config.DataDescription;
|
||||
|
@ -129,7 +133,7 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
|
|||
MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
|
||||
machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
|
||||
|
||||
DeleteJobResponse response = execute(new DeleteJobRequest(jobId),
|
||||
AcknowledgedResponse response = execute(new DeleteJobRequest(jobId),
|
||||
machineLearningClient::deleteJob,
|
||||
machineLearningClient::deleteJobAsync);
|
||||
|
||||
|
@ -312,6 +316,22 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
|
|||
assertThat(createdDatafeed.getIndices(), equalTo(datafeedConfig.getIndices()));
|
||||
}
|
||||
|
||||
public void testDeleteDatafeed() throws Exception {
|
||||
String jobId = randomValidJobId();
|
||||
Job job = buildJob(jobId);
|
||||
MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
|
||||
machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
|
||||
|
||||
String datafeedId = "datafeed-" + jobId;
|
||||
DatafeedConfig datafeedConfig = DatafeedConfig.builder(datafeedId, jobId).setIndices("some_data_index").build();
|
||||
execute(new PutDatafeedRequest(datafeedConfig), machineLearningClient::putDatafeed, machineLearningClient::putDatafeedAsync);
|
||||
|
||||
AcknowledgedResponse response = execute(new DeleteDatafeedRequest(datafeedId), machineLearningClient::deleteDatafeed,
|
||||
machineLearningClient::deleteDatafeedAsync);
|
||||
|
||||
assertTrue(response.isAcknowledged());
|
||||
}
|
||||
|
||||
public void testDeleteForecast() throws Exception {
|
||||
String jobId = "test-delete-forecast";
|
||||
|
||||
|
@ -381,6 +401,16 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
|
|||
return getResponse.isExists();
|
||||
}
|
||||
|
||||
public void testPutCalendar() throws IOException {
|
||||
|
||||
Calendar calendar = CalendarTests.testInstance();
|
||||
MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
|
||||
PutCalendarResponse putCalendarResponse = execute(new PutCalendarRequest(calendar), machineLearningClient::putCalendar,
|
||||
machineLearningClient::putCalendarAsync);
|
||||
|
||||
assertThat(putCalendarResponse.getCalendar(), equalTo(calendar));
|
||||
}
|
||||
|
||||
public static String randomValidJobId() {
|
||||
CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz0123456789".toCharArray());
|
||||
return generator.ofCodePointsLength(random(), 10, 10);
|
||||
|
|
|
@ -34,9 +34,9 @@ import org.elasticsearch.client.RequestOptions;
|
|||
import org.elasticsearch.client.RestHighLevelClient;
|
||||
import org.elasticsearch.client.ml.CloseJobRequest;
|
||||
import org.elasticsearch.client.ml.CloseJobResponse;
|
||||
import org.elasticsearch.client.ml.DeleteDatafeedRequest;
|
||||
import org.elasticsearch.client.ml.DeleteForecastRequest;
|
||||
import org.elasticsearch.client.ml.DeleteJobRequest;
|
||||
import org.elasticsearch.client.ml.DeleteJobResponse;
|
||||
import org.elasticsearch.client.ml.FlushJobRequest;
|
||||
import org.elasticsearch.client.ml.FlushJobResponse;
|
||||
import org.elasticsearch.client.ml.ForecastJobRequest;
|
||||
|
@ -59,11 +59,14 @@ import org.elasticsearch.client.ml.OpenJobRequest;
|
|||
import org.elasticsearch.client.ml.OpenJobResponse;
|
||||
import org.elasticsearch.client.ml.PostDataRequest;
|
||||
import org.elasticsearch.client.ml.PostDataResponse;
|
||||
import org.elasticsearch.client.ml.PutCalendarRequest;
|
||||
import org.elasticsearch.client.ml.PutCalendarResponse;
|
||||
import org.elasticsearch.client.ml.PutDatafeedRequest;
|
||||
import org.elasticsearch.client.ml.PutDatafeedResponse;
|
||||
import org.elasticsearch.client.ml.PutJobRequest;
|
||||
import org.elasticsearch.client.ml.PutJobResponse;
|
||||
import org.elasticsearch.client.ml.UpdateJobRequest;
|
||||
import org.elasticsearch.client.ml.calendars.Calendar;
|
||||
import org.elasticsearch.client.ml.datafeed.ChunkingConfig;
|
||||
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
|
||||
import org.elasticsearch.client.ml.job.config.AnalysisConfig;
|
||||
|
@ -264,7 +267,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
//tag::x-pack-delete-ml-job-request
|
||||
DeleteJobRequest deleteJobRequest = new DeleteJobRequest("my-first-machine-learning-job");
|
||||
deleteJobRequest.setForce(false); //<1>
|
||||
DeleteJobResponse deleteJobResponse = client.machineLearning().deleteJob(deleteJobRequest, RequestOptions.DEFAULT);
|
||||
AcknowledgedResponse deleteJobResponse = client.machineLearning().deleteJob(deleteJobRequest, RequestOptions.DEFAULT);
|
||||
//end::x-pack-delete-ml-job-request
|
||||
|
||||
//tag::x-pack-delete-ml-job-response
|
||||
|
@ -273,9 +276,9 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
}
|
||||
{
|
||||
//tag::x-pack-delete-ml-job-request-listener
|
||||
ActionListener<DeleteJobResponse> listener = new ActionListener<DeleteJobResponse>() {
|
||||
ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
|
||||
@Override
|
||||
public void onResponse(DeleteJobResponse deleteJobResponse) {
|
||||
public void onResponse(AcknowledgedResponse acknowledgedResponse) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
|
@ -587,6 +590,61 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testDeleteDatafeed() throws Exception {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
String jobId = "test-delete-datafeed-job";
|
||||
Job job = MachineLearningIT.buildJob(jobId);
|
||||
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
|
||||
|
||||
String datafeedId = "test-delete-datafeed";
|
||||
DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, jobId).setIndices("foo").build();
|
||||
client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
|
||||
|
||||
{
|
||||
//tag::x-pack-delete-ml-datafeed-request
|
||||
DeleteDatafeedRequest deleteDatafeedRequest = new DeleteDatafeedRequest(datafeedId);
|
||||
deleteDatafeedRequest.setForce(false); //<1>
|
||||
AcknowledgedResponse deleteDatafeedResponse = client.machineLearning().deleteDatafeed(
|
||||
deleteDatafeedRequest, RequestOptions.DEFAULT);
|
||||
//end::x-pack-delete-ml-datafeed-request
|
||||
|
||||
//tag::x-pack-delete-ml-datafeed-response
|
||||
boolean isAcknowledged = deleteDatafeedResponse.isAcknowledged(); //<1>
|
||||
//end::x-pack-delete-ml-datafeed-response
|
||||
}
|
||||
|
||||
// Recreate datafeed to allow second deletion
|
||||
client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
|
||||
|
||||
{
|
||||
//tag::x-pack-delete-ml-datafeed-request-listener
|
||||
ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
|
||||
@Override
|
||||
public void onResponse(AcknowledgedResponse acknowledgedResponse) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
//end::x-pack-delete-ml-datafeed-request-listener
|
||||
|
||||
// Replace the empty listener by a blocking listener in test
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
//tag::x-pack-delete-ml-datafeed-request-async
|
||||
DeleteDatafeedRequest deleteDatafeedRequest = new DeleteDatafeedRequest(datafeedId);
|
||||
client.machineLearning().deleteDatafeedAsync(deleteDatafeedRequest, RequestOptions.DEFAULT, listener); // <1>
|
||||
//end::x-pack-delete-ml-datafeed-request-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
}
|
||||
|
||||
public void testGetBuckets() throws IOException, InterruptedException {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
|
@ -1313,7 +1371,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared", "doc");
|
||||
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
|
||||
indexRequest.source("{\"job_id\": \"test-get-categories\", \"category_id\": 1, \"terms\": \"AAL\"," +
|
||||
" \"regex\": \".*?AAL.*\", \"max_matching_length\": 3, \"examples\": [\"AAL\"]}", XContentType.JSON);
|
||||
" \"regex\": \".*?AAL.*\", \"max_matching_length\": 3, \"examples\": [\"AAL\"]}", XContentType.JSON);
|
||||
client.index(indexRequest, RequestOptions.DEFAULT);
|
||||
|
||||
{
|
||||
|
@ -1347,17 +1405,17 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
|
||||
// tag::x-pack-ml-get-categories-listener
|
||||
ActionListener<GetCategoriesResponse> listener =
|
||||
new ActionListener<GetCategoriesResponse>() {
|
||||
@Override
|
||||
public void onResponse(GetCategoriesResponse getcategoriesResponse) {
|
||||
// <1>
|
||||
}
|
||||
new ActionListener<GetCategoriesResponse>() {
|
||||
@Override
|
||||
public void onResponse(GetCategoriesResponse getcategoriesResponse) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
// end::x-pack-ml-get-categories-listener
|
||||
|
||||
// Replace the empty listener by a blocking listener in test
|
||||
|
@ -1369,6 +1427,48 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
// end::x-pack-ml-get-categories-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void testPutCalendar() throws IOException, InterruptedException {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
//tag::x-pack-ml-put-calendar-request
|
||||
Calendar calendar = new Calendar("public_holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
|
||||
PutCalendarRequest request = new PutCalendarRequest(calendar); // <1>
|
||||
//end::x-pack-ml-put-calendar-request
|
||||
|
||||
//tag::x-pack-ml-put-calendar-execution
|
||||
PutCalendarResponse response = client.machineLearning().putCalendar(request, RequestOptions.DEFAULT);
|
||||
//end::x-pack-ml-put-calendar-execution
|
||||
|
||||
//tag::x-pack-ml-put-calendar-response
|
||||
Calendar newCalendar = response.getCalendar(); // <1>
|
||||
//end::x-pack-ml-put-calendar-response
|
||||
assertThat(newCalendar.getId(), equalTo("public_holidays"));
|
||||
|
||||
// tag::x-pack-ml-put-calendar-listener
|
||||
ActionListener<PutCalendarResponse> listener = new ActionListener<PutCalendarResponse>() {
|
||||
@Override
|
||||
public void onResponse(PutCalendarResponse response) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
// end::x-pack-ml-put-calendar-listener
|
||||
|
||||
// Replace the empty listener by a blocking listener in test
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::x-pack-ml-put-calendar-execute-async
|
||||
client.machineLearning().putCalendarAsync(request, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::x-pack-ml-put-calendar-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,42 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml;
|
||||
|
||||
import org.elasticsearch.client.ml.datafeed.DatafeedConfigTests;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
public class DeleteDatafeedRequestTests extends ESTestCase {
|
||||
|
||||
public void testConstructor_GivenNullId() {
|
||||
NullPointerException ex = expectThrows(NullPointerException.class, () -> new DeleteDatafeedRequest(null));
|
||||
assertEquals("[datafeed_id] must not be null", ex.getMessage());
|
||||
}
|
||||
|
||||
public void testSetForce() {
|
||||
DeleteDatafeedRequest deleteDatafeedRequest = createTestInstance();
|
||||
assertFalse(deleteDatafeedRequest.isForce());
|
||||
|
||||
deleteDatafeedRequest.setForce(true);
|
||||
assertTrue(deleteDatafeedRequest.isForce());
|
||||
}
|
||||
|
||||
private DeleteDatafeedRequest createTestInstance() {
|
||||
return new DeleteDatafeedRequest(DatafeedConfigTests.randomValidDatafeedId());
|
||||
}
|
||||
}
|
|
@ -0,0 +1,44 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.ml;
|
||||
|
||||
import org.elasticsearch.client.ml.calendars.Calendar;
|
||||
import org.elasticsearch.client.ml.calendars.CalendarTests;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class PutCalendarRequestTests extends AbstractXContentTestCase<PutCalendarRequest> {
|
||||
@Override
|
||||
protected PutCalendarRequest createTestInstance() {
|
||||
return new PutCalendarRequest(CalendarTests.testInstance());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected PutCalendarRequest doParseInstance(XContentParser parser) throws IOException {
|
||||
return new PutCalendarRequest(Calendar.PARSER.apply(parser, null));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
}
|
|
@ -7,7 +7,7 @@
|
|||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
|
@ -16,27 +16,28 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.ml;
|
||||
|
||||
import org.elasticsearch.client.ml.calendars.CalendarTests;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class DeleteJobResponseTests extends AbstractXContentTestCase<DeleteJobResponse> {
|
||||
|
||||
public class PutCalendarResponseTests extends AbstractXContentTestCase<PutCalendarResponse> {
|
||||
@Override
|
||||
protected DeleteJobResponse createTestInstance() {
|
||||
return new DeleteJobResponse();
|
||||
protected PutCalendarResponse createTestInstance() {
|
||||
return new PutCalendarResponse(CalendarTests.testInstance());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected DeleteJobResponse doParseInstance(XContentParser parser) throws IOException {
|
||||
return DeleteJobResponse.fromXContent(parser);
|
||||
protected PutCalendarResponse doParseInstance(XContentParser parser) throws IOException {
|
||||
return PutCalendarResponse.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,61 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.ml.calendars;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
public class CalendarTests extends AbstractXContentTestCase<Calendar> {
|
||||
|
||||
public static Calendar testInstance() {
|
||||
int size = randomInt(10);
|
||||
List<String> items = new ArrayList<>(size);
|
||||
for (int i = 0; i < size; i++) {
|
||||
items.add(randomAlphaOfLengthBetween(1, 20));
|
||||
}
|
||||
String description = null;
|
||||
if (randomBoolean()) {
|
||||
description = randomAlphaOfLength(20);
|
||||
}
|
||||
|
||||
CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz".toCharArray());
|
||||
return new Calendar(generator.ofCodePointsLength(random(), 10, 10), items, description);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Calendar createTestInstance() {
|
||||
return testInstance();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Calendar doParseInstance(XContentParser parser) throws IOException {
|
||||
return Calendar.PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,51 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.ml.calendars;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
public class ScheduledEventTests extends AbstractXContentTestCase<ScheduledEvent> {
|
||||
|
||||
public static ScheduledEvent testInstance() {
|
||||
Date start = new Date(randomNonNegativeLong());
|
||||
Date end = new Date(start.getTime() + randomIntBetween(1, 10000) * 1000);
|
||||
|
||||
return new ScheduledEvent(randomAlphaOfLength(10), start, end, randomAlphaOfLengthBetween(1, 20),
|
||||
randomBoolean() ? null : randomAlphaOfLength(7));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ScheduledEvent createTestInstance() {
|
||||
return testInstance();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ScheduledEvent doParseInstance(XContentParser parser) {
|
||||
return ScheduledEvent.PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,49 @@
|
|||
[[java-rest-high-x-pack-ml-delete-datafeed]]
|
||||
=== Delete Datafeed API
|
||||
|
||||
[[java-rest-high-x-pack-machine-learning-delete-datafeed-request]]
|
||||
==== Delete Datafeed Request
|
||||
|
||||
A `DeleteDatafeedRequest` object requires a non-null `datafeedId` and can optionally set `force`.
|
||||
It can be executed as follows:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
---------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-datafeed-request]
|
||||
---------------------------------------------------
|
||||
<1> Use to forcefully delete a started datafeed;
|
||||
this is quicker than first stopping the datafeed and then deleting it.
|
||||
Defaults to `false`.
|
||||
|
||||
[[java-rest-high-x-pack-machine-learning-delete-datafeed-response]]
|
||||
==== Delete Datafeed Response
|
||||
|
||||
The returned `AcknowledgedResponse` object indicates the acknowledgement of the request:
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
---------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-datafeed-response]
|
||||
---------------------------------------------------
|
||||
<1> `isAcknowledged` indicates whether the deletion request was acknowledged
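
In code, the check is a one-liner (sketch; `response` being the `AcknowledgedResponse` returned above):

["source","java"]
---------------------------------------------------
boolean acknowledged = response.isAcknowledged(); // true when the datafeed was deleted
---------------------------------------------------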
|
||||
|
||||
[[java-rest-high-x-pack-machine-learning-delete-datafeed-async]]
|
||||
==== Delete Datafeed Asynchronously
|
||||
|
||||
This request can also be made asynchronously.
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
---------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-datafeed-request-async]
|
||||
---------------------------------------------------
|
||||
<1> The `DeleteDatafeedRequest` to execute and the `ActionListener` to use when the execution completes or fails.
|
||||
|
||||
The deletion request returns immediately. Once the request is completed, the `ActionListener` is
|
||||
called back via either the `onResponse` or the `onFailure` method. The latter indicates that some failure occurred when
|
||||
making the request.
|
||||
|
||||
A typical listener for a `DeleteDatafeedRequest` could be defined as follows:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
---------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-datafeed-request-listener]
|
||||
---------------------------------------------------
|
||||
<1> The action to be taken when it is completed
|
||||
<2> What to do when a failure occurs
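
Putting the pieces together, a hedged sketch of the full asynchronous flow (the listener body and the datafeed id are placeholders):

["source","java"]
---------------------------------------------------
ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
    @Override
    public void onResponse(AcknowledgedResponse acknowledgedResponse) {
        // react to the acknowledgement
    }

    @Override
    public void onFailure(Exception e) {
        // handle the failure
    }
};
client.machineLearning().deleteDatafeedAsync(
    new DeleteDatafeedRequest("my-datafeed"), RequestOptions.DEFAULT, listener);
---------------------------------------------------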
|
|
@ -18,7 +18,7 @@ Defaults to `false`
|
|||
[[java-rest-high-x-pack-machine-learning-delete-job-response]]
|
||||
==== Delete Job Response
|
||||
|
||||
The returned `DeleteJobResponse` object indicates the acknowledgement of the request:
|
||||
The returned `AcknowledgedResponse` object indicates the acknowledgement of the request:
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
---------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-job-response]
|
||||
|
|
|
@ -112,7 +112,7 @@ include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-l
|
|||
<1> `onResponse` is called back when the action is completed successfully
|
||||
<2> `onFailure` is called back when some unexpected error occurs
|
||||
|
||||
[[java-rest-high-snapshot-ml-get-buckets-response]]
|
||||
[[java-rest-high-x-pack-ml-get-buckets-response]]
|
||||
==== Get Buckets Response
|
||||
|
||||
The returned `GetBucketsResponse` contains the requested buckets:
|
||||
|
|
|
@ -70,7 +70,7 @@ include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-categorie
|
|||
<1> `onResponse` is called back when the action is completed successfully
|
||||
<2> `onFailure` is called back when some unexpected error occurs
|
||||
|
||||
[[java-rest-high-snapshot-ml-get-categories-response]]
|
||||
[[java-rest-high-x-pack-ml-get-categories-response]]
|
||||
==== Get Categories Response
|
||||
|
||||
The returned `GetCategoriesResponse` contains the requested categories:
|
||||
|
|
|
@ -99,7 +99,7 @@ include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-influence
|
|||
<1> `onResponse` is called back when the action is completed successfully
|
||||
<2> `onFailure` is called back when some unexpected error occurs
|
||||
|
||||
[[java-rest-high-snapshot-ml-get-influencers-response]]
|
||||
[[java-rest-high-x-pack-ml-get-influencers-response]]
|
||||
==== Get Influencers Response
|
||||
|
||||
The returned `GetInfluencersResponse` contains the requested influencers:
|
||||
|
|
|
@ -94,7 +94,7 @@ include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-b
|
|||
<1> `onResponse` is called back when the action is completed successfully
|
||||
<2> `onFailure` is called back when some unexpected error occurs
|
||||
|
||||
[[java-rest-high-snapshot-ml-get-overall-buckets-response]]
|
||||
[[java-rest-high-x-pack-ml-get-overall-buckets-response]]
|
||||
==== Get Overall Buckets Response
|
||||
|
||||
The returned `GetOverallBucketsResponse` contains the requested buckets:
|
||||
|
|
|
@ -100,7 +100,7 @@ include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-records-l
|
|||
<1> `onResponse` is called back when the action is completed successfully
|
||||
<2> `onFailure` is called back when some unexpected error occurs
|
||||
|
||||
[[java-rest-high-snapshot-ml-get-records-response]]
|
||||
[[java-rest-high-x-pack-ml-get-records-response]]
|
||||
==== Get Records Response
|
||||
|
||||
The returned `GetRecordsResponse` contains the requested records:
|
||||
|
|
|
@ -0,0 +1,65 @@
|
|||
[[java-rest-high-x-pack-ml-put-calendar]]
|
||||
=== Put Calendar API
|
||||
Creates a new {ml} calendar.
|
||||
The API accepts a `PutCalendarRequest` and responds
|
||||
with a `PutCalendarResponse` object.
|
||||
|
||||
[[java-rest-high-x-pack-ml-put-calendar-request]]
|
||||
==== Put Calendar Request
|
||||
|
||||
A `PutCalendarRequest` is constructed with a `Calendar` object:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-calendar-request]
|
||||
--------------------------------------------------
|
||||
<1> Create a request with the given Calendar
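
Spelled out, a minimal sketch of building the `Calendar` and wrapping it in a request (the calendar id, job id and description below are illustrative values only):

["source","java"]
--------------------------------------------------
Calendar calendar = new Calendar("public_holidays",              // calendar_id
    Collections.singletonList("job_1"),                          // job ids or job groups
    "A calendar for public holidays");                           // optional description
PutCalendarRequest request = new PutCalendarRequest(calendar);
--------------------------------------------------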
|
||||
|
||||
|
||||
[[java-rest-high-x-pack-ml-put-calendar-response]]
|
||||
==== Put Calendar Response
|
||||
|
||||
The returned `PutCalendarResponse` contains the created Calendar:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-calendar-response]
|
||||
--------------------------------------------------
|
||||
<1> The created Calendar
|
||||
|
||||
[[java-rest-high-x-pack-ml-put-calendar-execution]]
|
||||
==== Execution
|
||||
The request can be executed through the `MachineLearningClient` contained
|
||||
in the `RestHighLevelClient` object, accessed via the `machineLearning()` method.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-calendar-execution]
|
||||
--------------------------------------------------
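
Concretely, the synchronous call boils down to the following sketch, returning the `PutCalendarResponse` described above:

["source","java"]
--------------------------------------------------
PutCalendarResponse response = client.machineLearning().putCalendar(request, RequestOptions.DEFAULT);
Calendar created = response.getCalendar(); // the calendar as stored by the cluster
--------------------------------------------------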
|
||||
|
||||
[[java-rest-high-x-pack-ml-put-calendar-execution-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The request can also be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-calendar-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `PutCalendarRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed, the `ActionListener` is called back with the `onResponse` method
|
||||
if the execution is successful or the `onFailure` method if the execution
|
||||
failed.
|
||||
|
||||
A typical listener for `PutCalendarResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-calendar-listener]
|
||||
--------------------------------------------------
|
||||
<1> `onResponse` is called back when the action is completed successfully
|
||||
<2> `onFailure` is called back when some unexpected error occurs
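
As a sketch, the asynchronous variant simply wires such a listener into `putCalendarAsync` (the listener body is a placeholder):

["source","java"]
--------------------------------------------------
ActionListener<PutCalendarResponse> listener = new ActionListener<PutCalendarResponse>() {
    @Override
    public void onResponse(PutCalendarResponse putCalendarResponse) {
        // react to the created calendar
    }

    @Override
    public void onFailure(Exception e) {
        // handle the failure
    }
};
client.machineLearning().putCalendarAsync(request, RequestOptions.DEFAULT, listener);
--------------------------------------------------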
|
||||
|
|
@ -221,6 +221,7 @@ The Java High Level REST Client supports the following Machine Learning APIs:
|
|||
* <<java-rest-high-x-pack-ml-update-job>>
|
||||
* <<java-rest-high-x-pack-ml-get-job-stats>>
|
||||
* <<java-rest-high-x-pack-ml-put-datafeed>>
|
||||
* <<java-rest-high-x-pack-ml-delete-datafeed>>
|
||||
* <<java-rest-high-x-pack-ml-forecast-job>>
|
||||
* <<java-rest-high-x-pack-ml-delete-forecast>>
|
||||
* <<java-rest-high-x-pack-ml-get-buckets>>
|
||||
|
@ -229,6 +230,7 @@ The Java High Level REST Client supports the following Machine Learning APIs:
|
|||
* <<java-rest-high-x-pack-ml-post-data>>
|
||||
* <<java-rest-high-x-pack-ml-get-influencers>>
|
||||
* <<java-rest-high-x-pack-ml-get-categories>>
|
||||
* <<java-rest-high-x-pack-ml-put-calendar>>
|
||||
|
||||
include::ml/put-job.asciidoc[]
|
||||
include::ml/get-job.asciidoc[]
|
||||
|
@ -238,6 +240,7 @@ include::ml/close-job.asciidoc[]
|
|||
include::ml/update-job.asciidoc[]
|
||||
include::ml/flush-job.asciidoc[]
|
||||
include::ml/put-datafeed.asciidoc[]
|
||||
include::ml/delete-datafeed.asciidoc[]
|
||||
include::ml/get-job-stats.asciidoc[]
|
||||
include::ml/forecast-job.asciidoc[]
|
||||
include::ml/delete-forecast.asciidoc[]
|
||||
|
@ -247,6 +250,7 @@ include::ml/get-records.asciidoc[]
|
|||
include::ml/post-data.asciidoc[]
|
||||
include::ml/get-influencers.asciidoc[]
|
||||
include::ml/get-categories.asciidoc[]
|
||||
include::ml/put-calendar.asciidoc[]
|
||||
|
||||
== Migration APIs
|
||||
|
||||
|
|
After Width: | Height: | Size: 25 KiB |
After Width: | Height: | Size: 23 KiB |
After Width: | Height: | Size: 20 KiB |
After Width: | Height: | Size: 29 KiB |
After Width: | Height: | Size: 28 KiB |
After Width: | Height: | Size: 86 KiB |
After Width: | Height: | Size: 60 KiB |
After Width: | Height: | Size: 56 KiB |
After Width: | Height: | Size: 54 KiB |
After Width: | Height: | Size: 56 KiB |
After Width: | Height: | Size: 97 KiB |
After Width: | Height: | Size: 22 KiB |
After Width: | Height: | Size: 26 KiB |
After Width: | Height: | Size: 17 KiB |
After Width: | Height: | Size: 28 KiB |
After Width: | Height: | Size: 22 KiB |
After Width: | Height: | Size: 13 KiB |
After Width: | Height: | Size: 44 KiB |
After Width: | Height: | Size: 16 KiB |
After Width: | Height: | Size: 25 KiB |
After Width: | Height: | Size: 34 KiB |
After Width: | Height: | Size: 74 KiB |
|
@ -87,3 +87,9 @@ depending on whether {security} is enabled. Previously a
|
|||
404 - NOT FOUND (IndexNotFoundException) could be returned in case the
|
||||
current user was not authorized for any alias. An empty response with
|
||||
status 200 - OK is now returned instead at all times.
|
||||
|
||||
==== Put User API response no longer has `user` object
|
||||
|
||||
The Put User API response was changed in 6.5.0 to add the `created` field
|
||||
outside of the `user` object, where it had previously been located. In 7.0.0 the `user`
|
||||
object has been removed in favor of the top level `created` field.
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
[role="xpack"]
|
||||
[[security-files]]
|
||||
=== Security Files
|
||||
|
|
@ -29,17 +29,17 @@ information, see <<security-settings>>.
|
|||
For more information about encrypting communications across the Elastic Stack,
|
||||
see {xpack-ref}/encrypting-communications.html[Encrypting Communications].
|
||||
|
||||
:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/securing-communications/node-certificates.asciidoc
|
||||
:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/security/securing-communications/node-certificates.asciidoc
|
||||
include::node-certificates.asciidoc[]
|
||||
|
||||
:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/securing-communications/tls-transport.asciidoc
|
||||
:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/security/securing-communications/tls-transport.asciidoc
|
||||
include::tls-transport.asciidoc[]
|
||||
|
||||
:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/securing-communications/tls-http.asciidoc
|
||||
:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/security/securing-communications/tls-http.asciidoc
|
||||
include::tls-http.asciidoc[]
|
||||
|
||||
:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/securing-communications/tls-ad.asciidoc
|
||||
:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/security/securing-communications/tls-ad.asciidoc
|
||||
include::tls-ad.asciidoc[]
|
||||
|
||||
:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/securing-communications/tls-ldap.asciidoc
|
||||
:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/security/securing-communications/tls-ldap.asciidoc
|
||||
include::tls-ldap.asciidoc[]
|
|
@ -37,7 +37,7 @@ transport.profiles.client.bind_host: 1.1.1.1 <2>
|
|||
<2> The bind address for the network used for client communication
|
||||
|
||||
If separate networks are not available, then
|
||||
{xpack-ref}/ip-filtering.html[IP Filtering] can
|
||||
{stack-ov}/ip-filtering.html[IP Filtering] can
|
||||
be enabled to limit access to the profiles.
|
||||
|
||||
When using SSL for transport, a different set of certificates can also be used
|
||||
|
@ -65,4 +65,4 @@ transport.profiles.client.xpack.security.ssl.client_authentication: none
|
|||
This setting keeps certificate authentication active for node-to-node traffic,
|
||||
but removes the requirement to distribute a signed certificate to transport
|
||||
clients. For more information, see
|
||||
{xpack-ref}/java-clients.html#transport-client[Configuring the Transport Client to work with a Secured Cluster].
|
||||
{stack-ov}/java-clients.html#transport-client[Configuring the Transport Client to work with a Secured Cluster].
|
|
@ -0,0 +1,57 @@
|
|||
[role="xpack"]
|
||||
[testenv="platinum"]
|
||||
[[sql-client-apps-dbeaver]]
|
||||
=== DBeaver
|
||||
|
||||
[quote, https://dbeaver.io/]
|
||||
____
|
||||
https://dbeaver.io/[DBeaver] is a free and open source universal database tool for developers and database administrators.
|
||||
____
|
||||
|
||||
==== Prerequisites
|
||||
|
||||
* DBeaver version 5.1.4 or higher
|
||||
* {es-sql} <<sql-jdbc, JDBC driver>>
|
||||
|
||||
==== New Connection
|
||||
|
||||
Create a new connection either through the *File* > *New* > *Database Connection* menu or directly through the *Database Connection* panel.
|
||||
|
||||
image:images/sql/client-apps/dbeaver-1-new-conn.png[]
|
||||
|
||||
==== Select {es} type
|
||||
Select the {es} type from the available connection types:
|
||||
|
||||
image:images/sql/client-apps/dbeaver-2-conn-es.png[]
|
||||
|
||||
==== Specify the {es} cluster information
|
||||
|
||||
Configure the {es-sql} connection appropriately:
|
||||
|
||||
image:images/sql/client-apps/dbeaver-3-conn-props.png[]
|
||||
|
||||
==== Verify the driver version
|
||||
|
||||
Make sure the correct JDBC driver version is used, via the *Edit Driver Settings* button:
|
||||
|
||||
image:images/sql/client-apps/dbeaver-4-driver-ver.png[]
|
||||
|
||||
DBeaver is aware of the {es} JDBC Maven repository, so simply *Download/Update* the artifact or add a new one. As an alternative, one can add a local file instead if the {es} Maven repository is not an option.
|
||||
|
||||
When changing the driver, make sure to click on the *Find Class* button at the bottom - the driver class should be picked out automatically, but this provides a sanity check that the driver jar is properly found and is not corrupt.
|
||||
|
||||
==== Test connectivity
|
||||
|
||||
Once the driver version and the settings are in place, use *Test Connection* to check that everything works. If things are okay, one should get a confirmation window with the version of the driver and that of {es-sql}:
|
||||
|
||||
image:images/sql/client-apps/dbeaver-5-test-conn.png[]
|
||||
|
||||
Click *Finish* and the new {es} connection appears in the *Database Connection* panel.
|
||||
|
||||
DBeaver is now configured to talk to {es}.
|
||||
|
||||
==== Connect to {es}
|
||||
|
||||
Simply click on the {es} connection and start querying and exploring {es}:
|
||||
|
||||
image:images/sql/client-apps/dbeaver-6-data.png[]
|
|
@ -0,0 +1,42 @@
|
|||
[role="xpack"]
|
||||
[testenv="platinum"]
|
||||
[[sql-client-apps-dbvis]]
|
||||
=== DbVisualizer
|
||||
|
||||
[quote, http://www.dbvis.com/]
|
||||
____
|
||||
https://www.dbvis.com/[DbVisualizer] is a database management and analysis tool for all major databases.
|
||||
____
|
||||
|
||||
==== Prerequisites
|
||||
|
||||
* {es-sql} <<sql-jdbc, JDBC driver>>
|
||||
|
||||
==== Add {es} JDBC driver
|
||||
|
||||
Add the {es} JDBC driver to DbVisualizer through *Tools* > *Driver Manager*:
|
||||
|
||||
image:images/sql/client-apps/dbvis-1-driver-manager.png[]
|
||||
|
||||
Create a new driver entry through *Driver* > *Create Driver* and add the JDBC driver in the files panel
|
||||
through the buttons on the right. Once specified, the driver class and its version should be picked up automatically - one can force a refresh through the *Find driver in listed locations* button, the second from the bottom on the right-hand side:
|
||||
|
||||
image:images/sql/client-apps/dbvis-2-driver.png[]
|
||||
|
||||
==== Create a new connection
|
||||
|
||||
Once the {es} driver is in place, create a new connection:
|
||||
|
||||
image:images/sql/client-apps/dbvis-3-new-conn.png[]
|
||||
|
||||
One can use the wizard or add the settings all at once:
|
||||
|
||||
image:images/sql/client-apps/dbvis-4-conn-props.png[]
|
||||
|
||||
Press *Connect* and the driver version (as well as that of the cluster) should show up under *Connection Message*.
|
||||
|
||||
==== Execute SQL queries
|
||||
|
||||
The setup is done. DbVisualizer can be used to run queries against {es} and explore its content:
|
||||
|
||||
image:images/sql/client-apps/dbvis-5-data.png[]
|
|
@ -0,0 +1,21 @@
|
|||
[role="xpack"]
|
||||
[testenv="platinum"]
|
||||
[[sql-client-apps]]
|
||||
== SQL Client Applications
|
||||
|
||||
Thanks to its <<sql-jdbc, JDBC>> interface, a broad range of third-party applications can use {es}'s SQL capabilities.
|
||||
This section lists, in alphabetical order, a number of them and their respective configuration - the list however is by no means comprehensive (feel free to https://www.elastic.co/blog/art-of-pull-request[submit a PR] to improve it):
|
||||
as long as the app can use the {es-sql} driver, it can use {es-sql}.
|
||||
|
||||
* <<sql-client-apps-dbeaver, DBeaver>>
|
||||
* <<sql-client-apps-dbvis, DbVisualizer>>
|
||||
* <<sql-client-apps-squirrel, SQuirreL SQL>>
|
||||
* <<sql-client-apps-workbench, SQL Workbench>>
|
||||
|
||||
NOTE: Each application has its own requirements and license; these are outside the scope of this documentation
|
||||
which covers only the configuration aspect with {es-sql}.
|
||||
|
||||
include::dbeaver.asciidoc[]
|
||||
include::dbvis.asciidoc[]
|
||||
include::squirrel.asciidoc[]
|
||||
include::workbench.asciidoc[]
|
|
@ -0,0 +1,50 @@
|
|||
[role="xpack"]
|
||||
[testenv="platinum"]
|
||||
[[sql-client-apps-squirrel]]
|
||||
=== SQuirreL SQL
|
||||
|
||||
[quote, http://squirrel-sql.sourceforge.net/]
|
||||
____
|
||||
http://squirrel-sql.sourceforge.net/[SQuirreL SQL] is a graphical, [multi-platform] Java program that will allow you to view the structure of a JDBC compliant database [...].
|
||||
____
|
||||
|
||||
==== Prerequisites
|
||||
|
||||
* {es-sql} <<sql-jdbc, JDBC driver>>
|
||||
|
||||
==== Add {es} JDBC driver
|
||||
|
||||
To add the {es} JDBC driver, use the *Windows* > *View Drivers* menu (or the Ctrl+Shift+D shortcut):
|
||||
|
||||
image:images/sql/client-apps/squirell-1-view-drivers.png[]
|
||||
|
||||
This opens up the `Drivers` panel on the left. Click on the `+` sign to create a new driver:
|
||||
|
||||
image:images/sql/client-apps/squirell-2-new-driver.png[]
|
||||
|
||||
Select the *Extra Class Path* tab and *Add* the JDBC jar. Click *List Drivers* to have the `Class Name` filled in
|
||||
automatically, and name the connection:
|
||||
|
||||
image:images/sql/client-apps/squirell-3-add-driver.png[]
|
||||
|
||||
The driver should now appear in the list:
|
||||
|
||||
image:images/sql/client-apps/squirell-4-driver-list.png[]
|
||||
|
||||
==== Add an alias for {es}
|
||||
|
||||
Add a new connection, or in SQuirreL terminology an _alias_, using the new driver. To do so, select the *Aliases* panel on the left and click the `+` sign:
|
||||
|
||||
image:images/sql/client-apps/squirell-5-add-alias.png[]
|
||||
|
||||
Name the new alias and select the `Elasticsearch` driver previously added:
|
||||
|
||||
image:images/sql/client-apps/squirell-6-alias-props.png[]
|
||||
|
||||
The setup is complete. Double-check it by clicking *Test Connection*.
|
||||
|
||||
==== Execute SQL queries
|
||||
|
||||
The connection should open automatically (if it has been created before, simply click *Connect* in the *Aliases* panel). SQuirreL SQL can now issue SQL commands to {es}:
|
||||
|
||||
image:images/sql/client-apps/squirell-7-data.png[]
|
|
@ -0,0 +1,40 @@
|
|||
[role="xpack"]
|
||||
[testenv="platinum"]
|
||||
[[sql-client-apps-workbench]]
|
||||
=== SQL Workbench/J
|
||||
|
||||
[quote, https://www.sql-workbench.eu/]
|
||||
____
|
||||
https://www.sql-workbench.eu/[SQL Workbench/J] is a free, DBMS-independent, cross-platform SQL query tool.
|
||||
____
|
||||
|
||||
==== Prerequisites
|
||||
|
||||
* {es-sql} <<sql-jdbc, JDBC driver>>
|
||||
|
||||
==== Add {es} JDBC driver
|
||||
|
||||
Add the {es} JDBC driver to SQL Workbench/J through *Manage Drivers*, either from the main window in the *File* menu or from the *Connect* window:
|
||||
|
||||
image:images/sql/client-apps/workbench-1-manage-drivers.png[]
|
||||
|
||||
Add a new entry to the list through the blank page button in the upper left corner. Add the JDBC jar, provide a name, and click the magnifier button to have the driver *Classname* picked up automatically:
|
||||
|
||||
image:images/sql/client-apps/workbench-2-add-driver.png[]
|
||||
|
||||
==== Create a new connection profile
|
||||
|
||||
With the driver configured, create a new connection profile through *File* > *Connect Window* (or Alt+C shortcut):
|
||||
|
||||
image:images/sql/client-apps/workbench-3-connection.png[]
|
||||
|
||||
Select the previously configured driver and set the URL of your cluster using the JDBC syntax.
|
||||
Verify the connection through the *Test* button - a confirmation window should appear showing that everything is properly configured.
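As a rough illustration only - the exact URL depends on your cluster - a connection string assuming the default `jdbc:es://` prefix and a hypothetical local node on port 9200 might look like this:

["source","java"]
----
// hypothetical example values; replace host and port with those of your cluster
String url = "jdbc:es://http://localhost:9200";
----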
|
||||
|
||||
The setup is complete.
|
||||
|
||||
==== Execute SQL queries
|
||||
|
||||
SQL Workbench/J is ready to talk to {es} through SQL: click on the profile created to execute statements or explore the data:
|
||||
|
||||
image:images/sql/client-apps/workbench-4-data.png[]
|
|
@ -2,3 +2,4 @@ include::rest.asciidoc[]
|
|||
include::translate.asciidoc[]
|
||||
include::cli.asciidoc[]
|
||||
include::jdbc.asciidoc[]
|
||||
include::client-apps/index.asciidoc[]
|
||||
|
|
|
@ -3,14 +3,20 @@
|
|||
[[sql-jdbc]]
|
||||
== SQL JDBC
|
||||
|
||||
Elasticsearch's SQL jdbc driver is a rich, fully featured JDBC driver for Elasticsearch.
|
||||
{es}'s SQL jdbc driver is a rich, fully featured JDBC driver for {es}.
|
||||
It is a Type 4 driver, meaning it is a platform independent, stand-alone, Direct to Database,
|
||||
pure Java driver that converts JDBC calls to Elasticsearch SQL.
|
||||
pure Java driver that converts JDBC calls to {es-sql}.
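As a minimal, non-authoritative sketch of what using the driver looks like - assuming the driver jar is on the classpath and a local cluster is reachable on port 9200 with the default `jdbc:es://` URL prefix (all assumptions, not part of this documentation's setup):

["source","java"]
----
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class EsJdbcSketch {
    public static void main(String[] args) throws Exception {
        // assumption: default jdbc:es:// prefix and a local cluster on port 9200
        String url = "jdbc:es://http://localhost:9200";
        try (Connection connection = DriverManager.getConnection(url);
             Statement statement = connection.createStatement();
             // trivial query just to verify the round trip
             ResultSet results = statement.executeQuery("SELECT 1")) {
            while (results.next()) {
                System.out.println(results.getInt(1));
            }
        }
    }
}
----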
|
||||
|
||||
[[sql-jdbc-installation]]
|
||||
[float]
|
||||
=== Installation
|
||||
|
||||
The JDBC driver can be obtained either by downloading it from the https://www.elastic.co/downloads/jdbc-client[elastic.co] site or by using a http://maven.apache.org/[Maven]-compatible tool with the following dependency:
|
||||
The JDBC driver can be obtained from:
|
||||
|
||||
Dedicated page::
|
||||
https://www.elastic.co/downloads/jdbc-client[elastic.co] provides links, typically for manual downloads.
|
||||
Maven dependency::
|
||||
http://maven.apache.org/[Maven]-compatible tools can retrieve it automatically as a dependency:
|
||||
|
||||
["source","xml",subs="attributes"]
|
||||
----
|
||||
|
|
|
@ -36,6 +36,8 @@ indices and return results in tabular format.
|
|||
SQL and print tabular results.
|
||||
<<sql-jdbc,JDBC>>::
|
||||
A JDBC driver for {es}.
|
||||
<<sql-client-apps,Client Applications>>::
|
||||
Documentation for configuring various SQL/BI tools with {es-sql}.
|
||||
<<sql-spec,SQL Language>>::
|
||||
Overview of the {es-sql} language, such as supported data types, commands and
|
||||
syntax.
|
||||
|
|
|
@ -6,9 +6,12 @@
|
|||
.Synopsis
|
||||
[source, sql]
|
||||
----
|
||||
DESCRIBE [table identifier<1>|[LIKE pattern<2>]]
|
||||
DESCRIBE [table identifier<1> | [LIKE pattern<2>]]
|
||||
----
|
||||
|
||||
<1> single table identifier or double-quoted {es} multi-index
|
||||
<2> SQL LIKE pattern
|
||||
|
||||
or
|
||||
|
||||
[source, sql]
|
||||
|
@ -16,6 +19,8 @@ or
|
|||
DESC [table identifier<1>|[LIKE pattern<2>]]
|
||||
----
|
||||
|
||||
<1> single table identifier or double-quoted {es} multi-index
|
||||
<2> SQL LIKE pattern
|
||||
|
||||
.Description
|
||||
|
||||
|
|
|
@ -183,7 +183,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
|
|||
|
||||
/**
|
||||
* Returns the minimum number of children that are required to match for the parent to be considered a match.
|
||||
* The default is {@value #DEFAULT_MAX_CHILDREN}
|
||||
* The default is {@value #DEFAULT_MIN_CHILDREN}
|
||||
*/
|
||||
public int minChildren() {
|
||||
return minChildren;
|
||||
|
@ -191,7 +191,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
|
|||
|
||||
/**
|
||||
* Returns the maximum number of children that are required to match for the parent to be considered a match.
|
||||
* The default is {@value #DEFAULT_MIN_CHILDREN}
|
||||
* The default is {@value #DEFAULT_MAX_CHILDREN}
|
||||
*/
|
||||
public int maxChildren() { return maxChildren; }
|
||||
|
||||
|
|
|
@ -42,6 +42,7 @@ import static org.hamcrest.Matchers.equalTo;
|
|||
public class FullClusterRestartSettingsUpgradeIT extends AbstractFullClusterRestartTestCase {
|
||||
|
||||
public void testRemoteClusterSettingsUpgraded() throws IOException {
|
||||
assumeTrue("skip_unavailable did not exist until 6.1.0", getOldClusterVersion().onOrAfter(Version.V_6_1_0));
|
||||
assumeTrue("settings automatically upgraded since 6.5.0", getOldClusterVersion().before(Version.V_6_5_0));
|
||||
if (isRunningAgainstOldCluster()) {
|
||||
final Request putSettingsRequest = new Request("PUT", "/_cluster/settings");
|
||||
|
|
|
@ -111,6 +111,7 @@ public class RecoveryIT extends AbstractRollingTestCase {
|
|||
return future;
|
||||
}
|
||||
|
||||
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/33616")
|
||||
public void testRecoveryWithConcurrentIndexing() throws Exception {
|
||||
final String index = "recovery_with_concurrent_indexing";
|
||||
Response response = client().performRequest(new Request("GET", "_nodes"));
|
||||
|
@ -183,6 +184,7 @@ public class RecoveryIT extends AbstractRollingTestCase {
|
|||
}
|
||||
|
||||
|
||||
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/33616")
|
||||
public void testRelocationWithConcurrentIndexing() throws Exception {
|
||||
final String index = "relocation_with_concurrent_indexing";
|
||||
switch (CLUSTER_TYPE) {
|
||||
|
|
|
@ -134,8 +134,8 @@ setup:
|
|||
---
|
||||
"docvalue_fields":
|
||||
- skip:
|
||||
version: " - 6.3.99"
|
||||
reason: format option was added in 6.4
|
||||
version: " - 6.4.0"
|
||||
reason: format option was added in 6.4 and the deprecation message changed in 6.4.1
|
||||
features: warnings
|
||||
- do:
|
||||
warnings:
|
||||
|
@ -148,8 +148,8 @@ setup:
|
|||
---
|
||||
"multiple docvalue_fields":
|
||||
- skip:
|
||||
version: " - 6.3.99"
|
||||
reason: format option was added in 6.4
|
||||
version: " - 6.4.0"
|
||||
reason: format option was added in 6.4 and the deprecation message changed in 6.4.1
|
||||
features: warnings
|
||||
- do:
|
||||
warnings:
|
||||
|
@ -162,8 +162,8 @@ setup:
|
|||
---
|
||||
"docvalue_fields as url param":
|
||||
- skip:
|
||||
version: " - 6.3.99"
|
||||
reason: format option was added in 6.4
|
||||
version: " - 6.4.0"
|
||||
reason: format option was added in 6.4 and the deprecation message changed in 6.4.1
|
||||
features: warnings
|
||||
- do:
|
||||
warnings:
|
||||
|
|
|
@ -28,7 +28,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
|
|||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.time.CompoundDateTimeFormatter;
|
||||
import org.elasticsearch.common.time.DateFormatter;
|
||||
import org.elasticsearch.common.time.DateFormatters;
|
||||
import org.elasticsearch.common.xcontent.ContextParser;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
|
@ -368,8 +368,7 @@ public final class IndexGraveyard implements MetaData.Custom {
|
|||
TOMBSTONE_PARSER.declareString((b, s) -> {}, new ParseField(DELETE_DATE_KEY));
|
||||
}
|
||||
|
||||
static final CompoundDateTimeFormatter FORMATTER =
|
||||
DateFormatters.forPattern("strict_date_optional_time").withZone(ZoneOffset.UTC);
|
||||
static final DateFormatter FORMATTER = DateFormatters.forPattern("strict_date_optional_time").withZone(ZoneOffset.UTC);
|
||||
|
||||
static ContextParser<Void, Tombstone> getParser() {
|
||||
return (parser, context) -> TOMBSTONE_PARSER.apply(parser, null).build();
|
||||
|
|
|
@ -31,7 +31,7 @@ import org.elasticsearch.common.io.stream.Writeable;
|
|||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Setting.Property;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.time.CompoundDateTimeFormatter;
|
||||
import org.elasticsearch.common.time.DateFormatter;
|
||||
import org.elasticsearch.common.time.DateFormatters;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.ToXContentFragment;
|
||||
|
@ -48,8 +48,7 @@ import java.util.Objects;
|
|||
*/
|
||||
public final class UnassignedInfo implements ToXContentFragment, Writeable {
|
||||
|
||||
public static final CompoundDateTimeFormatter DATE_TIME_FORMATTER =
|
||||
DateFormatters.forPattern("dateOptionalTime").withZone(ZoneOffset.UTC);
|
||||
public static final DateFormatter DATE_TIME_FORMATTER = DateFormatters.forPattern("dateOptionalTime").withZone(ZoneOffset.UTC);
|
||||
|
||||
public static final Setting<TimeValue> INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING =
|
||||
Setting.positiveTimeSetting("index.unassigned.node_left.delayed_timeout", TimeValue.timeValueMinutes(1), Property.Dynamic,
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.common;
|
||||
|
||||
import org.elasticsearch.common.time.CompoundDateTimeFormatter;
|
||||
import org.elasticsearch.common.time.DateFormatter;
|
||||
import org.elasticsearch.common.time.DateFormatters;
|
||||
|
||||
import java.time.Instant;
|
||||
|
@ -85,7 +85,7 @@ public class Table {
|
|||
return this;
|
||||
}
|
||||
|
||||
private static final CompoundDateTimeFormatter FORMATTER = DateFormatters.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC);
|
||||
private static final DateFormatter FORMATTER = DateFormatters.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC);
|
||||
|
||||
public Table startRow() {
|
||||
if (headers.isEmpty()) {
|
||||
|
|
|
@ -458,7 +458,7 @@ public class Setting<T> implements ToXContentObject {
|
|||
* @return the raw string representation of the setting value
|
||||
*/
|
||||
String innerGetRaw(final Settings settings) {
|
||||
return settings.get(getKey(), defaultValue.apply(settings), isListSetting());
|
||||
return settings.get(getKey(), defaultValue.apply(settings));
|
||||
}
|
||||
|
||||
/** Logs a deprecation warning if the setting is deprecated and used. */
|
||||
|
|
|
@ -0,0 +1,133 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.common.time;
|
||||
|
||||
import java.time.ZoneId;
|
||||
import java.time.format.DateTimeParseException;
|
||||
import java.time.temporal.TemporalAccessor;
|
||||
import java.time.temporal.TemporalField;
|
||||
import java.util.Arrays;
|
||||
import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
public interface DateFormatter {
|
||||
|
||||
/**
|
||||
* Try to parse input to a java time TemporalAccessor
|
||||
* @param input An arbitrary string resembling the string representation of a date or time
|
||||
* @throws DateTimeParseException If parsing fails, this exception will be thrown.
|
||||
* Note that it can contain suppressed exceptions when several formatters failed to parse this value
|
||||
* @return The java time object containing the parsed input
|
||||
*/
|
||||
TemporalAccessor parse(String input);
|
||||
|
||||
/**
|
||||
* Create a copy of this formatter that is configured to parse dates in the specified time zone
|
||||
*
|
||||
* @param zoneId The time zone to act on
|
||||
* @return A copy of the date formatter this has been called on
|
||||
*/
|
||||
DateFormatter withZone(ZoneId zoneId);
|
||||
|
||||
/**
|
||||
* Print the supplied java time accessor in a string based representation according to this formatter
|
||||
*
|
||||
* @param accessor The temporal accessor used to format
|
||||
* @return The string result for the formatting
|
||||
*/
|
||||
String format(TemporalAccessor accessor);
|
||||
|
||||
/**
|
||||
* A name based format for this formatter. Can be one of the registered formatters like <code>epoch_millis</code> or
|
||||
* a configured format like <code>HH:mm:ss</code>
|
||||
*
|
||||
* @return The name of this formatter
|
||||
*/
|
||||
String pattern();
|
||||
|
||||
/**
|
||||
* Configure a formatter using default fields for a TemporalAccessor that should be used in case
|
||||
* the supplied date does not have all of those fields
|
||||
*
|
||||
* @param fields A <code>Map<TemporalField, Long></code> of fields to be used as fallbacks
|
||||
* @return A new date formatter instance, that will use those fields during parsing
|
||||
*/
|
||||
DateFormatter parseDefaulting(Map<TemporalField, Long> fields);
|
||||
|
||||
/**
|
||||
* Merge several date formatters into a single one. Useful if you need to have several formatters with
|
||||
* different formats act as one, for example when you specify a
|
||||
* format like <code>date_hour||epoch_millis</code>
|
||||
*
|
||||
* @param formatters The list of date formatters to be merged together
|
||||
* @return The new date formatter containing the specified date formatters
|
||||
*/
|
||||
static DateFormatter merge(DateFormatter ... formatters) {
|
||||
return new MergedDateFormatter(formatters);
|
||||
}
|
||||
|
||||
class MergedDateFormatter implements DateFormatter {
|
||||
|
||||
private final String format;
|
||||
private final DateFormatter[] formatters;
|
||||
|
||||
MergedDateFormatter(DateFormatter ... formatters) {
|
||||
this.formatters = formatters;
|
||||
this.format = Arrays.stream(formatters).map(DateFormatter::pattern).collect(Collectors.joining("||"));
|
||||
}
|
||||
|
||||
@Override
|
||||
public TemporalAccessor parse(String input) {
|
||||
DateTimeParseException failure = null;
|
||||
for (DateFormatter formatter : formatters) {
|
||||
try {
|
||||
return formatter.parse(input);
|
||||
} catch (DateTimeParseException e) {
|
||||
if (failure == null) {
|
||||
failure = e;
|
||||
} else {
|
||||
failure.addSuppressed(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
throw failure;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DateFormatter withZone(ZoneId zoneId) {
|
||||
return new MergedDateFormatter(Arrays.stream(formatters).map(f -> f.withZone(zoneId)).toArray(DateFormatter[]::new));
|
||||
}
|
||||
|
||||
@Override
|
||||
public String format(TemporalAccessor accessor) {
|
||||
return formatters[0].format(accessor);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String pattern() {
|
||||
return format;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DateFormatter parseDefaulting(Map<TemporalField, Long> fields) {
|
||||
return new MergedDateFormatter(Arrays.stream(formatters).map(f -> f.parseDefaulting(fields)).toArray(DateFormatter[]::new));
|
||||
}
|
||||
}
|
||||
}
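An illustrative usage sketch (not part of this change itself): merging two formatters obtained via `DateFormatters.forPattern`, which elsewhere in this change is shown returning a `DateFormatter`; that every registered format name (such as `epoch_millis`) resolves through `forPattern` is an assumption here.

["source","java"]
----
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;

import java.time.temporal.TemporalAccessor;

public class MergedFormatterSketch {
    public static void main(String[] args) {
        // assumption: forPattern resolves these registered format names
        DateFormatter formatter = DateFormatter.merge(
            DateFormatters.forPattern("strict_date_optional_time"),
            DateFormatters.forPattern("epoch_millis"));

        // parsing tries each formatter in turn; if all fail, the first
        // DateTimeParseException is thrown with the others suppressed
        TemporalAccessor parsed = formatter.parse("2018-09-18T10:15:30Z");

        System.out.println(formatter.pattern());      // strict_date_optional_time||epoch_millis
        System.out.println(formatter.format(parsed)); // printed with the first formatter
    }
}
----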
|
|
@ -25,12 +25,10 @@ import java.time.DateTimeException;
|
|||
import java.time.DayOfWeek;
|
||||
import java.time.Instant;
|
||||
import java.time.LocalDate;
|
||||
import java.time.ZoneId;
|
||||
import java.time.ZoneOffset;
|
||||
import java.time.ZonedDateTime;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.time.format.DateTimeFormatterBuilder;
|
||||
import java.time.format.DateTimeParseException;
|
||||
import java.time.format.ResolverStyle;
|
||||
import java.time.format.SignStyle;
|
||||
import java.time.temporal.ChronoField;
|
||||
|
@ -38,9 +36,6 @@ import java.time.temporal.IsoFields;
|
|||
import java.time.temporal.TemporalAccessor;
|
||||
import java.time.temporal.TemporalAdjusters;
|
||||
import java.time.temporal.WeekFields;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.Locale;
|
||||
|
||||
import static java.time.temporal.ChronoField.DAY_OF_MONTH;
|
||||
|
@ -106,8 +101,9 @@ public class DateFormatters {
|
|||
/**
|
||||
* Returns a generic ISO datetime parser where the date is mandatory and the time is optional.
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_DATE_OPTIONAL_TIME =
|
||||
new CompoundDateTimeFormatter(STRICT_DATE_OPTIONAL_TIME_FORMATTER_1, STRICT_DATE_OPTIONAL_TIME_FORMATTER_2);
|
||||
private static final DateFormatter STRICT_DATE_OPTIONAL_TIME =
|
||||
new JavaDateFormatter("strict_date_optional_time", STRICT_DATE_OPTIONAL_TIME_FORMATTER_1,
|
||||
STRICT_DATE_OPTIONAL_TIME_FORMATTER_1, STRICT_DATE_OPTIONAL_TIME_FORMATTER_2);
|
||||
|
||||
private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS_1 = new DateTimeFormatterBuilder()
|
||||
.append(STRICT_YEAR_MONTH_DAY_FORMATTER)
|
||||
|
@ -140,8 +136,9 @@ public class DateFormatters {
|
|||
/**
|
||||
* Returns a generic ISO datetime parser where the date is mandatory and the time is optional with nanosecond resolution.
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_DATE_OPTIONAL_TIME_NANOS =
|
||||
new CompoundDateTimeFormatter(STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS_1, STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS_2);
|
||||
private static final DateFormatter STRICT_DATE_OPTIONAL_TIME_NANOS = new JavaDateFormatter("strict_date_optional_time_nanos",
|
||||
STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS_1,
|
||||
STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS_1, STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS_2);
|
||||
|
||||
/////////////////////////////////////////
|
||||
//
|
||||
|
@ -162,7 +159,8 @@ public class DateFormatters {
|
|||
* Returns a basic formatter for a two digit hour of day, two digit minute
|
||||
* of hour, two digit second of minute, and time zone offset (HHmmssZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter BASIC_TIME_NO_MILLIS = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter BASIC_TIME_NO_MILLIS = new JavaDateFormatter("basic_time_no_millis",
|
||||
new DateTimeFormatterBuilder().append(BASIC_TIME_NO_MILLIS_BASE).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(BASIC_TIME_NO_MILLIS_BASE).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(BASIC_TIME_NO_MILLIS_BASE).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT)
|
||||
);
|
||||
|
@ -186,7 +184,7 @@ public class DateFormatters {
|
|||
* of hour, two digit second of minute, three digit millis, and time zone
|
||||
* offset (HHmmss.SSSZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter BASIC_TIME = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter BASIC_TIME = new JavaDateFormatter("basic_time",
|
||||
new DateTimeFormatterBuilder().append(BASIC_TIME_PRINTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(BASIC_TIME_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(BASIC_TIME_FORMATTER).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT)
|
||||
|
@ -203,7 +201,7 @@ public class DateFormatters {
|
|||
* of hour, two digit second of minute, three digit millis, and time zone
|
||||
* offset prefixed by 'T' ('T'HHmmss.SSSZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter BASIC_T_TIME = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter BASIC_T_TIME = new JavaDateFormatter("basic_t_time",
|
||||
new DateTimeFormatterBuilder().append(BASIC_T_TIME_PRINTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(BASIC_T_TIME_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(BASIC_T_TIME_FORMATTER).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT)
|
||||
|
@ -214,11 +212,11 @@ public class DateFormatters {
|
|||
* of hour, two digit second of minute, and time zone offset prefixed by 'T'
|
||||
* ('T'HHmmssZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter BASIC_T_TIME_NO_MILLIS = new CompoundDateTimeFormatter(
|
||||
new DateTimeFormatterBuilder().appendLiteral("T").append(BASIC_TIME_NO_MILLIS_BASE)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().appendLiteral("T").append(BASIC_TIME_NO_MILLIS_BASE)
|
||||
.append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT)
|
||||
private static final DateFormatter BASIC_T_TIME_NO_MILLIS = new JavaDateFormatter("basic_t_time_no_millis",
|
||||
new DateTimeFormatterBuilder().appendLiteral("T").append(BASIC_TIME_NO_MILLIS_BASE).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().appendLiteral("T").append(BASIC_TIME_NO_MILLIS_BASE).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().appendLiteral("T").append(BASIC_TIME_NO_MILLIS_BASE).append(TIME_ZONE_FORMATTER_NO_COLON)
|
||||
.toFormatter(Locale.ROOT)
|
||||
);
|
||||
|
||||
private static final DateTimeFormatter BASIC_YEAR_MONTH_DAY_FORMATTER = new DateTimeFormatterBuilder()
|
||||
|
@ -241,7 +239,7 @@ public class DateFormatters {
|
|||
* Returns a basic formatter that combines a basic date and time, separated
|
||||
* by a 'T' (yyyyMMdd'T'HHmmss.SSSZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter BASIC_DATE_TIME = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter BASIC_DATE_TIME = new JavaDateFormatter("basic_date_time",
|
||||
new DateTimeFormatterBuilder().append(BASIC_DATE_TIME_PRINTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(BASIC_DATE_TIME_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(BASIC_DATE_TIME_FORMATTER).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT)
|
||||
|
@ -254,7 +252,9 @@ public class DateFormatters {
|
|||
* Returns a basic formatter that combines a basic date and time without millis,
|
||||
* separated by a 'T' (yyyyMMdd'T'HHmmssZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter BASIC_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter BASIC_DATE_TIME_NO_MILLIS = new JavaDateFormatter("basic_date_time_no_millis",
|
||||
new DateTimeFormatterBuilder().append(BASIC_DATE_T).append(BASIC_TIME_NO_MILLIS_BASE)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(BASIC_DATE_T).append(BASIC_TIME_NO_MILLIS_BASE)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(BASIC_DATE_T).append(BASIC_TIME_NO_MILLIS_BASE)
|
||||
|
@ -265,14 +265,14 @@ public class DateFormatters {
|
|||
* Returns a formatter for a full ordinal date, using a four
|
||||
* digit year and three digit dayOfYear (yyyyDDD).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter BASIC_ORDINAL_DATE = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter BASIC_ORDINAL_DATE = new JavaDateFormatter("basic_ordinal_date",
|
||||
DateTimeFormatter.ofPattern("yyyyDDD", Locale.ROOT));
|
||||
|
||||
/*
|
||||
* Returns a formatter for a full ordinal date and time, using a four
|
||||
* digit year and three digit dayOfYear (yyyyDDD'T'HHmmss.SSSZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter BASIC_ORDINAL_DATE_TIME = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter BASIC_ORDINAL_DATE_TIME = new JavaDateFormatter("basic_ordinal_date_time",
|
||||
new DateTimeFormatterBuilder().appendPattern("yyyyDDD").append(BASIC_T_TIME_PRINTER)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().appendPattern("yyyyDDD").append(BASIC_T_TIME_FORMATTER)
|
||||
|
@ -284,7 +284,9 @@ public class DateFormatters {
|
|||
* Returns a formatter for a full ordinal date and time without millis,
|
||||
* using a four digit year and three digit dayOfYear (yyyyDDD'T'HHmmssZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter BASIC_ORDINAL_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter BASIC_ORDINAL_DATE_TIME_NO_MILLIS = new JavaDateFormatter("basic_ordinal_date_time_no_millis",
|
||||
new DateTimeFormatterBuilder().appendPattern("yyyyDDD").appendLiteral("T").append(BASIC_TIME_NO_MILLIS_BASE)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().appendPattern("yyyyDDD").appendLiteral("T").append(BASIC_TIME_NO_MILLIS_BASE)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().appendPattern("yyyyDDD").appendLiteral("T").append(BASIC_TIME_NO_MILLIS_BASE)
|
||||
|
@ -329,14 +331,14 @@ public class DateFormatters {
|
|||
* Returns a basic formatter for a full date as four digit weekyear, two
|
||||
* digit week of weekyear, and one digit day of week (xxxx'W'wwe).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_BASIC_WEEK_DATE =
|
||||
new CompoundDateTimeFormatter(STRICT_BASIC_WEEK_DATE_PRINTER, STRICT_BASIC_WEEK_DATE_FORMATTER);
|
||||
private static final DateFormatter STRICT_BASIC_WEEK_DATE =
|
||||
new JavaDateFormatter("strict_basic_week_date", STRICT_BASIC_WEEK_DATE_PRINTER, STRICT_BASIC_WEEK_DATE_FORMATTER);
|
||||
|
||||
/*
|
||||
* Returns a basic formatter that combines a basic weekyear date and time
|
||||
* without millis, separated by a 'T' (xxxx'W'wwe'T'HHmmssX).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS = new JavaDateFormatter("strict_basic_week_date_time_no_millis",
|
||||
new DateTimeFormatterBuilder()
|
||||
.append(STRICT_BASIC_WEEK_DATE_PRINTER).append(DateTimeFormatter.ofPattern("'T'HHmmssX", Locale.ROOT))
|
||||
.toFormatter(Locale.ROOT),
|
||||
|
@ -349,7 +351,7 @@ public class DateFormatters {
|
|||
* Returns a basic formatter that combines a basic weekyear date and time,
|
||||
* separated by a 'T' (xxxx'W'wwe'T'HHmmss.SSSX).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_BASIC_WEEK_DATE_TIME = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter STRICT_BASIC_WEEK_DATE_TIME = new JavaDateFormatter("strict_basic_week_date_time",
|
||||
new DateTimeFormatterBuilder()
|
||||
.append(STRICT_BASIC_WEEK_DATE_PRINTER)
|
||||
.append(DateTimeFormatter.ofPattern("'T'HHmmss.SSSX", Locale.ROOT))
|
||||
|
@ -363,30 +365,32 @@ public class DateFormatters {
|
|||
/*
|
||||
* An ISO date formatter that formats or parses a date without an offset, such as '2011-12-03'.
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_DATE = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter STRICT_DATE = new JavaDateFormatter("strict_date",
|
||||
DateTimeFormatter.ISO_LOCAL_DATE.withResolverStyle(ResolverStyle.LENIENT));
|
||||
|
||||
/*
|
||||
* A date formatter that formats or parses a date plus an hour without an offset, such as '2011-12-03T01'.
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_DATE_HOUR = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter STRICT_DATE_HOUR = new JavaDateFormatter("strict_date_hour",
|
||||
DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH", Locale.ROOT));
|
||||
|
||||
/*
|
||||
* A date formatter that formats or parses a date plus an hour/minute without an offset, such as '2011-12-03T01:10'.
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_DATE_HOUR_MINUTE = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter STRICT_DATE_HOUR_MINUTE = new JavaDateFormatter("strict_date_hour_minute",
|
||||
DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm", Locale.ROOT));
|
||||
|
||||
/*
|
||||
* A strict date formatter that formats or parses a date without an offset, such as '2011-12-03'.
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_YEAR_MONTH_DAY = new CompoundDateTimeFormatter(STRICT_YEAR_MONTH_DAY_FORMATTER);
|
||||
private static final DateFormatter STRICT_YEAR_MONTH_DAY =
|
||||
new JavaDateFormatter("strict_year_month_day", STRICT_YEAR_MONTH_DAY_FORMATTER);
|
||||
|
||||
/*
|
||||
* A strict formatter that formats or parses a year and a month, such as '2011-12'.
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_YEAR_MONTH = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder()
|
||||
private static final DateFormatter STRICT_YEAR_MONTH = new JavaDateFormatter("strict_year_month",
|
||||
new DateTimeFormatterBuilder()
|
||||
.appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
|
||||
.appendLiteral("-")
|
||||
.appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE)
|
||||
|
@ -395,15 +399,15 @@ public class DateFormatters {
|
|||
/*
|
||||
* A strict formatter that formats or parses a year, such as '2011'.
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_YEAR = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder()
|
||||
private static final DateFormatter STRICT_YEAR = new JavaDateFormatter("strict_year", new DateTimeFormatterBuilder()
|
||||
.appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
|
||||
.toFormatter(Locale.ROOT));
|
||||
|
||||
/*
|
||||
* A strict formatter that formats or parses a hour, minute and second, such as '09:43:25'.
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_HOUR_MINUTE_SECOND =
|
||||
new CompoundDateTimeFormatter(STRICT_HOUR_MINUTE_SECOND_FORMATTER);
|
||||
private static final DateFormatter STRICT_HOUR_MINUTE_SECOND =
|
||||
new JavaDateFormatter("strict_hour_minute_second", STRICT_HOUR_MINUTE_SECOND_FORMATTER);
|
||||
|
||||
private static final DateTimeFormatter STRICT_DATE_FORMATTER = new DateTimeFormatterBuilder()
|
||||
.append(STRICT_YEAR_MONTH_DAY_FORMATTER)
|
||||
|
@ -418,7 +422,8 @@ public class DateFormatters {
|
|||
* Returns a formatter that combines a full date and time, separated by a 'T'
|
||||
* (yyyy-MM-dd'T'HH:mm:ss.SSSZZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_DATE_TIME = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter STRICT_DATE_TIME = new JavaDateFormatter("strict_date_time",
|
||||
new DateTimeFormatterBuilder().append(STRICT_DATE_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(STRICT_DATE_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(STRICT_DATE_FORMATTER).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT)
|
||||
);
|
||||
|
@ -435,7 +440,7 @@ public class DateFormatters {
|
|||
* Returns a formatter for a full ordinal date and time without millis,
|
||||
* using a four digit year and three digit dayOfYear (yyyy-DDD'T'HH:mm:ssZZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_ORDINAL_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter STRICT_ORDINAL_DATE_TIME_NO_MILLIS = new JavaDateFormatter("strict_ordinal_date_time_no_millis",
|
||||
new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_NO_MILLIS_BASE)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_NO_MILLIS_BASE)
|
||||
|
@ -452,7 +457,9 @@ public class DateFormatters {
|
|||
* Returns a formatter that combines a full date and time without millis,
|
||||
* separated by a 'T' (yyyy-MM-dd'T'HH:mm:ssZZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter STRICT_DATE_TIME_NO_MILLIS = new JavaDateFormatter("strict_date_time_no_millis",
|
||||
new DateTimeFormatterBuilder().append(STRICT_DATE_TIME_NO_MILLIS_FORMATTER)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(STRICT_DATE_TIME_NO_MILLIS_FORMATTER)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(STRICT_DATE_TIME_NO_MILLIS_FORMATTER)
|
||||
|
@ -478,17 +485,19 @@ public class DateFormatters {
|
|||
* NOTE: this is not a strict formatter to retain the joda time based behaviour,
|
||||
* even though it's named like this
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_HOUR_MINUTE_SECOND_MILLIS =
|
||||
new CompoundDateTimeFormatter(STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER, STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER);
|
||||
private static final DateFormatter STRICT_HOUR_MINUTE_SECOND_MILLIS =
|
||||
new JavaDateFormatter("strict_hour_minute_second_millis",
|
||||
STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER, STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER);
|
||||
|
||||
private static final CompoundDateTimeFormatter STRICT_HOUR_MINUTE_SECOND_FRACTION = STRICT_HOUR_MINUTE_SECOND_MILLIS;
|
||||
private static final DateFormatter STRICT_HOUR_MINUTE_SECOND_FRACTION = STRICT_HOUR_MINUTE_SECOND_MILLIS;
|
||||
|
||||
/*
|
||||
* Returns a formatter that combines a full date, two digit hour of day,
|
||||
* two digit minute of hour, two digit second of minute, and three digit
|
||||
* fraction of second (yyyy-MM-dd'T'HH:mm:ss.SSS).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION = new JavaDateFormatter(
|
||||
"strict_date_hour_minute_second_fraction",
|
||||
new DateTimeFormatterBuilder()
|
||||
.append(STRICT_YEAR_MONTH_DAY_FORMATTER)
|
||||
.appendLiteral("T")
|
||||
|
@ -503,20 +512,20 @@ public class DateFormatters {
|
|||
.toFormatter(Locale.ROOT)
|
||||
);
|
||||
|
||||
private static final CompoundDateTimeFormatter STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS = STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION;
|
||||
private static final DateFormatter STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS = STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION;
|
||||
|
||||
/*
|
||||
* Returns a formatter for a two digit hour of day. (HH)
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_HOUR =
|
||||
new CompoundDateTimeFormatter(DateTimeFormatter.ofPattern("HH", Locale.ROOT));
|
||||
private static final DateFormatter STRICT_HOUR =
|
||||
new JavaDateFormatter("strict_hour", DateTimeFormatter.ofPattern("HH", Locale.ROOT));
|
||||
|
||||
/*
|
||||
* Returns a formatter for a two digit hour of day and two digit minute of
|
||||
* hour. (HH:mm)
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_HOUR_MINUTE =
|
||||
new CompoundDateTimeFormatter(DateTimeFormatter.ofPattern("HH:mm", Locale.ROOT));
|
||||
private static final DateFormatter STRICT_HOUR_MINUTE =
|
||||
new JavaDateFormatter("strict_hour_minute", DateTimeFormatter.ofPattern("HH:mm", Locale.ROOT));
|
||||
|
||||
private static final DateTimeFormatter STRICT_ORDINAL_DATE_TIME_FORMATTER_BASE = new DateTimeFormatterBuilder()
|
||||
.appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
|
||||
|
@ -535,7 +544,7 @@ public class DateFormatters {
|
|||
* Returns a formatter for a full ordinal date and time, using a four
|
||||
* digit year and three digit dayOfYear (yyyy-DDD'T'HH:mm:ss.SSSZZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_ORDINAL_DATE_TIME = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter STRICT_ORDINAL_DATE_TIME = new JavaDateFormatter("strict_ordinal_date_time",
|
||||
new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_FORMATTER_BASE)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_FORMATTER_BASE)
|
||||
|
@ -566,7 +575,7 @@ public class DateFormatters {
|
|||
* hour, two digit second of minute, three digit fraction of second, and
|
||||
* time zone offset (HH:mm:ss.SSSZZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_TIME = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter STRICT_TIME = new JavaDateFormatter("strict_time",
|
||||
new DateTimeFormatterBuilder().append(STRICT_TIME_PRINTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(STRICT_TIME_FORMATTER_BASE).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(STRICT_TIME_FORMATTER_BASE).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT)
|
||||
|
@ -577,7 +586,7 @@ public class DateFormatters {
|
|||
* hour, two digit second of minute, three digit fraction of second, and
|
||||
* time zone offset prefixed by 'T' ('T'HH:mm:ss.SSSZZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_T_TIME = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter STRICT_T_TIME = new JavaDateFormatter("strict_t_time",
|
||||
new DateTimeFormatterBuilder().appendLiteral('T').append(STRICT_TIME_PRINTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().appendLiteral('T').append(STRICT_TIME_FORMATTER_BASE)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
|
@ -597,7 +606,8 @@ public class DateFormatters {
|
|||
* Returns a formatter for a two digit hour of day, two digit minute of
|
||||
* hour, two digit second of minute, and time zone offset (HH:mm:ssZZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_TIME_NO_MILLIS = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter STRICT_TIME_NO_MILLIS = new JavaDateFormatter("strict_time_no_millis",
|
||||
new DateTimeFormatterBuilder().append(STRICT_TIME_NO_MILLIS_BASE).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(STRICT_TIME_NO_MILLIS_BASE).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(STRICT_TIME_NO_MILLIS_BASE).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT)
|
||||
);
|
||||
|
@ -607,7 +617,9 @@ public class DateFormatters {
|
|||
* hour, two digit second of minute, and time zone offset prefixed
|
||||
* by 'T' ('T'HH:mm:ssZZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_T_TIME_NO_MILLIS = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter STRICT_T_TIME_NO_MILLIS = new JavaDateFormatter("strict_t_time_no_millis",
|
||||
new DateTimeFormatterBuilder().appendLiteral("T").append(STRICT_TIME_NO_MILLIS_BASE)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().appendLiteral("T").append(STRICT_TIME_NO_MILLIS_BASE)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().appendLiteral("T").append(STRICT_TIME_NO_MILLIS_BASE)
|
||||
|
@ -632,13 +644,15 @@ public class DateFormatters {
|
|||
* Returns a formatter for a full date as four digit weekyear, two digit
|
||||
* week of weekyear, and one digit day of week (xxxx-'W'ww-e).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_WEEK_DATE = new CompoundDateTimeFormatter(ISO_WEEK_DATE);
|
||||
private static final DateFormatter STRICT_WEEK_DATE = new JavaDateFormatter("strict_week_date", ISO_WEEK_DATE);
|
||||
|
||||
/*
|
||||
* Returns a formatter that combines a full weekyear date and time without millis,
|
||||
* separated by a 'T' (xxxx-'W'ww-e'T'HH:mm:ssZZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_WEEK_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter STRICT_WEEK_DATE_TIME_NO_MILLIS = new JavaDateFormatter("strict_week_date_time_no_millis",
|
||||
new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T)
|
||||
.append(STRICT_TIME_NO_MILLIS_BASE).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T)
|
||||
.append(STRICT_TIME_NO_MILLIS_BASE).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T)
|
||||
|
@ -649,7 +663,7 @@ public class DateFormatters {
|
|||
* Returns a formatter that combines a full weekyear date and time,
|
||||
* separated by a 'T' (xxxx-'W'ww-e'T'HH:mm:ss.SSSZZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_WEEK_DATE_TIME = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter STRICT_WEEK_DATE_TIME = new JavaDateFormatter("strict_week_date_time",
|
||||
new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T).append(STRICT_TIME_PRINTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T).append(STRICT_TIME_FORMATTER_BASE)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
|
@ -660,7 +674,7 @@ public class DateFormatters {
|
|||
/*
|
||||
* Returns a formatter for a four digit weekyear
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_WEEKYEAR = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder()
|
||||
private static final DateFormatter STRICT_WEEKYEAR = new JavaDateFormatter("strict_weekyear", new DateTimeFormatterBuilder()
|
||||
.appendValue(WeekFields.ISO.weekBasedYear(), 4, 10, SignStyle.EXCEEDS_PAD)
|
||||
.toFormatter(Locale.ROOT));
|
||||
|
||||
|
@ -674,13 +688,15 @@ public class DateFormatters {
|
|||
* Returns a formatter for a four digit weekyear and two digit week of
|
||||
* weekyear. (xxxx-'W'ww)
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_WEEKYEAR_WEEK = new CompoundDateTimeFormatter(STRICT_WEEKYEAR_WEEK_FORMATTER);
|
||||
private static final DateFormatter STRICT_WEEKYEAR_WEEK =
|
||||
new JavaDateFormatter("strict_weekyear_week", STRICT_WEEKYEAR_WEEK_FORMATTER);
|
||||
|
||||
/*
|
||||
* Returns a formatter for a four digit weekyear, two digit week of
|
||||
* weekyear, and one digit day of week. (xxxx-'W'ww-e)
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_WEEKYEAR_WEEK_DAY = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder()
|
||||
private static final DateFormatter STRICT_WEEKYEAR_WEEK_DAY = new JavaDateFormatter("strict_weekyear_week_day",
|
||||
new DateTimeFormatterBuilder()
|
||||
.append(STRICT_WEEKYEAR_WEEK_FORMATTER)
|
||||
.appendLiteral("-")
|
||||
.appendValue(WeekFields.ISO.dayOfWeek())
|
||||
|
@ -691,14 +707,14 @@ public class DateFormatters {
|
|||
* two digit minute of hour, and two digit second of
|
||||
* minute. (yyyy-MM-dd'T'HH:mm:ss)
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_DATE_HOUR_MINUTE_SECOND =
|
||||
new CompoundDateTimeFormatter(DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss", Locale.ROOT));
|
||||
private static final DateFormatter STRICT_DATE_HOUR_MINUTE_SECOND = new JavaDateFormatter("strict_date_hour_minute_second",
|
||||
DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss", Locale.ROOT));
|
||||
|
||||
/*
|
||||
* A basic formatter for a full date as four digit year, two digit
|
||||
* month of year, and two digit day of month (yyyyMMdd).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter BASIC_DATE = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter BASIC_DATE = new JavaDateFormatter("basic_date",
|
||||
new DateTimeFormatterBuilder()
|
||||
.appendValue(ChronoField.YEAR, 4, 4, SignStyle.NORMAL)
|
||||
.appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE)
|
||||
|
@ -723,7 +739,7 @@ public class DateFormatters {
|
|||
* Returns a formatter for a full ordinal date, using a four
|
||||
* digit year and three digit dayOfYear (yyyy-DDD).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter STRICT_ORDINAL_DATE = new CompoundDateTimeFormatter(STRICT_ORDINAL_DATE_FORMATTER);
|
||||
private static final DateFormatter STRICT_ORDINAL_DATE = new JavaDateFormatter("strict_ordinal_date", STRICT_ORDINAL_DATE_FORMATTER);
|
||||
|
||||
/////////////////////////////////////////
|
||||
//
|
||||
|
@ -759,7 +775,8 @@ public class DateFormatters {
|
|||
* a date formatter with optional time, being very lenient, format is
|
||||
* yyyy-MM-dd'T'HH:mm:ss.SSSZ
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter DATE_OPTIONAL_TIME = new CompoundDateTimeFormatter(STRICT_DATE_OPTIONAL_TIME.printer,
|
||||
private static final DateFormatter DATE_OPTIONAL_TIME = new JavaDateFormatter("date_optional_time",
|
||||
STRICT_DATE_OPTIONAL_TIME_FORMATTER_1,
|
||||
new DateTimeFormatterBuilder()
|
||||
.append(DATE_FORMATTER)
|
||||
.optionalStart()
|
||||
|
@ -834,8 +851,8 @@ public class DateFormatters {
|
|||
* Returns a formatter for a full ordinal date, using a four
|
||||
* digit year and three digit dayOfYear (yyyy-DDD).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter ORDINAL_DATE =
|
||||
new CompoundDateTimeFormatter(ORDINAL_DATE_PRINTER, ORDINAL_DATE_FORMATTER);
|
||||
private static final DateFormatter ORDINAL_DATE =
|
||||
new JavaDateFormatter("ordinal_date", ORDINAL_DATE_PRINTER, ORDINAL_DATE_FORMATTER);
|
||||
|
||||
private static final DateTimeFormatter TIME_NO_MILLIS_FORMATTER = new DateTimeFormatterBuilder()
|
||||
.appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE)
|
||||
|
@ -864,70 +881,32 @@ public class DateFormatters {
|
|||
/*
|
||||
* Returns a formatter for a four digit weekyear. (YYYY)
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter WEEK_YEAR = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter WEEK_YEAR = new JavaDateFormatter("week_year",
|
||||
new DateTimeFormatterBuilder().appendValue(WeekFields.ISO.weekBasedYear()).toFormatter(Locale.ROOT));
|
||||
|
||||
/*
|
||||
* Returns a formatter for a four digit weekyear. (uuuu)
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter YEAR = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter YEAR = new JavaDateFormatter("year",
|
||||
new DateTimeFormatterBuilder().appendValue(ChronoField.YEAR).toFormatter(Locale.ROOT));
|
||||
|
||||
/*
|
||||
* Returns a formatter for parsing the seconds since the epoch
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter EPOCH_SECOND = new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter EPOCH_SECOND = new JavaDateFormatter("epoch_second",
|
||||
new DateTimeFormatterBuilder().appendValue(ChronoField.INSTANT_SECONDS).toFormatter(Locale.ROOT));
|
||||
|
||||
/*
|
||||
* Returns a formatter for parsing the milliseconds since the epoch
|
||||
* This one needs a custom implementation, because the standard date formatter can not parse negative values
|
||||
* or anything +- 999 milliseconds around the epoch
|
||||
*
|
||||
* This implementation just resorts to parsing the input directly to an Instant by trying to parse a number.
|
||||
* Parses the milliseconds since/before the epoch
|
||||
*/
|
||||
private static final DateTimeFormatter EPOCH_MILLIS_FORMATTER = new DateTimeFormatterBuilder()
|
||||
.appendValue(ChronoField.INSTANT_SECONDS, 1, 19, SignStyle.NEVER)
|
||||
.appendValue(ChronoField.MILLI_OF_SECOND, 3)
|
||||
.toFormatter(Locale.ROOT);
|
||||
|
||||
private static final class EpochDateTimeFormatter extends CompoundDateTimeFormatter {
|
||||
|
||||
private EpochDateTimeFormatter() {
|
||||
super(EPOCH_MILLIS_FORMATTER);
|
||||
}
|
||||
|
||||
private EpochDateTimeFormatter(ZoneId zoneId) {
|
||||
super(EPOCH_MILLIS_FORMATTER.withZone(zoneId));
|
||||
}
|
||||
|
||||
@Override
|
||||
public TemporalAccessor parse(String input) {
|
||||
try {
|
||||
return Instant.ofEpochMilli(Long.valueOf(input)).atZone(ZoneOffset.UTC);
|
||||
} catch (NumberFormatException e) {
|
||||
throw new DateTimeParseException("invalid number", input, 0, e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public CompoundDateTimeFormatter withZone(ZoneId zoneId) {
|
||||
return new EpochDateTimeFormatter(zoneId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String format(TemporalAccessor accessor) {
|
||||
return String.valueOf(Instant.from(accessor).toEpochMilli());
|
||||
}
|
||||
}
|
||||
|
||||
private static final CompoundDateTimeFormatter EPOCH_MILLIS = new EpochDateTimeFormatter();
|
||||
private static final DateFormatter EPOCH_MILLIS = EpochMillisDateFormatter.INSTANCE;
|
||||
|
||||
/*
|
||||
* Returns a formatter that combines a full date and two digit hour of
|
||||
* day. (yyyy-MM-dd'T'HH)
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter DATE_HOUR = new CompoundDateTimeFormatter(STRICT_DATE_HOUR.printer,
|
||||
private static final DateFormatter DATE_HOUR = new JavaDateFormatter("date_hour",
|
||||
DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH", Locale.ROOT),
|
||||
new DateTimeFormatterBuilder()
|
||||
.append(DATE_FORMATTER)
|
||||
.appendLiteral("T")
|
||||
|
@ -940,8 +919,8 @@ public class DateFormatters {
|
|||
* fraction of second (yyyy-MM-dd'T'HH:mm:ss.SSS). Parsing will parse up
|
||||
* to 3 fractional second digits.
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE_SECOND_MILLIS =
|
||||
new CompoundDateTimeFormatter(
|
||||
private static final DateFormatter DATE_HOUR_MINUTE_SECOND_MILLIS =
|
||||
new JavaDateFormatter("date_hour_minute_second_millis",
|
||||
new DateTimeFormatterBuilder()
|
||||
.append(STRICT_YEAR_MONTH_DAY_FORMATTER)
|
||||
.appendLiteral("T")
|
||||
|
@ -953,13 +932,14 @@ public class DateFormatters {
|
|||
.append(HOUR_MINUTE_SECOND_MILLIS_FORMATTER)
|
||||
.toFormatter(Locale.ROOT));
|
||||
|
||||
private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE_SECOND_FRACTION = DATE_HOUR_MINUTE_SECOND_MILLIS;
|
||||
private static final DateFormatter DATE_HOUR_MINUTE_SECOND_FRACTION = DATE_HOUR_MINUTE_SECOND_MILLIS;
|
||||
|
||||
/*
|
||||
* Returns a formatter that combines a full date, two digit hour of day,
|
||||
* and two digit minute of hour. (yyyy-MM-dd'T'HH:mm)
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE = new CompoundDateTimeFormatter(STRICT_DATE_HOUR_MINUTE.printer,
|
||||
private static final DateFormatter DATE_HOUR_MINUTE = new JavaDateFormatter("date_hour_minute",
|
||||
DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm", Locale.ROOT),
|
||||
new DateTimeFormatterBuilder()
|
||||
.append(DATE_FORMATTER)
|
||||
.appendLiteral("T")
|
||||
|
@ -971,8 +951,8 @@ public class DateFormatters {
|
|||
* two digit minute of hour, and two digit second of
|
||||
* minute. (yyyy-MM-dd'T'HH:mm:ss)
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE_SECOND = new CompoundDateTimeFormatter(
|
||||
STRICT_DATE_HOUR_MINUTE_SECOND.printer,
|
||||
private static final DateFormatter DATE_HOUR_MINUTE_SECOND = new JavaDateFormatter("date_hour_minute_second",
|
||||
DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss", Locale.ROOT),
|
||||
new DateTimeFormatterBuilder()
|
||||
.append(DATE_FORMATTER)
|
||||
.appendLiteral("T")
|
||||
|
@ -994,8 +974,8 @@ public class DateFormatters {
|
|||
* Returns a formatter that combines a full date and time, separated by a 'T'
|
||||
* (yyyy-MM-dd'T'HH:mm:ss.SSSZZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter DATE_TIME = new CompoundDateTimeFormatter(
|
||||
STRICT_DATE_TIME.printer,
|
||||
private static final DateFormatter DATE_TIME = new JavaDateFormatter("date_time",
|
||||
STRICT_DATE_OPTIONAL_TIME_FORMATTER_1,
|
||||
new DateTimeFormatterBuilder().append(DATE_TIME_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(DATE_TIME_FORMATTER).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT)
|
||||
);
|
||||
|
@ -1004,20 +984,22 @@ public class DateFormatters {
|
|||
* Returns a basic formatter for a full date as four digit weekyear, two
|
||||
* digit week of weekyear, and one digit day of week (YYYY'W'wwe).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter BASIC_WEEK_DATE =
|
||||
new CompoundDateTimeFormatter(STRICT_BASIC_WEEK_DATE.printer, BASIC_WEEK_DATE_FORMATTER);
|
||||
private static final DateFormatter BASIC_WEEK_DATE =
|
||||
new JavaDateFormatter("basic_week_date", STRICT_BASIC_WEEK_DATE_PRINTER, BASIC_WEEK_DATE_FORMATTER);
|
||||
|
||||
/*
|
||||
* Returns a formatter for a full date as four digit year, two digit month
|
||||
* of year, and two digit day of month (yyyy-MM-dd).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter DATE = new CompoundDateTimeFormatter(STRICT_DATE.printer, DATE_FORMATTER);
|
||||
private static final DateFormatter DATE = new JavaDateFormatter("date",
|
||||
DateTimeFormatter.ISO_LOCAL_DATE.withResolverStyle(ResolverStyle.LENIENT),
|
||||
DATE_FORMATTER);
|
||||
|
||||
// only the formatter, nothing optional here
|
||||
private static final DateTimeFormatter DATE_TIME_NO_MILLIS_PRINTER = new DateTimeFormatterBuilder()
|
||||
.append(STRICT_DATE.printer)
|
||||
.append(DateTimeFormatter.ISO_LOCAL_DATE.withResolverStyle(ResolverStyle.LENIENT))
|
||||
.appendLiteral('T')
|
||||
.append(STRICT_HOUR_MINUTE.printer)
|
||||
.appendPattern("HH:mm")
|
||||
.appendLiteral(':')
|
||||
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
|
||||
.appendZoneId()
|
||||
|
@ -1037,7 +1019,8 @@ public class DateFormatters {
|
|||
* Returns a formatter that combines a full date and time without millis, but with a timezone that can be optional
|
||||
* separated by a 'T' (yyyy-MM-dd'T'HH:mm:ssZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter(DATE_TIME_NO_MILLIS_PRINTER,
|
||||
private static final DateFormatter DATE_TIME_NO_MILLIS = new JavaDateFormatter("date_time_no_millis",
|
||||
DATE_TIME_NO_MILLIS_PRINTER,
|
||||
new DateTimeFormatterBuilder().append(DATE_TIME_PREFIX).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(DATE_TIME_PREFIX).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(DATE_TIME_PREFIX)
|
||||
|
@ -1051,21 +1034,21 @@ public class DateFormatters {
|
|||
* hour, two digit second of minute, and three digit fraction of
|
||||
* second (HH:mm:ss.SSS).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter HOUR_MINUTE_SECOND_MILLIS =
|
||||
new CompoundDateTimeFormatter(STRICT_HOUR_MINUTE_SECOND_FRACTION.printer, HOUR_MINUTE_SECOND_MILLIS_FORMATTER);
|
||||
private static final DateFormatter HOUR_MINUTE_SECOND_MILLIS = new JavaDateFormatter("hour_minute_second_millis",
|
||||
STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER, HOUR_MINUTE_SECOND_MILLIS_FORMATTER);
|
||||
|
||||
/*
|
||||
* Returns a formatter for a two digit hour of day and two digit minute of
|
||||
* hour. (HH:mm)
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter HOUR_MINUTE =
|
||||
new CompoundDateTimeFormatter(STRICT_HOUR_MINUTE.printer, HOUR_MINUTE_FORMATTER);
|
||||
private static final DateFormatter HOUR_MINUTE =
|
||||
new JavaDateFormatter("hour_minute", DateTimeFormatter.ofPattern("HH:mm", Locale.ROOT), HOUR_MINUTE_FORMATTER);
|
||||
|
||||
/*
|
||||
* A strict formatter that formats or parses an hour, minute and second, such as '09:43:25'.
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter HOUR_MINUTE_SECOND = new CompoundDateTimeFormatter(
|
||||
STRICT_HOUR_MINUTE_SECOND.printer,
|
||||
private static final DateFormatter HOUR_MINUTE_SECOND = new JavaDateFormatter("hour_minute_second",
|
||||
STRICT_HOUR_MINUTE_SECOND_FORMATTER,
|
||||
new DateTimeFormatterBuilder()
|
||||
.append(HOUR_MINUTE_FORMATTER)
|
||||
.appendLiteral(":")
|
||||
|
@ -1076,8 +1059,8 @@ public class DateFormatters {
|
|||
/*
|
||||
* Returns a formatter for a two digit hour of day. (HH)
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter HOUR = new CompoundDateTimeFormatter(
|
||||
STRICT_HOUR.printer,
|
||||
private static final DateFormatter HOUR = new JavaDateFormatter("hour",
|
||||
DateTimeFormatter.ofPattern("HH", Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE).toFormatter(Locale.ROOT)
|
||||
);
|
||||
|
||||
|
@ -1096,8 +1079,9 @@ public class DateFormatters {
|
|||
* Returns a formatter for a full ordinal date and time, using a four
|
||||
* digit year and three digit dayOfYear (yyyy-DDD'T'HH:mm:ss.SSSZZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter ORDINAL_DATE_TIME = new CompoundDateTimeFormatter(
|
||||
STRICT_ORDINAL_DATE_TIME.printer,
|
||||
private static final DateFormatter ORDINAL_DATE_TIME = new JavaDateFormatter("ordinal_date_time",
|
||||
new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_FORMATTER_BASE)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(ORDINAL_DATE_TIME_FORMATTER_BASE)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(ORDINAL_DATE_TIME_FORMATTER_BASE)
|
||||
|
@ -1114,8 +1098,9 @@ public class DateFormatters {
|
|||
* Returns a formatter for a full ordinal date and time without millis,
|
||||
* using a four digit year and three digit dayOfYear (yyyy-DDD'T'HH:mm:ssZZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter ORDINAL_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter(
|
||||
STRICT_ORDINAL_DATE_TIME_NO_MILLIS.printer,
|
||||
private static final DateFormatter ORDINAL_DATE_TIME_NO_MILLIS = new JavaDateFormatter("ordinal_date_time_no_millis",
|
||||
new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_NO_MILLIS_BASE)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(ORDINAL_DATE_TIME_NO_MILLIS_BASE)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(ORDINAL_DATE_TIME_NO_MILLIS_BASE)
|
||||
|
@ -1126,8 +1111,8 @@ public class DateFormatters {
|
|||
* Returns a formatter that combines a full weekyear date and time,
|
||||
* separated by a 'T' (xxxx-'W'ww-e'T'HH:mm:ss.SSSZZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter WEEK_DATE_TIME = new CompoundDateTimeFormatter(
|
||||
STRICT_WEEK_DATE_TIME.printer,
|
||||
private static final DateFormatter WEEK_DATE_TIME = new JavaDateFormatter("week_date_time",
|
||||
new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T).append(STRICT_TIME_PRINTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).appendLiteral("T").append(TIME_PREFIX)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).appendLiteral("T").append(TIME_PREFIX)
|
||||
|
@ -1138,8 +1123,9 @@ public class DateFormatters {
|
|||
* Returns a formatter that combines a full weekyear date and time,
|
||||
* separated by a 'T' (xxxx-'W'ww-e'T'HH:mm:ssZZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter WEEK_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter(
|
||||
STRICT_WEEK_DATE_TIME_NO_MILLIS.printer,
|
||||
private static final DateFormatter WEEK_DATE_TIME_NO_MILLIS = new JavaDateFormatter("week_date_time_no_millis",
|
||||
new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T)
|
||||
.append(STRICT_TIME_NO_MILLIS_BASE).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).append(T_TIME_NO_MILLIS_FORMATTER)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).append(T_TIME_NO_MILLIS_FORMATTER)
|
||||
|
@ -1150,8 +1136,11 @@ public class DateFormatters {
|
|||
* Returns a basic formatter that combines a basic weekyear date and time,
|
||||
* separated by a 'T' (xxxx'W'wwe'T'HHmmss.SSSX).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter BASIC_WEEK_DATE_TIME = new CompoundDateTimeFormatter(
|
||||
STRICT_BASIC_WEEK_DATE_TIME.printer,
|
||||
private static final DateFormatter BASIC_WEEK_DATE_TIME = new JavaDateFormatter("basic_week_date_time",
|
||||
new DateTimeFormatterBuilder()
|
||||
.append(STRICT_BASIC_WEEK_DATE_PRINTER)
|
||||
.append(DateTimeFormatter.ofPattern("'T'HHmmss.SSSX", Locale.ROOT))
|
||||
.toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(BASIC_WEEK_DATE_FORMATTER).append(BASIC_T_TIME_FORMATTER)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(BASIC_WEEK_DATE_FORMATTER).append(BASIC_T_TIME_FORMATTER)
|
||||
|
@ -1162,8 +1151,10 @@ public class DateFormatters {
|
|||
* Returns a basic formatter that combines a basic weekyear date and time,
|
||||
* separated by a 'T' (xxxx'W'wwe'T'HHmmssX).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter BASIC_WEEK_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter(
|
||||
STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS.printer,
|
||||
private static final DateFormatter BASIC_WEEK_DATE_TIME_NO_MILLIS = new JavaDateFormatter("basic_week_date_time_no_millis",
|
||||
new DateTimeFormatterBuilder()
|
||||
.append(STRICT_BASIC_WEEK_DATE_PRINTER).append(DateTimeFormatter.ofPattern("'T'HHmmssX", Locale.ROOT))
|
||||
.toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(BASIC_WEEK_DATE_FORMATTER).appendLiteral("T").append(BASIC_TIME_NO_MILLIS_BASE)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(BASIC_WEEK_DATE_FORMATTER).appendLiteral("T").append(BASIC_TIME_NO_MILLIS_BASE)
|
||||
|
@ -1175,8 +1166,8 @@ public class DateFormatters {
|
|||
* hour, two digit second of minute, three digit fraction of second, and
|
||||
* time zone offset (HH:mm:ss.SSSZZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter TIME = new CompoundDateTimeFormatter(
|
||||
STRICT_TIME.printer,
|
||||
private static final DateFormatter TIME = new JavaDateFormatter("time",
|
||||
new DateTimeFormatterBuilder().append(STRICT_TIME_PRINTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(TIME_PREFIX).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(TIME_PREFIX).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT)
|
||||
);
|
||||
|
@ -1185,8 +1176,8 @@ public class DateFormatters {
|
|||
* Returns a formatter for a two digit hour of day, two digit minute of
|
||||
* hour, two digit second of minute, and time zone offset (HH:mm:ssZZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter TIME_NO_MILLIS = new CompoundDateTimeFormatter(
|
||||
STRICT_TIME_NO_MILLIS.printer,
|
||||
private static final DateFormatter TIME_NO_MILLIS = new JavaDateFormatter("time_no_millis",
|
||||
new DateTimeFormatterBuilder().append(STRICT_TIME_NO_MILLIS_BASE).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(TIME_NO_MILLIS_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(TIME_NO_MILLIS_FORMATTER).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT)
|
||||
);
|
||||
|
@ -1196,8 +1187,8 @@ public class DateFormatters {
|
|||
* hour, two digit second of minute, three digit fraction of second, and
|
||||
* time zone offset prefixed by 'T' ('T'HH:mm:ss.SSSZZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter T_TIME = new CompoundDateTimeFormatter(
|
||||
STRICT_T_TIME.printer,
|
||||
private static final DateFormatter T_TIME = new JavaDateFormatter("t_time",
|
||||
new DateTimeFormatterBuilder().appendLiteral('T').append(STRICT_TIME_PRINTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().appendLiteral("T").append(TIME_PREFIX)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().appendLiteral("T").append(TIME_PREFIX)
|
||||
|
@ -1209,8 +1200,9 @@ public class DateFormatters {
|
|||
* hour, two digit second of minute, and time zone offset prefixed
|
||||
* by 'T' ('T'HH:mm:ssZZ).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter T_TIME_NO_MILLIS = new CompoundDateTimeFormatter(
|
||||
STRICT_T_TIME_NO_MILLIS.printer,
|
||||
private static final DateFormatter T_TIME_NO_MILLIS = new JavaDateFormatter("t_time_no_millis",
|
||||
new DateTimeFormatterBuilder().appendLiteral("T").append(STRICT_TIME_NO_MILLIS_BASE)
|
||||
.appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(T_TIME_NO_MILLIS_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().append(T_TIME_NO_MILLIS_FORMATTER).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT)
|
||||
);
|
||||
|
@ -1218,16 +1210,20 @@ public class DateFormatters {
|
|||
/*
|
||||
* A strict formatter that formats or parses a year and a month, such as '2011-12'.
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter YEAR_MONTH = new CompoundDateTimeFormatter(
|
||||
STRICT_YEAR_MONTH.printer,
|
||||
private static final DateFormatter YEAR_MONTH = new JavaDateFormatter("year_month",
|
||||
new DateTimeFormatterBuilder()
|
||||
.appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
|
||||
.appendLiteral("-")
|
||||
.appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE)
|
||||
.toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder().appendValue(ChronoField.YEAR).appendLiteral("-").appendValue(MONTH_OF_YEAR).toFormatter(Locale.ROOT)
|
||||
);
|
||||
|
||||
/*
|
||||
* A strict date formatter that formats or parses a date without an offset, such as '2011-12-03'.
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter YEAR_MONTH_DAY = new CompoundDateTimeFormatter(
|
||||
STRICT_YEAR_MONTH_DAY.printer,
|
||||
private static final DateFormatter YEAR_MONTH_DAY = new JavaDateFormatter("year_month_day",
|
||||
STRICT_YEAR_MONTH_DAY_FORMATTER,
|
||||
new DateTimeFormatterBuilder()
|
||||
.appendValue(ChronoField.YEAR)
|
||||
.appendLiteral("-")
|
||||
|
@ -1241,13 +1237,13 @@ public class DateFormatters {
|
|||
* Returns a formatter for a full date as four digit weekyear, two digit
|
||||
* week of weekyear, and one digit day of week (xxxx-'W'ww-e).
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter WEEK_DATE = new CompoundDateTimeFormatter(STRICT_WEEK_DATE.printer, WEEK_DATE_FORMATTER);
|
||||
private static final DateFormatter WEEK_DATE = new JavaDateFormatter("week_date", ISO_WEEK_DATE, WEEK_DATE_FORMATTER);
|
||||
|
||||
/*
|
||||
* Returns a formatter for a four digit weekyear and two digit week of
|
||||
* weekyear. (xxxx-'W'ww)
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter WEEKYEAR_WEEK = new CompoundDateTimeFormatter(STRICT_WEEKYEAR_WEEK.printer,
|
||||
private static final DateFormatter WEEKYEAR_WEEK = new JavaDateFormatter("weekyear_week", STRICT_WEEKYEAR_WEEK_FORMATTER,
|
||||
new DateTimeFormatterBuilder()
|
||||
.appendValue(WeekFields.ISO.weekBasedYear())
|
||||
.appendLiteral("-W")
|
||||
|
@ -1259,8 +1255,12 @@ public class DateFormatters {
|
|||
* Returns a formatter for a four digit weekyear, two digit week of
|
||||
* weekyear, and one digit day of week. (xxxx-'W'ww-e)
|
||||
*/
|
||||
private static final CompoundDateTimeFormatter WEEKYEAR_WEEK_DAY = new CompoundDateTimeFormatter(
|
||||
STRICT_WEEKYEAR_WEEK_DAY.printer,
|
||||
private static final DateFormatter WEEKYEAR_WEEK_DAY = new JavaDateFormatter("weekyear_week_day",
|
||||
new DateTimeFormatterBuilder()
|
||||
.append(STRICT_WEEKYEAR_WEEK_FORMATTER)
|
||||
.appendLiteral("-")
|
||||
.appendValue(WeekFields.ISO.dayOfWeek())
|
||||
.toFormatter(Locale.ROOT),
|
||||
new DateTimeFormatterBuilder()
|
||||
.appendValue(WeekFields.ISO.weekBasedYear())
|
||||
.appendLiteral("-W")
|
||||
|
@ -1276,11 +1276,11 @@ public class DateFormatters {
|
|||
//
|
||||
/////////////////////////////////////////
|
||||
|
||||
public static CompoundDateTimeFormatter forPattern(String input) {
|
||||
public static DateFormatter forPattern(String input) {
|
||||
return forPattern(input, Locale.ROOT);
|
||||
}
|
||||
|
||||
public static CompoundDateTimeFormatter forPattern(String input, Locale locale) {
|
||||
public static DateFormatter forPattern(String input, Locale locale) {
|
||||
if (Strings.hasLength(input)) {
|
||||
input = input.trim();
|
||||
}
|
||||
|
@ -1452,21 +1452,20 @@ public class DateFormatters {
|
|||
if (formats.length == 1) {
|
||||
return forPattern(formats[0], locale);
|
||||
} else {
|
||||
Collection<DateTimeFormatter> parsers = new LinkedHashSet<>(formats.length);
|
||||
for (String format : formats) {
|
||||
CompoundDateTimeFormatter dateTimeFormatter = forPattern(format, locale);
|
||||
try {
|
||||
parsers.addAll(Arrays.asList(dateTimeFormatter.parsers));
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new IllegalArgumentException("Invalid format: [" + input + "]: " + e.getMessage(), e);
|
||||
try {
|
||||
DateFormatter[] formatters = new DateFormatter[formats.length];
|
||||
for (int i = 0; i < formats.length; i++) {
|
||||
formatters[i] = forPattern(formats[i], locale);
|
||||
}
|
||||
}
|
||||
|
||||
return new CompoundDateTimeFormatter(parsers.toArray(new DateTimeFormatter[0]));
|
||||
return DateFormatter.merge(formatters);
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new IllegalArgumentException("Invalid format: [" + input + "]: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
return new CompoundDateTimeFormatter(new DateTimeFormatterBuilder().appendPattern(input).toFormatter(locale));
|
||||
return new JavaDateFormatter(input, new DateTimeFormatterBuilder().appendPattern(input).toFormatter(locale));
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new IllegalArgumentException("Invalid format: [" + input + "]: " + e.getMessage(), e);
|
||||
}
|
||||
|
|
|
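Throughout DateFormatters.java each named format pairs a strict printer with one or more lenient parsers. The following standalone sketch (plain java.time, not Elasticsearch code; the class name and patterns are made up for illustration) shows that split for an HH:mm style format: the parser accepts one or two digits per field, while the printer always emits two.

    import java.time.LocalTime;
    import java.time.format.DateTimeFormatter;
    import java.time.format.DateTimeFormatterBuilder;
    import java.time.format.SignStyle;
    import java.time.temporal.ChronoField;
    import java.util.Locale;

    public class LenientParseStrictPrint {
        public static void main(String[] args) {
            // Lenient parser: accepts one or two digits for hour and minute, e.g. "9:5".
            DateTimeFormatter parser = new DateTimeFormatterBuilder()
                .appendValue(ChronoField.HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE)
                .appendLiteral(':')
                .appendValue(ChronoField.MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE)
                .toFormatter(Locale.ROOT);

            // Strict printer: always emits two digits, e.g. "09:05".
            DateTimeFormatter printer = DateTimeFormatter.ofPattern("HH:mm", Locale.ROOT);

            LocalTime time = LocalTime.from(parser.parse("9:5"));
            System.out.println(printer.format(time)); // prints 09:05
        }
    }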
@ -58,10 +58,10 @@ public class DateMathParser {
|
|||
ROUND_UP_BASE_FIELDS.put(ChronoField.MILLI_OF_SECOND, 999L);
|
||||
}
|
||||
|
||||
private final CompoundDateTimeFormatter formatter;
|
||||
private final CompoundDateTimeFormatter roundUpFormatter;
|
||||
private final DateFormatter formatter;
|
||||
private final DateFormatter roundUpFormatter;
|
||||
|
||||
public DateMathParser(CompoundDateTimeFormatter formatter) {
|
||||
public DateMathParser(DateFormatter formatter) {
|
||||
Objects.requireNonNull(formatter);
|
||||
this.formatter = formatter;
|
||||
this.roundUpFormatter = formatter.parseDefaulting(ROUND_UP_BASE_FIELDS);
|
||||
|
@ -247,7 +247,7 @@ public class DateMathParser {
|
|||
}
|
||||
|
||||
private long parseDateTime(String value, ZoneId timeZone, boolean roundUpIfNoTime) {
|
||||
CompoundDateTimeFormatter formatter = roundUpIfNoTime ? this.roundUpFormatter : this.formatter;
|
||||
DateFormatter formatter = roundUpIfNoTime ? this.roundUpFormatter : this.formatter;
|
||||
try {
|
||||
if (timeZone == null) {
|
||||
return DateFormatters.toZonedDateTime(formatter.parse(value)).toInstant().toEpochMilli();
|
||||
|
|
|
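The round-up formatter used by DateMathParser is built by defaulting any missing time fields to their maximum values, so a date-only input resolves to the end of the day rather than its start. A minimal sketch of that technique with plain java.time; only the 999 millisecond default is visible in the hunk above, the other field/value pairs here are assumptions for illustration.

    import java.time.LocalDateTime;
    import java.time.format.DateTimeFormatter;
    import java.time.format.DateTimeFormatterBuilder;
    import java.time.temporal.ChronoField;
    import java.util.Locale;

    public class RoundUpParsing {
        public static void main(String[] args) {
            // Defaults for the time fields so that a date-only input resolves
            // to the very end of that day instead of its start.
            DateTimeFormatter roundUp = new DateTimeFormatterBuilder()
                .append(DateTimeFormatter.ISO_LOCAL_DATE)
                .parseDefaulting(ChronoField.HOUR_OF_DAY, 23)
                .parseDefaulting(ChronoField.MINUTE_OF_HOUR, 59)
                .parseDefaulting(ChronoField.SECOND_OF_MINUTE, 59)
                .parseDefaulting(ChronoField.MILLI_OF_SECOND, 999)
                .toFormatter(Locale.ROOT);

            LocalDateTime end = LocalDateTime.from(roundUp.parse("2018-10-03"));
            System.out.println(end); // 2018-10-03T23:59:59.999
        }
    }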
@ -0,0 +1,73 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.common.time;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.time.ZoneId;
|
||||
import java.time.ZoneOffset;
|
||||
import java.time.format.DateTimeParseException;
|
||||
import java.time.temporal.TemporalAccessor;
|
||||
import java.time.temporal.TemporalField;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* This is a special formatter to parse the milliseconds since the epoch.
|
||||
* A native java time date formatter cannot be configured to reproduce
|
||||
* the required behaviour of also parsing negative milliseconds.
|
||||
*
|
||||
* This implementation simply tries to convert the input to a long and uses
|
||||
* this as the milliseconds since the epoch without involving any other
|
||||
* java time code
|
||||
*/
|
||||
class EpochMillisDateFormatter implements DateFormatter {
|
||||
|
||||
public static DateFormatter INSTANCE = new EpochMillisDateFormatter();
|
||||
|
||||
private EpochMillisDateFormatter() {}
|
||||
|
||||
@Override
|
||||
public TemporalAccessor parse(String input) {
|
||||
try {
|
||||
return Instant.ofEpochMilli(Long.valueOf(input)).atZone(ZoneOffset.UTC);
|
||||
} catch (NumberFormatException e) {
|
||||
throw new DateTimeParseException("invalid number", input, 0, e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public DateFormatter withZone(ZoneId zoneId) {
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String format(TemporalAccessor accessor) {
|
||||
return String.valueOf(Instant.from(accessor).toEpochMilli());
|
||||
}
|
||||
|
||||
@Override
|
||||
public String pattern() {
|
||||
return "epoch_millis";
|
||||
}
|
||||
|
||||
@Override
|
||||
public DateFormatter parseDefaulting(Map<TemporalField, Long> fields) {
|
||||
return this;
|
||||
}
|
||||
}
|
|
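The new formatter's parse/format round trip reduces to Long and Instant conversions, which is why negative values (dates before the epoch) work here even though no DateTimeFormatter pattern can express them. A standalone sketch of the same round trip (class name made up, not part of the change):

    import java.time.Instant;
    import java.time.ZoneOffset;
    import java.time.ZonedDateTime;

    public class EpochMillisExample {
        public static void main(String[] args) {
            // Negative values are valid and point before 1970-01-01T00:00:00Z.
            ZonedDateTime parsed = Instant.ofEpochMilli(Long.valueOf("-86400000")).atZone(ZoneOffset.UTC);
            System.out.println(parsed);                               // 1969-12-31T00:00Z
            System.out.println(Instant.from(parsed).toEpochMilli());  // -86400000 (the format direction)
        }
    }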
@ -16,6 +16,7 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.common.time;
|
||||
|
||||
import java.time.ZoneId;
|
||||
|
@ -27,33 +28,28 @@ import java.time.temporal.TemporalField;
|
|||
import java.util.Arrays;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
/**
|
||||
* wrapper class around java.time.DateTimeFormatter that supports multiple formats for easier parsing,
|
||||
* and one specific format for printing
|
||||
*/
|
||||
public class CompoundDateTimeFormatter {
|
||||
class JavaDateFormatter implements DateFormatter {
|
||||
|
||||
private static final Consumer<DateTimeFormatter[]> SAME_TIME_ZONE_VALIDATOR = (parsers) -> {
|
||||
private final String format;
|
||||
private final DateTimeFormatter printer;
|
||||
private final DateTimeFormatter[] parsers;
|
||||
|
||||
JavaDateFormatter(String format, DateTimeFormatter printer, DateTimeFormatter... parsers) {
|
||||
long distinctZones = Arrays.stream(parsers).map(DateTimeFormatter::getZone).distinct().count();
|
||||
if (distinctZones > 1) {
|
||||
throw new IllegalArgumentException("formatters must have the same time zone");
|
||||
}
|
||||
};
|
||||
|
||||
final DateTimeFormatter printer;
|
||||
final DateTimeFormatter[] parsers;
|
||||
|
||||
CompoundDateTimeFormatter(DateTimeFormatter ... parsers) {
|
||||
if (parsers.length == 0) {
|
||||
throw new IllegalArgumentException("at least one date time formatter is required");
|
||||
this.parsers = new DateTimeFormatter[]{printer};
|
||||
} else {
|
||||
this.parsers = parsers;
|
||||
}
|
||||
SAME_TIME_ZONE_VALIDATOR.accept(parsers);
|
||||
this.printer = parsers[0];
|
||||
this.parsers = parsers;
|
||||
this.format = format;
|
||||
this.printer = printer;
|
||||
}
|
||||
|
||||
@Override
|
||||
public TemporalAccessor parse(String input) {
|
||||
DateTimeParseException failure = null;
|
||||
for (int i = 0; i < parsers.length; i++) {
|
||||
|
@ -72,13 +68,8 @@ public class CompoundDateTimeFormatter {
|
|||
throw failure;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure a specific time zone for a date formatter
|
||||
*
|
||||
* @param zoneId The zoneId this formatter should use
|
||||
* @return The new formatter with all parsers switched to the specified timezone
|
||||
*/
|
||||
public CompoundDateTimeFormatter withZone(ZoneId zoneId) {
|
||||
@Override
|
||||
public DateFormatter withZone(ZoneId zoneId) {
|
||||
// shortcut to avoid creating new objects unnecessarily
|
||||
if (zoneId.equals(parsers[0].getZone())) {
|
||||
return this;
|
||||
|
@ -89,25 +80,33 @@ public class CompoundDateTimeFormatter {
|
|||
parsersWithZone[i] = parsers[i].withZone(zoneId);
|
||||
}
|
||||
|
||||
return new CompoundDateTimeFormatter(parsersWithZone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure defaults for missing values in a parser, then return a new compound date formatter
|
||||
*/
|
||||
CompoundDateTimeFormatter parseDefaulting(Map<TemporalField, Long> fields) {
|
||||
final DateTimeFormatter[] parsersWithDefaulting = new DateTimeFormatter[parsers.length];
|
||||
for (int i = 0; i < parsers.length; i++) {
|
||||
DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder().append(parsers[i]);
|
||||
fields.forEach(builder::parseDefaulting);
|
||||
parsersWithDefaulting[i] = builder.toFormatter(Locale.ROOT);
|
||||
}
|
||||
|
||||
return new CompoundDateTimeFormatter(parsersWithDefaulting);
|
||||
return new JavaDateFormatter(format, printer.withZone(zoneId), parsersWithZone);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String format(TemporalAccessor accessor) {
|
||||
return printer.format(accessor);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String pattern() {
|
||||
return format;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DateFormatter parseDefaulting(Map<TemporalField, Long> fields) {
|
||||
final DateTimeFormatterBuilder parseDefaultingBuilder = new DateTimeFormatterBuilder().append(printer);
|
||||
fields.forEach(parseDefaultingBuilder::parseDefaulting);
|
||||
if (parsers.length == 1 && parsers[0].equals(printer)) {
|
||||
return new JavaDateFormatter(format, parseDefaultingBuilder.toFormatter(Locale.ROOT));
|
||||
} else {
|
||||
final DateTimeFormatter[] parsersWithDefaulting = new DateTimeFormatter[parsers.length];
|
||||
for (int i = 0; i < parsers.length; i++) {
|
||||
DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder().append(parsers[i]);
|
||||
fields.forEach(builder::parseDefaulting);
|
||||
parsersWithDefaulting[i] = builder.toFormatter(Locale.ROOT);
|
||||
}
|
||||
return new JavaDateFormatter(format, parseDefaultingBuilder.toFormatter(Locale.ROOT), parsersWithDefaulting);
|
||||
}
|
||||
}
|
||||
}
|
|
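The renamed class keeps one printer for output plus an ordered list of parsers for input; parse failures fall through to the next parser and only the first exception is rethrown. A stripped-down sketch of that loop with plain java.time (the names and patterns here are illustrative, not the class under change):

    import java.time.LocalDateTime;
    import java.time.format.DateTimeFormatter;
    import java.time.format.DateTimeParseException;
    import java.util.Locale;

    public class MultiParserFormatter {
        public static void main(String[] args) {
            DateTimeFormatter printer = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss", Locale.ROOT);
            DateTimeFormatter[] parsers = {
                printer,
                DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss", Locale.ROOT)
            };

            LocalDateTime dateTime = LocalDateTime.from(parse(parsers, "2018-10-03 12:34:56"));
            System.out.println(printer.format(dateTime)); // 2018-10-03T12:34:56
        }

        // Try each parser in order; if all fail, rethrow the first failure.
        static java.time.temporal.TemporalAccessor parse(DateTimeFormatter[] parsers, String input) {
            DateTimeParseException failure = null;
            for (DateTimeFormatter parser : parsers) {
                try {
                    return parser.parse(input);
                } catch (DateTimeParseException e) {
                    if (failure == null) {
                        failure = e;
                    }
                }
            }
            throw failure;
        }
    }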
@ -21,7 +21,7 @@ package org.elasticsearch.common.xcontent;
|
|||
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.time.CompoundDateTimeFormatter;
|
||||
import org.elasticsearch.common.time.DateFormatter;
|
||||
import org.elasticsearch.common.time.DateFormatters;
|
||||
import org.elasticsearch.common.unit.ByteSizeValue;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
|
@ -64,9 +64,9 @@ import java.util.function.Function;
|
|||
public class XContentElasticsearchExtension implements XContentBuilderExtension {
|
||||
|
||||
public static final DateTimeFormatter DEFAULT_DATE_PRINTER = ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC);
|
||||
public static final CompoundDateTimeFormatter DEFAULT_FORMATTER = DateFormatters.forPattern("strict_date_optional_time_nanos");
|
||||
public static final CompoundDateTimeFormatter LOCAL_TIME_FORMATTER = DateFormatters.forPattern("HH:mm:ss.SSS");
|
||||
public static final CompoundDateTimeFormatter OFFSET_TIME_FORMATTER = DateFormatters.forPattern("HH:mm:ss.SSSZZZZZ");
|
||||
public static final DateFormatter DEFAULT_FORMATTER = DateFormatters.forPattern("strict_date_optional_time_nanos");
|
||||
public static final DateFormatter LOCAL_TIME_FORMATTER = DateFormatters.forPattern("HH:mm:ss.SSS");
|
||||
public static final DateFormatter OFFSET_TIME_FORMATTER = DateFormatters.forPattern("HH:mm:ss.SSSZZZZZ");
|
||||
|
||||
@Override
|
||||
public Map<Class<?>, XContentBuilder.Writer> getXContentWriters() {
|
||||
|
|
|
@ -40,7 +40,6 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.SearchScript;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -48,6 +47,7 @@ import java.util.HashMap;
|
|||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import org.elasticsearch.script.TermsSetQueryScript;
|
||||
|
||||
public final class TermsSetQueryBuilder extends AbstractQueryBuilder<TermsSetQueryBuilder> {
|
||||
|
||||
|
@ -262,13 +262,12 @@ public final class TermsSetQueryBuilder extends AbstractQueryBuilder<TermsSetQue
|
|||
IndexNumericFieldData fieldData = context.getForField(msmFieldType);
|
||||
longValuesSource = new FieldValuesSource(fieldData);
|
||||
} else if (minimumShouldMatchScript != null) {
|
||||
SearchScript.Factory factory = context.getScriptService().compile(minimumShouldMatchScript,
|
||||
SearchScript.TERMS_SET_QUERY_CONTEXT);
|
||||
TermsSetQueryScript.Factory factory = context.getScriptService().compile(minimumShouldMatchScript,
|
||||
TermsSetQueryScript.CONTEXT);
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.putAll(minimumShouldMatchScript.getParams());
|
||||
params.put("num_terms", values.size());
|
||||
SearchScript.LeafFactory leafFactory = factory.newFactory(params, context.lookup());
|
||||
longValuesSource = new ScriptLongValueSource(minimumShouldMatchScript, leafFactory);
|
||||
longValuesSource = new ScriptLongValueSource(minimumShouldMatchScript, factory.newFactory(params, context.lookup()));
|
||||
} else {
|
||||
throw new IllegalStateException("No minimum should match has been specified");
|
||||
}
|
||||
|
@ -278,26 +277,26 @@ public final class TermsSetQueryBuilder extends AbstractQueryBuilder<TermsSetQue
|
|||
static final class ScriptLongValueSource extends LongValuesSource {
|
||||
|
||||
private final Script script;
|
||||
private final SearchScript.LeafFactory leafFactory;
|
||||
private final TermsSetQueryScript.LeafFactory leafFactory;
|
||||
|
||||
ScriptLongValueSource(Script script, SearchScript.LeafFactory leafFactory) {
|
||||
ScriptLongValueSource(Script script, TermsSetQueryScript.LeafFactory leafFactory) {
|
||||
this.script = script;
|
||||
this.leafFactory = leafFactory;
|
||||
}
|
||||
|
||||
@Override
|
||||
public LongValues getValues(LeafReaderContext ctx, DoubleValues scores) throws IOException {
|
||||
SearchScript searchScript = leafFactory.newInstance(ctx);
|
||||
TermsSetQueryScript script = leafFactory.newInstance(ctx);
|
||||
return new LongValues() {
|
||||
@Override
|
||||
public long longValue() throws IOException {
|
||||
return searchScript.runAsLong();
|
||||
return script.runAsLong();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean advanceExact(int doc) throws IOException {
|
||||
searchScript.setDocument(doc);
|
||||
return searchScript.run() != null;
|
||||
script.setDocument(doc);
|
||||
return script.execute() != null;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
|
|
@ -21,11 +21,14 @@ package org.elasticsearch.index.shard;
|
|||
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.logging.log4j.message.ParameterizedMessage;
|
||||
import org.elasticsearch.Assertions;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.util.concurrent.FutureUtils;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
@ -34,6 +37,7 @@ import java.util.concurrent.ScheduledExecutorService;
|
|||
import java.util.concurrent.ScheduledFuture;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.elasticsearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED;
|
||||
import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
|
@ -63,7 +67,7 @@ public class GlobalCheckpointListeners implements Closeable {
|
|||
|
||||
// guarded by this
|
||||
private boolean closed;
|
||||
private Map<GlobalCheckpointListener, ScheduledFuture<?>> listeners;
|
||||
private final Map<GlobalCheckpointListener, Tuple<Long, ScheduledFuture<?>>> listeners = new LinkedHashMap<>();
|
||||
private long lastKnownGlobalCheckpoint = UNASSIGNED_SEQ_NO;
|
||||
|
||||
private final ShardId shardId;
|
||||
|
@ -91,68 +95,65 @@ public class GlobalCheckpointListeners implements Closeable {
|
|||
}
|
||||
|
||||
/**
|
||||
* Add a global checkpoint listener. If the global checkpoint is above the current global checkpoint known to the listener then the
|
||||
* listener will be asynchronously notified on the executor used to construct this collection of global checkpoint listeners. If the
|
||||
* shard is closed then the listener will be asynchronously notified on the executor used to construct this collection of global
|
||||
* checkpoint listeners. The listener will only be notified of at most one event, either the global checkpoint is updated or the shard
|
||||
* is closed. A listener must re-register after one of these events to receive subsequent events. Callers may add a timeout to be
|
||||
* notified after if the timeout elapses. In this case, the listener will be notified with a {@link TimeoutException}. Passing null for
|
||||
* the timeout means no timeout will be associated to the listener.
|
||||
* Add a global checkpoint listener. If the global checkpoint is equal to or above the global checkpoint the listener is waiting for,
|
||||
* then the listener will be asynchronously notified on the executor used to construct this collection of global checkpoint listeners.
|
||||
* If the shard is closed then the listener will be asynchronously notified on the executor used to construct this collection of global
|
||||
* checkpoint listeners. The listener will only be notified of at most one event, either the global checkpoint is updated above the
|
||||
* global checkpoint the listener is waiting for, or the shard is closed. A listener must re-register after one of these events to
|
||||
* receive subsequent events. Callers may add a timeout to be notified after if the timeout elapses. In this case, the listener will be
|
||||
* notified with a {@link TimeoutException}. Passing null for the timeout means no timeout will be associated with the listener.
|
||||
*
|
||||
* @param currentGlobalCheckpoint the current global checkpoint known to the listener
|
||||
* @param listener the listener
|
||||
* @param timeout the listener timeout, or null if no timeout
|
||||
* @param waitingForGlobalCheckpoint the global checkpoint the listener is waiting for
|
||||
* @param listener the listener
|
||||
* @param timeout the listener timeout, or null if no timeout
|
||||
*/
|
||||
synchronized void add(final long currentGlobalCheckpoint, final GlobalCheckpointListener listener, final TimeValue timeout) {
|
||||
synchronized void add(final long waitingForGlobalCheckpoint, final GlobalCheckpointListener listener, final TimeValue timeout) {
|
||||
if (closed) {
|
||||
executor.execute(() -> notifyListener(listener, UNASSIGNED_SEQ_NO, new IndexShardClosedException(shardId)));
|
||||
return;
|
||||
}
|
||||
if (lastKnownGlobalCheckpoint > currentGlobalCheckpoint) {
|
||||
if (lastKnownGlobalCheckpoint >= waitingForGlobalCheckpoint) {
|
||||
// notify directly
|
||||
executor.execute(() -> notifyListener(listener, lastKnownGlobalCheckpoint, null));
|
||||
} else {
|
||||
if (listeners == null) {
|
||||
listeners = new LinkedHashMap<>();
|
||||
}
|
||||
if (timeout == null) {
|
||||
listeners.put(listener, null);
|
||||
listeners.put(listener, Tuple.tuple(waitingForGlobalCheckpoint, null));
|
||||
} else {
|
||||
listeners.put(
|
||||
listener,
|
||||
scheduler.schedule(
|
||||
() -> {
|
||||
final boolean removed;
|
||||
synchronized (this) {
|
||||
/*
|
||||
* Note that the listeners map can be null if a notification nulled out the map reference when
|
||||
* notifying listeners, and then our scheduled execution occurred before we could be cancelled by
|
||||
* the notification. In this case, we would have blocked waiting for access to this critical
|
||||
* section.
|
||||
*
|
||||
* What is more, we know that this listener has a timeout associated with it (otherwise we would
|
||||
* not be here) so the return value from remove being null is an indication that we are not in the
|
||||
* map. This can happen if a notification nulled out the listeners, and then our scheduled execution
|
||||
* occurred before we could be cancelled by the notification, and then another thread added a
|
||||
* listener causing the listeners map reference to be non-null again. In this case, our listener
|
||||
* here would not be in the map and we should not fire the timeout logic.
|
||||
*/
|
||||
removed = listeners != null && listeners.remove(listener) != null;
|
||||
}
|
||||
if (removed) {
|
||||
final TimeoutException e = new TimeoutException(timeout.getStringRep());
|
||||
logger.trace("global checkpoint listener timed out", e);
|
||||
executor.execute(() -> notifyListener(listener, UNASSIGNED_SEQ_NO, e));
|
||||
}
|
||||
},
|
||||
timeout.nanos(),
|
||||
TimeUnit.NANOSECONDS));
|
||||
Tuple.tuple(
|
||||
waitingForGlobalCheckpoint,
|
||||
scheduler.schedule(
|
||||
() -> {
|
||||
final boolean removed;
|
||||
synchronized (this) {
|
||||
/*
|
||||
* We know that this listener has a timeout associated with it (otherwise we would not be
|
||||
* here) so the future component of the return value from remove being null is an indication
|
||||
* that we are not in the map. This can happen if a notification collected us into listeners
|
||||
* to be notified and removed us from the map, and then our scheduled execution occurred
|
||||
* before we could be cancelled by the notification. In this case, our listener here would
|
||||
* not be in the map and we should not fire the timeout logic.
|
||||
*/
|
||||
removed = listeners.remove(listener).v2() != null;
|
||||
}
|
||||
if (removed) {
|
||||
final TimeoutException e = new TimeoutException(timeout.getStringRep());
|
||||
logger.trace("global checkpoint listener timed out", e);
|
||||
executor.execute(() -> notifyListener(listener, UNASSIGNED_SEQ_NO, e));
|
||||
}
|
||||
},
|
||||
timeout.nanos(),
|
||||
TimeUnit.NANOSECONDS)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized void close() throws IOException {
|
||||
if (closed) {
|
||||
assert listeners.isEmpty() : listeners;
|
||||
}
|
||||
closed = true;
|
||||
notifyListeners(UNASSIGNED_SEQ_NO, new IndexShardClosedException(shardId));
|
||||
}
|
||||
|
@ -163,7 +164,7 @@ public class GlobalCheckpointListeners implements Closeable {
|
|||
* @return the number of listeners pending notification
|
||||
*/
|
||||
synchronized int pendingListeners() {
|
||||
return listeners == null ? 0 : listeners.size();
|
||||
return listeners.size();
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -173,7 +174,7 @@ public class GlobalCheckpointListeners implements Closeable {
|
|||
* @return a scheduled future representing the timeout future for the listener, otherwise null
|
||||
*/
|
||||
synchronized ScheduledFuture<?> getTimeoutFuture(final GlobalCheckpointListener listener) {
|
||||
return listeners.get(listener);
|
||||
return listeners.get(listener).v2();
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -191,28 +192,39 @@ public class GlobalCheckpointListeners implements Closeable {
|
|||
}
|
||||
|
||||
private void notifyListeners(final long globalCheckpoint, final IndexShardClosedException e) {
|
||||
assert Thread.holdsLock(this);
|
||||
assert (globalCheckpoint == UNASSIGNED_SEQ_NO && e != null) || (globalCheckpoint >= NO_OPS_PERFORMED && e == null);
|
||||
if (listeners != null) {
|
||||
// capture the current listeners
|
||||
final Map<GlobalCheckpointListener, ScheduledFuture<?>> currentListeners = listeners;
|
||||
listeners = null;
|
||||
if (currentListeners != null) {
|
||||
executor.execute(() -> {
|
||||
for (final Map.Entry<GlobalCheckpointListener, ScheduledFuture<?>> listener : currentListeners.entrySet()) {
|
||||
/*
|
||||
* We do not want to interrupt any timeouts that fired, these will detect that the listener has been notified and
|
||||
* not trigger the timeout.
|
||||
*/
|
||||
FutureUtils.cancel(listener.getValue());
|
||||
notifyListener(listener.getKey(), globalCheckpoint, e);
|
||||
}
|
||||
});
|
||||
}
|
||||
assert Thread.holdsLock(this) : Thread.currentThread();
|
||||
assertNotification(globalCheckpoint, e);
|
||||
|
||||
final Map<GlobalCheckpointListener, Tuple<Long, ScheduledFuture<?>>> listenersToNotify;
|
||||
if (globalCheckpoint != UNASSIGNED_SEQ_NO) {
|
||||
listenersToNotify =
|
||||
listeners
|
||||
.entrySet()
|
||||
.stream()
|
||||
.filter(entry -> entry.getValue().v1() <= globalCheckpoint)
|
||||
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
|
||||
listenersToNotify.keySet().forEach(listeners::remove);
|
||||
} else {
|
||||
listenersToNotify = new HashMap<>(listeners);
|
||||
listeners.clear();
|
||||
}
|
||||
if (listenersToNotify.isEmpty() == false) {
|
||||
executor.execute(() ->
|
||||
listenersToNotify
|
||||
.forEach((listener, t) -> {
|
||||
/*
|
||||
* We do not want to interrupt any timeouts that fired, these will detect that the listener has been
|
||||
* notified and not trigger the timeout.
|
||||
*/
|
||||
FutureUtils.cancel(t.v2());
|
||||
notifyListener(listener, globalCheckpoint, e);
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
private void notifyListener(final GlobalCheckpointListener listener, final long globalCheckpoint, final Exception e) {
|
||||
assertNotification(globalCheckpoint, e);
|
||||
|
||||
try {
|
||||
listener.accept(globalCheckpoint, e);
|
||||
} catch (final Exception caught) {
|
||||
|
@ -225,10 +237,21 @@ public class GlobalCheckpointListeners implements Closeable {
|
|||
} else if (e instanceof IndexShardClosedException) {
|
||||
logger.warn("error notifying global checkpoint listener of closed shard", caught);
|
||||
} else {
|
||||
assert e instanceof TimeoutException : e;
|
||||
logger.warn("error notifying global checkpoint listener of timeout", caught);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void assertNotification(final long globalCheckpoint, final Exception e) {
|
||||
if (Assertions.ENABLED) {
|
||||
assert globalCheckpoint >= UNASSIGNED_SEQ_NO : globalCheckpoint;
|
||||
if (globalCheckpoint != UNASSIGNED_SEQ_NO) {
|
||||
assert e == null : e;
|
||||
} else {
|
||||
assert e != null;
|
||||
assert e instanceof IndexShardClosedException || e instanceof TimeoutException : e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
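The new bookkeeping keys each listener by the checkpoint value it is waiting for, so an update only notifies (and removes) the listeners whose target has been reached. A much-simplified sketch of that pattern; timeouts, close handling, and the real GlobalCheckpointListener interface are left out, and the names here are invented:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.Executor;
    import java.util.function.LongConsumer;

    // Simplified sketch: notify a listener once the checkpoint reaches the value it waits for.
    class CheckpointNotifier {
        private final Executor executor;
        private final Map<LongConsumer, Long> listeners = new HashMap<>();
        private long lastKnown = Long.MIN_VALUE;

        CheckpointNotifier(Executor executor) {
            this.executor = executor;
        }

        synchronized void add(long waitingFor, LongConsumer listener) {
            if (lastKnown >= waitingFor) {
                // already there: notify directly, off the calling thread
                executor.execute(() -> listener.accept(lastKnown));
            } else {
                listeners.put(listener, waitingFor);
            }
        }

        synchronized void checkpointUpdated(long checkpoint) {
            lastKnown = checkpoint;
            // collect and remove everyone whose target has been reached
            Map<LongConsumer, Long> toNotify = new HashMap<>();
            listeners.entrySet().removeIf(entry -> {
                if (entry.getValue() <= checkpoint) {
                    toNotify.put(entry.getKey(), entry.getValue());
                    return true;
                }
                return false;
            });
            if (toNotify.isEmpty() == false) {
                executor.execute(() -> toNotify.keySet().forEach(l -> l.accept(checkpoint)));
            }
        }
    }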
|
@ -1781,19 +1781,20 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
|
|||
}
|
||||
|
||||
/**
|
||||
* Add a global checkpoint listener. If the global checkpoint is above the current global checkpoint known to the listener then the
|
||||
* listener will fire immediately on the calling thread. If the specified timeout elapses before the listener is notified, the listener
|
||||
* will be notified with an {@link TimeoutException}. A caller may pass null to specify no timeout.
|
||||
* Add a global checkpoint listener. If the global checkpoint is equal to or above the global checkpoint the listener is waiting for,
|
||||
* then the listener will be notified immediately via an executor (so possibly not on the current thread). If the specified timeout
|
||||
* elapses before the listener is notified, the listener will be notified with an {@link TimeoutException}. A caller may pass null to
|
||||
* specify no timeout.
|
||||
*
|
||||
* @param currentGlobalCheckpoint the current global checkpoint known to the listener
|
||||
* @param listener the listener
|
||||
* @param timeout the timeout
|
||||
* @param waitingForGlobalCheckpoint the global checkpoint the listener is waiting for
|
||||
* @param listener the listener
|
||||
* @param timeout the timeout
|
||||
*/
|
||||
public void addGlobalCheckpointListener(
|
||||
final long currentGlobalCheckpoint,
|
||||
final long waitingForGlobalCheckpoint,
|
||||
final GlobalCheckpointListeners.GlobalCheckpointListener listener,
|
||||
final TimeValue timeout) {
|
||||
this.globalCheckpointListeners.add(currentGlobalCheckpoint, listener, timeout);
|
||||
this.globalCheckpointListeners.add(waitingForGlobalCheckpoint, listener, timeout);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -21,7 +21,7 @@ package org.elasticsearch.monitor.jvm;
|
|||
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.time.CompoundDateTimeFormatter;
|
||||
import org.elasticsearch.common.time.DateFormatter;
|
||||
import org.elasticsearch.common.time.DateFormatters;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
|
||||
|
@ -43,7 +43,7 @@ public class HotThreads {
|
|||
|
||||
private static final Object mutex = new Object();
|
||||
|
||||
private static final CompoundDateTimeFormatter DATE_TIME_FORMATTER = DateFormatters.forPattern("dateOptionalTime");
|
||||
private static final DateFormatter DATE_TIME_FORMATTER = DateFormatters.forPattern("dateOptionalTime");
|
||||
|
||||
private int busiestThreads = 3;
|
||||
private TimeValue interval = new TimeValue(500, TimeUnit.MILLISECONDS);
|
||||
|
|
|
@ -25,7 +25,7 @@ import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse
|
|||
import org.elasticsearch.client.node.NodeClient;
|
||||
import org.elasticsearch.common.Table;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.time.CompoundDateTimeFormatter;
|
||||
import org.elasticsearch.common.time.DateFormatter;
|
||||
import org.elasticsearch.common.time.DateFormatters;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
|
@ -99,7 +99,7 @@ public class RestSnapshotAction extends AbstractCatAction {
|
|||
.endHeaders();
|
||||
}
|
||||
|
||||
private static final CompoundDateTimeFormatter FORMATTER = DateFormatters.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC);
|
||||
private static final DateFormatter FORMATTER = DateFormatters.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC);
|
||||
|
||||
private Table buildTable(RestRequest req, GetSnapshotsResponse getSnapshotsResponse) {
|
||||
Table table = getTableWithHeader(req);
|
||||
|
|
|
@ -27,7 +27,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
|
|||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.Table;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.time.CompoundDateTimeFormatter;
|
||||
import org.elasticsearch.common.time.DateFormatter;
|
||||
import org.elasticsearch.common.time.DateFormatters;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
|
@ -125,7 +125,7 @@ public class RestTasksAction extends AbstractCatAction {
|
|||
return table;
|
||||
}
|
||||
|
||||
private static final CompoundDateTimeFormatter FORMATTER = DateFormatters.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC);
|
||||
private static final DateFormatter FORMATTER = DateFormatters.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC);
|
||||
|
||||
private void buildRow(Table table, boolean fullId, boolean detailed, DiscoveryNodes discoveryNodes, TaskInfo taskInfo) {
|
||||
table.startRow();
|
||||
|
|
|
@ -0,0 +1,105 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.script;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.elasticsearch.common.logging.DeprecationLogger;
|
||||
|
||||
public final class ParameterMap implements Map<String, Object> {
|
||||
|
||||
private static final DeprecationLogger DEPRECATION_LOGGER =
|
||||
new DeprecationLogger(LogManager.getLogger(ParameterMap.class));
|
||||
|
||||
private final Map<String, Object> params;
|
||||
|
||||
private final Map<String, String> deprecations;
|
||||
|
||||
ParameterMap(Map<String, Object> params, Map<String, String> deprecations) {
|
||||
this.params = params;
|
||||
this.deprecations = deprecations;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return params.size();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isEmpty() {
|
||||
return params.isEmpty();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean containsKey(final Object key) {
|
||||
return params.containsKey(key);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean containsValue(final Object value) {
|
||||
return params.containsValue(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object get(final Object key) {
|
||||
String deprecationMessage = deprecations.get(key);
|
||||
if (deprecationMessage != null) {
|
||||
DEPRECATION_LOGGER.deprecated(deprecationMessage);
|
||||
}
|
||||
return params.get(key);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object put(final String key, final Object value) {
|
||||
return params.put(key, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object remove(final Object key) {
|
||||
return params.remove(key);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void putAll(final Map<? extends String, ?> m) {
|
||||
params.putAll(m);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clear() {
|
||||
params.clear();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<String> keySet() {
|
||||
return params.keySet();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<Object> values() {
|
||||
return params.values();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<Entry<String, Object>> entrySet() {
|
||||
return params.entrySet();
|
||||
}
|
||||
}
|
|
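ParameterMap only intercepts get(): if the requested key has an associated deprecation message, the message is logged and the lookup then proceeds as usual. A tiny standalone illustration of that delegate-and-warn behaviour (plain java.util and System.err, since DeprecationLogger is Elasticsearch-internal; names here are invented):

    import java.util.HashMap;
    import java.util.Map;

    public class DeprecationWarningDemo {
        public static void main(String[] args) {
            Map<String, Object> params = new HashMap<>();
            params.put("doc", "the-doc-lookup");
            params.put("num_terms", 3);

            Map<String, String> deprecations = new HashMap<>();
            deprecations.put("doc", "Accessing [doc] via [params.doc] is deprecated; access [doc] directly.");

            Object numTerms = get(params, deprecations, "num_terms"); // no warning
            Object doc = get(params, deprecations, "doc");            // prints the warning
            System.out.println(numTerms + " / " + doc);
        }

        static Object get(Map<String, Object> params, Map<String, String> deprecations, String key) {
            String message = deprecations.get(key);
            if (message != null) {
                System.err.println("DEPRECATION: " + message); // the real class uses DeprecationLogger
            }
            return params.get(key);
        }
    }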
@ -44,7 +44,7 @@ public class ScriptModule {
|
|||
SearchScript.AGGS_CONTEXT,
|
||||
ScoreScript.CONTEXT,
|
||||
SearchScript.SCRIPT_SORT_CONTEXT,
|
||||
SearchScript.TERMS_SET_QUERY_CONTEXT,
|
||||
TermsSetQueryScript.CONTEXT,
|
||||
ExecutableScript.CONTEXT,
|
||||
UpdateScript.CONTEXT,
|
||||
BucketAggregationScript.CONTEXT,
|
||||
|
|
|
@ -149,6 +149,4 @@ public abstract class SearchScript implements ScorerAware, ExecutableScript {
|
|||
public static final ScriptContext<Factory> AGGS_CONTEXT = new ScriptContext<>("aggs", Factory.class);
|
||||
// Can return a double. (For ScriptSortType#NUMBER only, for ScriptSortType#STRING normal CONTEXT should be used)
|
||||
public static final ScriptContext<Factory> SCRIPT_SORT_CONTEXT = new ScriptContext<>("sort", Factory.class);
|
||||
// Can return a long
|
||||
public static final ScriptContext<Factory> TERMS_SET_QUERY_CONTEXT = new ScriptContext<>("terms_set", Factory.class);
|
||||
}
|
||||
|
|
|
@ -0,0 +1,112 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.script;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.elasticsearch.index.fielddata.ScriptDocValues;
|
||||
import org.elasticsearch.search.lookup.LeafSearchLookup;
|
||||
import org.elasticsearch.search.lookup.SearchLookup;
|
||||
|
||||
public abstract class TermsSetQueryScript {
|
||||
|
||||
public static final String[] PARAMETERS = {};
|
||||
|
||||
public static final ScriptContext<Factory> CONTEXT = new ScriptContext<>("terms_set", Factory.class);
|
||||
|
||||
private static final Map<String, String> DEPRECATIONS;
|
||||
|
||||
static {
|
||||
Map<String, String> deprecations = new HashMap<>();
|
||||
deprecations.put(
|
||||
"doc",
|
||||
"Accessing variable [doc] via [params.doc] from within a terms-set-query-script " +
|
||||
"is deprecated in favor of directly accessing [doc]."
|
||||
);
|
||||
deprecations.put(
|
||||
"_doc",
|
||||
"Accessing variable [doc] via [params._doc] from within a terms-set-query-script " +
|
||||
"is deprecated in favor of directly accessing [doc]."
|
||||
);
|
||||
DEPRECATIONS = Collections.unmodifiableMap(deprecations);
|
||||
}
|
||||
|
||||
/**
|
||||
* The generic runtime parameters for the script.
|
||||
*/
|
||||
private final Map<String, Object> params;
|
||||
|
||||
/**
|
||||
* A leaf lookup for the bound segment this script will operate on.
|
||||
*/
|
||||
private final LeafSearchLookup leafLookup;
|
||||
|
||||
public TermsSetQueryScript(Map<String, Object> params, SearchLookup lookup, LeafReaderContext leafContext) {
|
||||
this.params = new ParameterMap(params, DEPRECATIONS);
|
||||
this.leafLookup = lookup.getLeafSearchLookup(leafContext);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the parameters for this script.
|
||||
*/
|
||||
public Map<String, Object> getParams() {
|
||||
this.params.putAll(leafLookup.asMap());
|
||||
return params;
|
||||
}
|
||||
|
||||
/**
|
||||
* The doc lookup for the Lucene segment this script was created for.
|
||||
*/
|
||||
public Map<String, ScriptDocValues<?>> getDoc() {
|
||||
return leafLookup.doc();
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the current document to run the script on next.
|
||||
*/
|
||||
public void setDocument(int docid) {
|
||||
leafLookup.setDocument(docid);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the result as a long. This is used by aggregation scripts over long fields.
|
||||
*/
|
||||
public long runAsLong() {
|
||||
return execute().longValue();
|
||||
}
|
||||
|
||||
public abstract Number execute();
|
||||
|
||||
/**
|
||||
* A factory to construct {@link TermsSetQueryScript} instances.
|
||||
*/
|
||||
public interface LeafFactory {
|
||||
TermsSetQueryScript newInstance(LeafReaderContext ctx) throws IOException;
|
||||
}
|
||||
|
||||
/**
|
||||
* A factory to construct stateful {@link TermsSetQueryScript} factories for a specific index.
|
||||
*/
|
||||
public interface Factory {
|
||||
LeafFactory newFactory(Map<String, Object> params, SearchLookup lookup);
|
||||
}
|
||||
}
|
|
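TermsSetQueryScript follows the usual script plumbing: a Factory produces a LeafFactory for a given set of parameters, which in turn produces one script instance per Lucene segment, and runAsLong() simply narrows execute()'s Number. A self-contained sketch of that shape, with the Elasticsearch and Lucene types replaced by placeholders:

    import java.util.Map;

    // Stripped-down sketch of the two-level factory pattern; all ES/Lucene types are placeholders.
    public class TwoLevelFactoryDemo {

        interface Factory {
            LeafFactory newFactory(Map<String, Object> params);
        }

        interface LeafFactory {
            Script newInstance(Object leafContext);
        }

        abstract static class Script {
            public long runAsLong() {
                return execute().longValue();
            }
            public abstract Number execute();
        }

        public static void main(String[] args) {
            // A trivial "script" that just returns the num_terms parameter.
            Factory factory = params -> leafContext -> new Script() {
                @Override
                public Number execute() {
                    return (Integer) params.get("num_terms");
                }
            };

            Script script = factory.newFactory(Map.of("num_terms", 3)).newInstance(new Object());
            System.out.println(script.runAsLong()); // 3
        }
    }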
@ -174,6 +174,16 @@ abstract class TopDocsCollectorContext extends QueryCollectorContext {
    }

    abstract static class SimpleTopDocsCollectorContext extends TopDocsCollectorContext {

        private static TopDocsCollector<?> createCollector(@Nullable SortAndFormats sortAndFormats, int numHits,
                                                           @Nullable ScoreDoc searchAfter, int hitCountThreshold) {
            if (sortAndFormats == null) {
                return TopScoreDocCollector.create(numHits, searchAfter, hitCountThreshold);
            } else {
                return TopFieldCollector.create(sortAndFormats.sort, numHits, (FieldDoc) searchAfter, hitCountThreshold);
            }
        }

        private final @Nullable SortAndFormats sortAndFormats;
        private final Collector collector;
        private final Supplier<TotalHits> totalHitsSupplier;

@ -201,12 +211,27 @@ abstract class TopDocsCollectorContext extends QueryCollectorContext {
                                     boolean hasFilterCollector) throws IOException {
            super(REASON_SEARCH_TOP_HITS, numHits);
            this.sortAndFormats = sortAndFormats;

            // implicit total hit counts are valid only when there is no filter collector in the chain
            final int hitCount = hasFilterCollector ? -1 : shortcutTotalHitCount(reader, query);
            final TopDocsCollector<?> topDocsCollector;
            if (hitCount == -1 && trackTotalHits) {
                topDocsCollector = createCollector(sortAndFormats, numHits, searchAfter, Integer.MAX_VALUE);
                topDocsSupplier = new CachedSupplier<>(topDocsCollector::topDocs);
                totalHitsSupplier = () -> topDocsSupplier.get().totalHits;
            } else {
                topDocsCollector = createCollector(sortAndFormats, numHits, searchAfter, 1); // don't compute hit counts via the collector
                topDocsSupplier = new CachedSupplier<>(topDocsCollector::topDocs);
                if (hitCount == -1) {
                    assert trackTotalHits == false;
                    totalHitsSupplier = () -> new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO);
                } else {
                    totalHitsSupplier = () -> new TotalHits(hitCount, TotalHits.Relation.EQUAL_TO);
                }
            }
            MaxScoreCollector maxScoreCollector = null;
            if (sortAndFormats == null) {
                final TopDocsCollector<?> topDocsCollector = TopScoreDocCollector.create(numHits, searchAfter, Integer.MAX_VALUE);
                this.collector = topDocsCollector;
                this.topDocsSupplier = new CachedSupplier<>(topDocsCollector::topDocs);
                this.totalHitsSupplier = () -> topDocsSupplier.get().totalHits;
                this.maxScoreSupplier = () -> {
                maxScoreSupplier = () -> {
                    TopDocs topDocs = topDocsSupplier.get();
                    if (topDocs.scoreDocs.length == 0) {
                        return Float.NaN;

@ -214,42 +239,13 @@ abstract class TopDocsCollectorContext extends QueryCollectorContext {
                        return topDocs.scoreDocs[0].score;
                    }
                };
            } else if (trackMaxScore) {
                maxScoreCollector = new MaxScoreCollector();
                maxScoreSupplier = maxScoreCollector::getMaxScore;
            } else {
                /**
                 * We explicitly don't track total hits in the topdocs collector, it can early terminate
                 * if the sort matches the index sort.
                 */
                final TopDocsCollector<?> topDocsCollector = TopFieldCollector.create(sortAndFormats.sort, numHits,
                    (FieldDoc) searchAfter, 1);
                this.topDocsSupplier = new CachedSupplier<>(topDocsCollector::topDocs);
                TotalHitCountCollector hitCountCollector = null;
                if (trackTotalHits) {
                    // implicit total hit counts are valid only when there is no filter collector in the chain
                    int count = hasFilterCollector ? -1 : shortcutTotalHitCount(reader, query);
                    if (count != -1) {
                        // we can extract the total count from the shard statistics directly
                        this.totalHitsSupplier = () -> new TotalHits(count, TotalHits.Relation.EQUAL_TO);
                    } else {
                        // wrap a collector that counts the total number of hits even
                        // if the top docs collector terminates early
                        final TotalHitCountCollector countingCollector = new TotalHitCountCollector();
                        hitCountCollector = countingCollector;
                        this.totalHitsSupplier = () -> new TotalHits(countingCollector.getTotalHits(), TotalHits.Relation.EQUAL_TO);
                    }
                } else {
                    // total hit count is not needed
                    // for bwc hit count is set to 0, it will be converted to -1 by the coordinating node
                    this.totalHitsSupplier = () -> new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO);
                }
                MaxScoreCollector maxScoreCollector = null;
                if (trackMaxScore) {
                    maxScoreCollector = new MaxScoreCollector();
                    maxScoreSupplier = maxScoreCollector::getMaxScore;
                } else {
                    maxScoreSupplier = () -> Float.NaN;
                }
                collector = MultiCollector.wrap(topDocsCollector, hitCountCollector, maxScoreCollector);
                maxScoreSupplier = () -> Float.NaN;
            }
            this.collector = MultiCollector.wrap(topDocsCollector, maxScoreCollector);
        }

        @Override

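The constructor above chooses the collector's hit-count threshold: Integer.MAX_VALUE when an exact total is required, and 1 when counting can be skipped so Lucene may terminate early and report TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO. A stand-alone sketch of that trade-off, reusing the Lucene call shown in the hunk; the helper class and the needExactTotal flag are invented for illustration only.

import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocsCollector;
import org.apache.lucene.search.TopScoreDocCollector;

// Hypothetical helper mirroring the threshold choice in SimpleTopDocsCollectorContext.
final class CollectorThresholds {

    static TopDocsCollector<?> topScoreCollector(int numHits, ScoreDoc searchAfter, boolean needExactTotal) {
        // Integer.MAX_VALUE: count every hit, so TotalHits.Relation is EQUAL_TO.
        // 1: stop counting once the top hits are stable; the relation may be GREATER_THAN_OR_EQUAL_TO.
        int hitCountThreshold = needExactTotal ? Integer.MAX_VALUE : 1;
        return TopScoreDocCollector.create(numHits, searchAfter, hitCountThreshold);
    }
}
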
@ -27,7 +27,7 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.time.CompoundDateTimeFormatter;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ObjectParser;

@ -52,7 +52,7 @@ public final class SnapshotInfo implements Comparable<SnapshotInfo>, ToXContent,

    public static final String CONTEXT_MODE_PARAM = "context_mode";
    public static final String CONTEXT_MODE_SNAPSHOT = "SNAPSHOT";
    private static final CompoundDateTimeFormatter DATE_TIME_FORMATTER = DateFormatters.forPattern("strictDateOptionalTime");
    private static final DateFormatter DATE_TIME_FORMATTER = DateFormatters.forPattern("strictDateOptionalTime");
    private static final String SNAPSHOT = "snapshot";
    private static final String UUID = "uuid";
    private static final String INDICES = "indices";

@ -19,7 +19,7 @@

package org.elasticsearch.common.joda;

import org.elasticsearch.common.time.CompoundDateTimeFormatter;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.test.ESTestCase;
import org.joda.time.DateTime;

@ -485,7 +485,7 @@ public class JavaJodaTimeDuellingTests extends ESTestCase {
        FormatDateTimeFormatter jodaFormatter = Joda.forPattern(format);
        DateTime jodaDateTime = jodaFormatter.parser().parseDateTime(input);

        CompoundDateTimeFormatter javaTimeFormatter = DateFormatters.forPattern(format);
        DateFormatter javaTimeFormatter = DateFormatters.forPattern(format);
        TemporalAccessor javaTimeAccessor = javaTimeFormatter.parse(input);
        ZonedDateTime zonedDateTime = DateFormatters.toZonedDateTime(javaTimeAccessor);

@ -507,7 +507,7 @@ public class JavaJodaTimeDuellingTests extends ESTestCase {
    }

    private void assertJavaTimeParseException(String input, String format, String expectedMessage) {
        CompoundDateTimeFormatter javaTimeFormatter = DateFormatters.forPattern(format);
        DateFormatter javaTimeFormatter = DateFormatters.forPattern(format);
        DateTimeParseException dateTimeParseException = expectThrows(DateTimeParseException.class, () -> javaTimeFormatter.parse(input));
        assertThat(dateTimeParseException.getMessage(), startsWith(expectedMessage));
    }

@ -180,6 +180,7 @@ public class SettingTests extends ESTestCase {
        }
    }

    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33135")
    public void testValidateStringSetting() {
        Settings settings = Settings.builder().putList("foo.bar", Arrays.asList("bla-a", "bla-b")).build();
        Setting<String> stringSetting = Setting.simpleString("foo.bar", Property.NodeScope);

@ -31,17 +31,15 @@ import static org.hamcrest.Matchers.is;

public class DateFormattersTests extends ESTestCase {

    // the epoch milli parser is a bit special, as it does not use date formatter, see comments in DateFormatters
    public void testEpochMilliParser() {
        CompoundDateTimeFormatter formatter = DateFormatters.forPattern("epoch_millis");
        DateFormatter formatter = DateFormatters.forPattern("epoch_millis");

        DateTimeParseException e = expectThrows(DateTimeParseException.class, () -> formatter.parse("invalid"));
        assertThat(e.getMessage(), containsString("invalid number"));

        // different zone, should still yield the same output, as epoch is time zoned independent
        // different zone, should still yield the same output, as epoch is time zone independent
        ZoneId zoneId = randomZone();
        CompoundDateTimeFormatter zonedFormatter = formatter.withZone(zoneId);
        assertThat(zonedFormatter.printer.getZone(), is(zoneId));
        DateFormatter zonedFormatter = formatter.withZone(zoneId);

        // test with negative and non negative values
        assertThatSameDateTime(formatter, zonedFormatter, randomNonNegativeLong() * -1);

@ -58,14 +56,21 @@ public class DateFormattersTests extends ESTestCase {
        assertSameFormat(formatter, 1);
    }

    private void assertThatSameDateTime(CompoundDateTimeFormatter formatter, CompoundDateTimeFormatter zonedFormatter, long millis) {
    public void testEpochMilliParsersWithDifferentFormatters() {
        DateFormatter formatter = DateFormatters.forPattern("strict_date_optional_time||epoch_millis");
        TemporalAccessor accessor = formatter.parse("123");
        assertThat(DateFormatters.toZonedDateTime(accessor).toInstant().toEpochMilli(), is(123L));
        assertThat(formatter.pattern(), is("strict_date_optional_time||epoch_millis"));
    }

    private void assertThatSameDateTime(DateFormatter formatter, DateFormatter zonedFormatter, long millis) {
        String millisAsString = String.valueOf(millis);
        ZonedDateTime formatterZonedDateTime = DateFormatters.toZonedDateTime(formatter.parse(millisAsString));
        ZonedDateTime zonedFormatterZonedDateTime = DateFormatters.toZonedDateTime(zonedFormatter.parse(millisAsString));
        assertThat(formatterZonedDateTime.toInstant().toEpochMilli(), is(zonedFormatterZonedDateTime.toInstant().toEpochMilli()));
    }

    private void assertSameFormat(CompoundDateTimeFormatter formatter, long millis) {
    private void assertSameFormat(DateFormatter formatter, long millis) {
        String millisAsString = String.valueOf(millis);
        TemporalAccessor accessor = formatter.parse(millisAsString);
        assertThat(millisAsString, is(formatter.format(accessor)));

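The new testEpochMilliParsersWithDifferentFormatters above composes two parsers with "||". A small usage sketch of the same internal org.elasticsearch.common.time API as it appears in these tests; the class name and sample values are illustrative, and the exact set of accepted formats depends on that implementation.

import java.time.ZonedDateTime;

import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;

public class CombinedPatternExample {
    public static void main(String[] args) {
        // Either alternative may match: an ISO-8601 date(-time) or raw epoch milliseconds.
        DateFormatter formatter = DateFormatters.forPattern("strict_date_optional_time||epoch_millis");
        System.out.println(formatter.pattern()); // strict_date_optional_time||epoch_millis

        // Raw millis fall through to the epoch_millis parser.
        ZonedDateTime fromMillis = DateFormatters.toZonedDateTime(formatter.parse("1536148800000"));

        // An ISO timestamp is handled by the strict_date_optional_time parser.
        ZonedDateTime fromIso = DateFormatters.toZonedDateTime(formatter.parse("2018-09-05T12:00:00.000Z"));

        // Both inputs name the same instant: 2018-09-05T12:00:00Z.
        System.out.println(fromMillis.toInstant().equals(fromIso.toInstant())); // true
    }
}
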
@ -35,7 +35,7 @@ import static org.hamcrest.Matchers.is;

public class DateMathParserTests extends ESTestCase {

    private final CompoundDateTimeFormatter formatter = DateFormatters.forPattern("dateOptionalTime||epoch_millis");
    private final DateFormatter formatter = DateFormatters.forPattern("dateOptionalTime||epoch_millis");
    private final DateMathParser parser = new DateMathParser(formatter);

    public void testBasicDates() {

@ -138,7 +138,7 @@ public class DateMathParserTests extends ESTestCase {

    public void testRoundingPreservesEpochAsBaseDate() {
        // If a user only specifies times, then the date needs to always be 1970-01-01 regardless of rounding
        CompoundDateTimeFormatter formatter = DateFormatters.forPattern("HH:mm:ss");
        DateFormatter formatter = DateFormatters.forPattern("HH:mm:ss");
        DateMathParser parser = new DateMathParser(formatter);
        ZonedDateTime zonedDateTime = DateFormatters.toZonedDateTime(formatter.parse("04:52:20"));
        assertThat(zonedDateTime.getYear(), is(1970));

@ -164,7 +164,7 @@ public class DateMathParserTests extends ESTestCase {
        assertDateMathEquals("2014-11-18T09:20", "2014-11-18T08:20:59.999Z", 0, true, ZoneId.of("CET"));

        // implicit rounding with explicit timezone in the date format
        CompoundDateTimeFormatter formatter = DateFormatters.forPattern("yyyy-MM-ddXXX");
        DateFormatter formatter = DateFormatters.forPattern("yyyy-MM-ddXXX");
        DateMathParser parser = new DateMathParser(formatter);
        long time = parser.parse("2011-10-09+01:00", () -> 0, false, null);
        assertEquals(this.parser.parse("2011-10-09T00:00:00.000+01:00", () -> 0), time);

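For context, the DateMathParser exercised here resolves date-math expressions against a caller-supplied "now" and the formatter given to its constructor. A brief sketch in the spirit of the assertions above; the package location is assumed from the imports in these tests, and the expression, class name, and expected value are illustrative only.

import java.time.ZoneOffset;

import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.time.DateMathParser;

public class DateMathExample {
    public static void main(String[] args) {
        DateFormatter formatter = DateFormatters.forPattern("dateOptionalTime||epoch_millis");
        DateMathParser parser = new DateMathParser(formatter);

        // "now" comes from the supplied callback; fixing it at the epoch keeps the result reproducible.
        // "now+1d/d" adds one day, then rounds down (roundUp = false) to the start of that day in UTC.
        long millis = parser.parse("now+1d/d", () -> 0L, false, ZoneOffset.UTC);
        System.out.println(millis); // 86400000, i.e. 1970-01-02T00:00:00Z
    }
}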