Migrate repository settings to the new settings API
commit ae5da3432c
parent d371ef35f4
ClusterSettings.java

@@ -47,7 +47,6 @@ import org.elasticsearch.gateway.GatewayService;
 import org.elasticsearch.discovery.zen.fd.FaultDetection;
 import org.elasticsearch.discovery.zen.ping.unicast.UnicastZenPing;
 import org.elasticsearch.gateway.PrimaryShardAllocator;
-import org.elasticsearch.http.netty.NettyHttpChannel;
 import org.elasticsearch.http.netty.NettyHttpServerTransport;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.store.IndexStoreConfig;
@@ -59,6 +58,8 @@ import org.elasticsearch.indices.recovery.RecoverySettings;
 import org.elasticsearch.indices.store.IndicesStore;
 import org.elasticsearch.indices.ttl.IndicesTTLService;
 import org.elasticsearch.node.Node;
+import org.elasticsearch.repositories.fs.FsRepository;
+import org.elasticsearch.repositories.uri.URLRepository;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.search.SearchService;
 import org.elasticsearch.threadpool.ThreadPool;
@@ -129,6 +130,9 @@ public final class ClusterSettings extends AbstractScopedSettings {
     FilterAllocationDecider.CLUSTER_ROUTING_INCLUDE_GROUP_SETTING,
     FilterAllocationDecider.CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING,
     FilterAllocationDecider.CLUSTER_ROUTING_REQUIRE_GROUP_SETTING,
+    FsRepository.REPOSITORIES_CHUNK_SIZE_SETTING,
+    FsRepository.REPOSITORIES_COMPRESS_SETTING,
+    FsRepository.REPOSITORIES_LOCATION_SETTING,
     IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE_SETTING,
     IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING,
     IndicesTTLService.INDICES_TTL_INTERVAL_SETTING,
@@ -231,6 +235,9 @@ public final class ClusterSettings extends AbstractScopedSettings {
     UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING,
     SearchService.DEFAULT_KEEPALIVE_SETTING,
     SearchService.KEEPALIVE_INTERVAL_SETTING,
-    Node.WRITE_PORTS_FIELD_SETTING
-    )));
+    Node.WRITE_PORTS_FIELD_SETTING,
+    URLRepository.ALLOWED_URLS_SETTING,
+    URLRepository.REPOSITORIES_LIST_DIRECTORIES_SETTING,
+    URLRepository.REPOSITORIES_URL_SETTING,
+    URLRepository.SUPPORTED_PROTOCOLS_SETTING)));
 }
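The registration block above works because each repository option is now declared as a typed Setting constant rather than addressed by a raw string key; registering the node-level REPOSITORIES_* constants here makes them known to the cluster settings infrastructure. A minimal sketch of what such a constant provides, using only API that appears in this diff (boolSetting, exists, get, getKey); the SettingSketch class and the Settings.settingsBuilder() helper (the builder used by the test change later in this commit) are assumptions for illustration:

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;

public class SettingSketch {
    // Same shape as FsRepository.REPOSITORIES_COMPRESS_SETTING: key, default, dynamic flag, scope.
    static final Setting<Boolean> REPOSITORIES_COMPRESS_SETTING =
            Setting.boolSetting("repositories.fs.compress", false, false, Setting.Scope.CLUSTER);

    public static void main(String[] args) {
        Settings nodeSettings = Settings.settingsBuilder()
                .put("repositories.fs.compress", "true")
                .build();

        // Typed accessors replace settings.getAsBoolean("repositories.fs.compress", false).
        boolean compress = REPOSITORIES_COMPRESS_SETTING.get(nodeSettings);          // true
        boolean explicitlySet = REPOSITORIES_COMPRESS_SETTING.exists(nodeSettings);  // true only if the key was set
        System.out.println(REPOSITORIES_COMPRESS_SETTING.getKey() + " = " + compress
                + " (explicitly set: " + explicitlySet + ")");
    }
}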
FsRepository.java

@@ -23,6 +23,7 @@ import org.elasticsearch.common.blobstore.BlobPath;
 import org.elasticsearch.common.blobstore.BlobStore;
 import org.elasticsearch.common.blobstore.fs.FsBlobStore;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.snapshots.IndexShardRepository;
@@ -33,6 +34,7 @@ import org.elasticsearch.repositories.blobstore.BlobStoreRepository;
 
 import java.io.IOException;
 import java.nio.file.Path;
+import java.util.function.Function;
 
 /**
  * Shared file system implementation of the BlobStoreRepository
@@ -49,6 +51,13 @@ public class FsRepository extends BlobStoreRepository {
 
     public final static String TYPE = "fs";
 
+    public static final Setting<String> LOCATION_SETTING = new Setting<>("location", "", Function.identity(), false, Setting.Scope.CLUSTER);
+    public static final Setting<String> REPOSITORIES_LOCATION_SETTING = new Setting<>("repositories.fs.location", "", Function.identity(), false, Setting.Scope.CLUSTER);
+    public static final Setting<ByteSizeValue> CHUNK_SIZE_SETTING = Setting.byteSizeSetting("chunk_size", "-1", false, Setting.Scope.CLUSTER);
+    public static final Setting<ByteSizeValue> REPOSITORIES_CHUNK_SIZE_SETTING = Setting.byteSizeSetting("repositories.fs.chunk_size", "-1", false, Setting.Scope.CLUSTER);
+    public static final Setting<Boolean> COMPRESS_SETTING = Setting.boolSetting("compress", false, false, Setting.Scope.CLUSTER);
+    public static final Setting<Boolean> REPOSITORIES_COMPRESS_SETTING = Setting.boolSetting("repositories.fs.compress", false, false, Setting.Scope.CLUSTER);
+
     private final FsBlobStore blobStore;
 
     private ByteSizeValue chunkSize;
@@ -68,7 +77,7 @@ public class FsRepository extends BlobStoreRepository {
     public FsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository, Environment environment) throws IOException {
         super(name.getName(), repositorySettings, indexShardRepository);
         Path locationFile;
-        String location = repositorySettings.settings().get("location", settings.get("repositories.fs.location"));
+        String location = LOCATION_SETTING.exists(repositorySettings.settings()) ? LOCATION_SETTING.get(repositorySettings.settings()) : REPOSITORIES_LOCATION_SETTING.get(settings);
         if (location == null) {
             logger.warn("the repository location is missing, it should point to a shared file system location that is available on all master and data nodes");
             throw new RepositoryException(name.name(), "missing location");
@@ -85,8 +94,14 @@ public class FsRepository extends BlobStoreRepository {
         }
 
         blobStore = new FsBlobStore(settings, locationFile);
-        this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", settings.getAsBytesSize("repositories.fs.chunk_size", null));
-        this.compress = repositorySettings.settings().getAsBoolean("compress", settings.getAsBoolean("repositories.fs.compress", false));
+        if (CHUNK_SIZE_SETTING.exists(repositorySettings.settings())) {
+            this.chunkSize = CHUNK_SIZE_SETTING.get(repositorySettings.settings());
+        } else if (REPOSITORIES_CHUNK_SIZE_SETTING.exists(settings)) {
+            this.chunkSize = REPOSITORIES_CHUNK_SIZE_SETTING.get(settings);
+        } else {
+            this.chunkSize = null;
+        }
+        this.compress = COMPRESS_SETTING.exists(repositorySettings.settings()) ? COMPRESS_SETTING.get(repositorySettings.settings()) : REPOSITORIES_COMPRESS_SETTING.get(settings);
         this.basePath = BlobPath.cleanPath();
     }
 
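The constructor changes above encode a precedence rule: a value set on the repository itself (location, chunk_size, compress) wins, otherwise the node-level repositories.fs.* setting is consulted, otherwise the default applies. A standalone sketch of that rule under the same Setting API shown in this diff; the FallbackSketch class and the Settings.settingsBuilder() calls are illustrative assumptions:

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;

public class FallbackSketch {
    static final Setting<Boolean> COMPRESS_SETTING =
            Setting.boolSetting("compress", false, false, Setting.Scope.CLUSTER);
    static final Setting<Boolean> REPOSITORIES_COMPRESS_SETTING =
            Setting.boolSetting("repositories.fs.compress", false, false, Setting.Scope.CLUSTER);

    // Same shape as the ternary in the constructor: per-repository key first, node-level key second.
    static boolean resolveCompress(Settings repositorySettings, Settings nodeSettings) {
        return COMPRESS_SETTING.exists(repositorySettings)
                ? COMPRESS_SETTING.get(repositorySettings)
                : REPOSITORIES_COMPRESS_SETTING.get(nodeSettings);
    }

    public static void main(String[] args) {
        Settings repo = Settings.settingsBuilder().build(); // nothing set on the repository itself
        Settings node = Settings.settingsBuilder().put("repositories.fs.compress", "true").build();
        System.out.println(resolveCompress(repo, node)); // true, taken from the node-level key
    }
}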
URLRepository.java

@@ -20,11 +20,11 @@
 package org.elasticsearch.repositories.uri;
 
 import org.elasticsearch.cluster.metadata.SnapshotId;
-import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.blobstore.BlobPath;
 import org.elasticsearch.common.blobstore.BlobStore;
 import org.elasticsearch.common.blobstore.url.URLBlobStore;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.util.URIPattern;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.snapshots.IndexShardRepository;
@@ -34,9 +34,13 @@ import org.elasticsearch.repositories.RepositorySettings;
 import org.elasticsearch.repositories.blobstore.BlobStoreRepository;
 
 import java.io.IOException;
+import java.net.MalformedURLException;
 import java.net.URISyntaxException;
 import java.net.URL;
+import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
+import java.util.function.Function;
 
 /**
  * Read-only URL-based implementation of the BlobStoreRepository
@@ -51,13 +55,21 @@ public class URLRepository extends BlobStoreRepository {
 
     public final static String TYPE = "url";
 
-    public final static String[] DEFAULT_SUPPORTED_PROTOCOLS = {"http", "https", "ftp", "file", "jar"};
+    public static final Setting<List<String>> SUPPORTED_PROTOCOLS_SETTING = Setting.listSetting("repositories.url.supported_protocols",
+        Arrays.asList("http", "https", "ftp", "file", "jar"), Function.identity(), false, Setting.Scope.CLUSTER);
 
-    public final static String SUPPORTED_PROTOCOLS_SETTING = "repositories.url.supported_protocols";
+    public static final Setting<List<URIPattern>> ALLOWED_URLS_SETTING = Setting.listSetting("repositories.url.allowed_urls",
+        Collections.emptyList(), URIPattern::new, false, Setting.Scope.CLUSTER);
 
-    public final static String ALLOWED_URLS_SETTING = "repositories.url.allowed_urls";
+    public static final Setting<URL> URL_SETTING = new Setting<>("url", "http:", URLRepository::parseURL, false, Setting.Scope.CLUSTER);
+    public static final Setting<URL> REPOSITORIES_URL_SETTING = new Setting<>("repositories.url.url", (s) -> s.get("repositories.uri.url", "http:"),
+        URLRepository::parseURL, false, Setting.Scope.CLUSTER);
 
-    private final String[] supportedProtocols;
+    public static final Setting<Boolean> LIST_DIRECTORIES_SETTING = Setting.boolSetting("list_directories", true, false, Setting.Scope.CLUSTER);
+    public static final Setting<Boolean> REPOSITORIES_LIST_DIRECTORIES_SETTING = Setting.boolSetting("repositories.uri.list_directories", true,
+        false, Setting.Scope.CLUSTER);
+
+    private final List<String> supportedProtocols;
 
     private final URIPattern[] urlWhiteList;
 
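ALLOWED_URLS_SETTING above also shows the list variant of the new API: Setting.listSetting takes a per-element parser, so the configured strings arrive as a ready-made List<URIPattern> instead of a String[] that each caller converts by hand. A small sketch of that conversion, assuming the putArray builder method used by the test change later in this commit; the AllowedUrlsSketch class and the example pattern values are illustrative:

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.URIPattern;

import java.util.Collections;
import java.util.List;

public class AllowedUrlsSketch {
    // Same shape as URLRepository.ALLOWED_URLS_SETTING: each raw string is parsed by URIPattern::new.
    static final Setting<List<URIPattern>> ALLOWED_URLS_SETTING = Setting.listSetting(
            "repositories.url.allowed_urls", Collections.emptyList(), URIPattern::new, false, Setting.Scope.CLUSTER);

    public static void main(String[] args) {
        Settings nodeSettings = Settings.settingsBuilder()
                .putArray("repositories.url.allowed_urls",
                        "http://snapshots.example.com/*", "file:/mnt/backups/*")
                .build();

        List<URIPattern> allowed = ALLOWED_URLS_SETTING.get(nodeSettings);
        System.out.println(allowed.size() + " whitelist pattern(s) parsed"); // 2
    }
}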
@@ -79,21 +91,16 @@ public class URLRepository extends BlobStoreRepository {
     @Inject
     public URLRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository, Environment environment) throws IOException {
         super(name.getName(), repositorySettings, indexShardRepository);
-        URL url;
-        String path = repositorySettings.settings().get("url", settings.get("repositories.url.url", settings.get("repositories.uri.url")));
-        if (path == null) {
+        if (URL_SETTING.exists(repositorySettings.settings()) == false && REPOSITORIES_URL_SETTING.exists(settings) == false) {
             throw new RepositoryException(name.name(), "missing url");
-        } else {
-            url = new URL(path);
         }
-        supportedProtocols = settings.getAsArray(SUPPORTED_PROTOCOLS_SETTING, DEFAULT_SUPPORTED_PROTOCOLS);
-        String[] urlWhiteList = settings.getAsArray(ALLOWED_URLS_SETTING, Strings.EMPTY_ARRAY);
-        this.urlWhiteList = new URIPattern[urlWhiteList.length];
-        for (int i = 0; i < urlWhiteList.length; i++) {
-            this.urlWhiteList[i] = new URIPattern(urlWhiteList[i]);
-        }
+        supportedProtocols = SUPPORTED_PROTOCOLS_SETTING.get(settings);
+        urlWhiteList = ALLOWED_URLS_SETTING.get(settings).toArray(new URIPattern[]{});
         this.environment = environment;
-        listDirectories = repositorySettings.settings().getAsBoolean("list_directories", settings.getAsBoolean("repositories.uri.list_directories", true));
+        listDirectories = LIST_DIRECTORIES_SETTING.exists(repositorySettings.settings()) ? LIST_DIRECTORIES_SETTING.get(repositorySettings.settings()) : REPOSITORIES_LIST_DIRECTORIES_SETTING.get(settings);
+
+        URL url = URL_SETTING.exists(repositorySettings.settings()) ? URL_SETTING.get(repositorySettings.settings()) : REPOSITORIES_URL_SETTING.get(settings);
         URL normalizedURL = checkURL(url);
         blobStore = new URLBlobStore(settings, normalizedURL);
         basePath = BlobPath.cleanPath();
@@ -147,8 +154,8 @@ public class URLRepository extends BlobStoreRepository {
         // We didn't match white list - try to resolve against path.repo
         URL normalizedUrl = environment.resolveRepoURL(url);
         if (normalizedUrl == null) {
-            logger.warn("The specified url [{}] doesn't start with any repository paths specified by the path.repo setting: [{}] or by repositories.url.allowed_urls setting: [{}] ", url, environment.repoFiles());
-            throw new RepositoryException(repositoryName, "file url [" + url + "] doesn't match any of the locations specified by path.repo or repositories.url.allowed_urls");
+            logger.warn("The specified url [{}] doesn't start with any repository paths specified by the path.repo setting or by {} setting: [{}] ", url, ALLOWED_URLS_SETTING.getKey(), environment.repoFiles());
+            throw new RepositoryException(repositoryName, "file url [" + url + "] doesn't match any of the locations specified by path.repo or " + ALLOWED_URLS_SETTING.getKey());
         }
         return normalizedUrl;
     }
@@ -161,4 +168,11 @@ public class URLRepository extends BlobStoreRepository {
         return true;
     }
 
+    private static URL parseURL(String s) {
+        try {
+            return new URL(s);
+        } catch (MalformedURLException e) {
+            throw new IllegalArgumentException("Unable to parse URL repository setting", e);
+        }
+    }
 }
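The new parseURL helper exists because a Setting parser is a plain Function<String, T> and cannot throw the checked MalformedURLException; wrapping it in IllegalArgumentException turns a bad url value into an ordinary setting-validation failure instead of a checked exception escaping the constructor. A sketch of the resulting behaviour with an intentionally malformed value; the UrlSettingSketch class is an illustrative assumption:

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;

import java.net.MalformedURLException;
import java.net.URL;

public class UrlSettingSketch {
    // Mirrors URLRepository.URL_SETTING and its parseURL helper.
    private static URL parseURL(String s) {
        try {
            return new URL(s);
        } catch (MalformedURLException e) {
            throw new IllegalArgumentException("Unable to parse URL repository setting", e);
        }
    }

    static final Setting<URL> URL_SETTING =
            new Setting<>("url", "http:", UrlSettingSketch::parseURL, false, Setting.Scope.CLUSTER);

    public static void main(String[] args) {
        Settings ok = Settings.settingsBuilder().put("url", "file:/mnt/backups/repo").build();
        System.out.println(URL_SETTING.get(ok)); // file:/mnt/backups/repo

        Settings bad = Settings.settingsBuilder().put("url", "not a url").build();
        try {
            URL_SETTING.get(bad);
        } catch (IllegalArgumentException e) {
            System.out.println("rejected: " + e.getMessage()); // the malformed value is reported as a validation error
        }
    }
}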
RestoreBackwardsCompatIT.java

@@ -1,3 +1,4 @@
+/*
 /*
  * Licensed to Elasticsearch under one or more contributor
  * license agreements. See the NOTICE file distributed with
@@ -28,6 +29,7 @@ import org.elasticsearch.cluster.metadata.IndexTemplateMetaData;
 import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
+import org.elasticsearch.repositories.uri.URLRepository;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase;
 import org.elasticsearch.snapshots.RestoreInfo;
@@ -73,7 +75,7 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase {
             URI repoJarPatternUri = new URI("jar:" + getBwcIndicesPath().toUri().toString() + "*.zip!/repo/");
             return settingsBuilder()
                 .put(super.nodeSettings(nodeOrdinal))
-                .putArray("repositories.url.allowed_urls", repoJarPatternUri.toString())
+                .putArray(URLRepository.ALLOWED_URLS_SETTING.getKey(), repoJarPatternUri.toString())
                 .build();
         } catch (URISyntaxException ex) {
             throw new IllegalArgumentException(ex);
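With ALLOWED_URLS_SETTING now a constant, the test refers to the key through getKey() instead of repeating the literal string, so a later rename of the key breaks at compile time rather than silently configuring nothing. A minimal sketch of the same builder pattern; the NodeSettingsSketch class and the example pattern value are illustrative:

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.repositories.uri.URLRepository;

public class NodeSettingsSketch {
    static Settings nodeSettings(String repoJarPattern) {
        return Settings.settingsBuilder()
                // The key comes from the Setting constant, not from a string literal.
                .putArray(URLRepository.ALLOWED_URLS_SETTING.getKey(), repoJarPattern)
                .build();
    }

    public static void main(String[] args) {
        Settings settings = nodeSettings("jar:file:/tmp/bwc-indices/*.zip!/repo/");
        System.out.println(settings.getAsArray(URLRepository.ALLOWED_URLS_SETTING.getKey())[0]);
    }
}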