[ML][Inference] changing setting to be memorySizeSetting (#49259) (#49302)

This commit is contained in:
Benjamin Trent 2019-11-19 07:56:40 -05:00 committed by GitHub
parent 38aec2e298
commit 19602fd573
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
1 changed file with 2 additions and 3 deletions

View File

@@ -20,7 +20,6 @@ import org.elasticsearch.common.cache.RemovalNotification;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.set.Sets;
@@ -57,8 +56,8 @@ public class ModelLoadingService implements ClusterStateListener {
* Once the limit is reached, LRU models are evicted in favor of new models
*/
public static final Setting<ByteSizeValue> INFERENCE_MODEL_CACHE_SIZE =
Setting.byteSizeSetting("xpack.ml.inference_model.cache_size",
new ByteSizeValue(1, ByteSizeUnit.GB),
Setting.memorySizeSetting("xpack.ml.inference_model.cache_size",
"40%",
Setting.Property.NodeScope);
/**