SOLR-9502: ResponseWriters should natively support MapSerializable

Noble Paul 2016-09-19 17:45:17 +05:30
parent 286b35b020
commit 1e18c12c19
17 changed files with 94 additions and 65 deletions
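A rough reading of the change, sketched below: MapSerializable moves from org.apache.solr.core to org.apache.solr.common, toMap() now receives a caller-supplied target map, and the response writers (TextResponseWriter, JavaBinCodec) expand such objects themselves at write time, so callers can add the object directly to the response instead of calling toMap() eagerly. The ExampleStatus class is hypothetical and exists only to illustrate the new contract.

import java.util.Map;
import org.apache.solr.common.MapSerializable;

public class ExampleStatus implements MapSerializable {
  private final long version;

  public ExampleStatus(long version) {
    this.version = version;
  }

  @Override
  public Map<String, Object> toMap(Map<String, Object> map) {
    // Fill the map handed in by the writer and return it; do not keep a
    // reference to it, since the framework may reuse it.
    map.put("version", version);
    return map;
  }
}

// In a handler or component: rsp.add("status", new ExampleStatus(42));
// the writer calls toMap(...) only when the value is actually written out.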

View File

@ -169,6 +169,8 @@ Other Changes
* SOLR-9365: Reduce noise in solr logs during graceful shutdown. (Cao Manh Dat via shalin)
* SOLR-9451: Make clusterstatus command logging less verbose. (Varun Thacker)
* SOLR-9502: ResponseWriters should natively support MapSerializable (noble)
================== 6.2.1 ==================
Bug Fixes

View File

@ -22,6 +22,7 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.apache.solr.common.MapSerializable;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CoreAdminParams;
import org.apache.solr.common.util.StrUtils;
@ -228,7 +229,7 @@ public class ConfigOverlay implements MapSerializable {
}
@Override
public Map<String, Object> toMap() {
public Map<String, Object> toMap(Map<String, Object> map) {
Map result = new LinkedHashMap();
result.put(ZNODEVER, znodeVersion);
result.putAll(data);

View File

@ -16,6 +16,7 @@
*/
package org.apache.solr.core;
import org.apache.solr.common.MapSerializable;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.util.DOMUtil;
import org.w3c.dom.Node;
@ -33,7 +34,7 @@ import static org.apache.solr.schema.FieldType.CLASS_NAME;
* An Object which represents a Plugin of any type
*
*/
public class PluginInfo implements MapSerializable{
public class PluginInfo implements MapSerializable {
public final String name, className, type;
public final NamedList initArgs;
public final Map<String, String> attributes;
@ -128,21 +129,23 @@ public class PluginInfo implements MapSerializable{
List<PluginInfo> l = getChildren(type);
return l.isEmpty() ? null:l.get(0);
}
public Map<String,Object> toMap(){
LinkedHashMap m = new LinkedHashMap(attributes);
public Map<String, Object> toMap(Map<String, Object> map) {
map.putAll(attributes);
Map m = map;
if(initArgs!=null ) m.putAll(initArgs.asMap(3));
if(children != null){
for (PluginInfo child : children) {
Object old = m.get(child.name);
if(old == null){
m.put(child.name, child.toMap());
m.put(child.name, child.toMap(new LinkedHashMap<>()));
} else if (old instanceof List) {
List list = (List) old;
list.add(child.toMap());
list.add(child.toMap(new LinkedHashMap<>()));
} else {
ArrayList l = new ArrayList();
l.add(old);
l.add(child.toMap());
l.add(child.toMap(new LinkedHashMap<>()));
m.put(child.name,l);
}
}

View File

@ -28,6 +28,7 @@ import java.util.Map;
import com.google.common.collect.ImmutableMap;
import org.apache.solr.cloud.ZkSolrResourceLoader;
import org.apache.solr.common.MapSerializable;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.params.MapSolrParams;
@ -124,7 +125,7 @@ public class RequestParams implements MapSerializable {
}
@Override
public Map<String, Object> toMap() {
public Map<String, Object> toMap(Map<String, Object> map) {
return getMapWithVersion(data, znodeVersion);
}
@ -140,7 +141,7 @@ public class RequestParams implements MapSerializable {
Map p = (Map) deepCopy.get(NAME);
if (p == null) deepCopy.put(NAME, p = new LinkedHashMap());
if (paramSet == null) p.remove(name);
else p.put(name, paramSet.toMap());
else p.put(name, paramSet.toMap(new LinkedHashMap<>()));
return new RequestParams(deepCopy, znodeVersion);
}
@ -225,8 +226,7 @@ public class RequestParams implements MapSerializable {
}
@Override
public Map<String, Object> toMap() {
LinkedHashMap result = new LinkedHashMap();
public Map<String, Object> toMap(Map<String, Object> result) {
result.putAll(defaults);
if (appends != null) result.put(APPENDS, appends);
if (invariants != null) result.put(INVARIANTS, invariants);

View File

@ -47,6 +47,7 @@ import org.apache.lucene.index.IndexDeletionPolicy;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.util.Version;
import org.apache.solr.cloud.ZkSolrResourceLoader;
import org.apache.solr.common.MapSerializable;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.util.IOUtils;
@ -537,8 +538,7 @@ public class SolrConfig extends Config implements MapSerializable {
}
@Override
public Map<String, Object> toMap() {
LinkedHashMap map = new LinkedHashMap();
public Map<String, Object> toMap(Map<String, Object> map) {
map.put("agentId", agentId);
map.put("serviceUrl", serviceUrl);
map.put("rootName", rootName);
@ -561,7 +561,7 @@ public class SolrConfig extends Config implements MapSerializable {
= Pattern.compile("\\bmax-age=(\\d+)");
@Override
public Map<String, Object> toMap() {
public Map<String, Object> toMap(Map<String, Object> map) {
return makeMap("never304", never304,
"etagSeed", etagSeed,
"lastModFrom", lastModFrom.name().toLowerCase(Locale.ROOT),
@ -675,7 +675,7 @@ public class SolrConfig extends Config implements MapSerializable {
@Override
public Map<String, Object> toMap() {
public Map<String, Object> toMap(Map<String, Object> map) {
LinkedHashMap result = new LinkedHashMap();
result.put("indexWriter", makeMap("closeWaitsForMerges", indexWriterCloseWaitsForMerges));
result.put("commitWithin", makeMap("softCommit", commitWithinSoftCommit));
@ -841,11 +841,10 @@ public class SolrConfig extends Config implements MapSerializable {
}
@Override
public Map<String, Object> toMap() {
LinkedHashMap result = new LinkedHashMap();
public Map<String, Object> toMap(Map<String, Object> result) {
if (getZnodeVersion() > -1) result.put(ZNODEVER, getZnodeVersion());
result.put("luceneMatchVersion", luceneMatchVersion);
result.put("updateHandler", getUpdateHandlerInfo().toMap());
result.put("updateHandler", getUpdateHandlerInfo());
Map m = new LinkedHashMap();
result.put("query", m);
m.put("useFilterForSortedQuery", useFilterForSortedQuery);
@ -853,7 +852,7 @@ public class SolrConfig extends Config implements MapSerializable {
m.put("queryResultMaxDocsCached", queryResultMaxDocsCached);
m.put("enableLazyFieldLoading", enableLazyFieldLoading);
m.put("maxBooleanClauses", booleanQueryMaxClauseCount);
if (jmxConfig != null) result.put("jmx", jmxConfig.toMap());
if (jmxConfig != null) result.put("jmx", jmxConfig);
for (SolrPluginInfo plugin : plugins) {
List<PluginInfo> infos = getPluginInfos(plugin.clazz.getName());
if (infos == null || infos.isEmpty()) continue;
@ -861,16 +860,16 @@ public class SolrConfig extends Config implements MapSerializable {
tag = tag.replace("/", "");
if (plugin.options.contains(PluginOpts.REQUIRE_NAME)) {
LinkedHashMap items = new LinkedHashMap();
for (PluginInfo info : infos) items.put(info.name, info.toMap());
for (PluginInfo info : infos) items.put(info.name, info);
for (Map.Entry e : overlay.getNamedPlugins(plugin.tag).entrySet()) items.put(e.getKey(), e.getValue());
result.put(tag, items);
} else {
if (plugin.options.contains(MULTI_OK)) {
ArrayList<Map> l = new ArrayList<>();
for (PluginInfo info : infos) l.add(info.toMap());
ArrayList<MapSerializable> l = new ArrayList<>();
for (PluginInfo info : infos) l.add(info);
result.put(tag, l);
} else {
result.put(tag, infos.get(0).toMap());
result.put(tag, infos.get(0));
}
}
@ -879,15 +878,15 @@ public class SolrConfig extends Config implements MapSerializable {
addCacheConfig(m, filterCacheConfig, queryResultCacheConfig, documentCacheConfig, fieldValueCacheConfig);
if (jmxConfig != null) result.put("jmx", jmxConfig.toMap());
if (jmxConfig != null) result.put("jmx", jmxConfig);
m = new LinkedHashMap();
result.put("requestDispatcher", m);
m.put("handleSelect", handleSelect);
if (httpCachingConfig != null) m.put("httpCaching", httpCachingConfig.toMap());
if (httpCachingConfig != null) m.put("httpCaching", httpCachingConfig);
m.put("requestParsers", makeMap("multipartUploadLimitKB", multipartUploadLimitKB,
"formUploadLimitKB", formUploadLimitKB,
"addHttpRequestToContext", addHttpRequestToContext));
if (indexConfig != null) result.put("indexConfig", indexConfig.toMap());
if (indexConfig != null) result.put("indexConfig", indexConfig);
m = new LinkedHashMap();
result.put("peerSync", m);
@ -900,7 +899,7 @@ public class SolrConfig extends Config implements MapSerializable {
private void addCacheConfig(Map queryMap, CacheConfig... cache) {
if (cache == null) return;
for (CacheConfig config : cache) if (config != null) queryMap.put(config.getNodeName(), config.toMap());
for (CacheConfig config : cache) if (config != null) queryMap.put(config.getNodeName(), config);
}

View File

@ -168,7 +168,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
resp.add("config", getConfigDetails());
} else {
if (ConfigOverlay.NAME.equals(parts.get(1))) {
resp.add(ConfigOverlay.NAME, req.getCore().getSolrConfig().getOverlay().toMap());
resp.add(ConfigOverlay.NAME, req.getCore().getSolrConfig().getOverlay());
} else if (RequestParams.NAME.equals(parts.get(1))) {
if (parts.size() == 3) {
RequestParams params = req.getCore().getSolrConfig().getRequestParams();
@ -176,11 +176,11 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
Map m = new LinkedHashMap<>();
m.put(ZNODEVER, params.getZnodeVersion());
if (p != null) {
m.put(RequestParams.NAME, makeMap(parts.get(2), p.toMap()));
m.put(RequestParams.NAME, makeMap(parts.get(2), p.toMap(new LinkedHashMap<>())));
}
resp.add(SolrQueryResponse.NAME, m);
} else {
resp.add(SolrQueryResponse.NAME, req.getCore().getSolrConfig().getRequestParams().toMap());
resp.add(SolrQueryResponse.NAME, req.getCore().getSolrConfig().getRequestParams());
}
} else {
@ -233,14 +233,14 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
}
private Map<String, Object> getConfigDetails() {
Map<String, Object> map = req.getCore().getSolrConfig().toMap();
Map<String, Object> map = req.getCore().getSolrConfig().toMap(new LinkedHashMap<>());
Map reqHandlers = (Map) map.get(SolrRequestHandler.TYPE);
if (reqHandlers == null) map.put(SolrRequestHandler.TYPE, reqHandlers = new LinkedHashMap<>());
List<PluginInfo> plugins = req.getCore().getImplicitHandlers();
for (PluginInfo plugin : plugins) {
if (SolrRequestHandler.TYPE.equals(plugin.type)) {
if (!reqHandlers.containsKey(plugin.name)) {
reqHandlers.put(plugin.name, plugin.toMap());
reqHandlers.put(plugin.name, plugin.toMap(new LinkedHashMap<>()));
}
}
}
@ -354,7 +354,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
if (ops.isEmpty()) {
ZkController.touchConfDir(zkLoader);
} else {
log.debug("persisting params data : {}", Utils.toJSONString(params.toMap()));
log.debug("persisting params data : {}", Utils.toJSONString(params.toMap(new LinkedHashMap<>())));
int latestVersion = ZkController.persistConfigResourceToZooKeeper(zkLoader,
params.getZnodeVersion(), RequestParams.RESOURCE, params.toByteArray(), true);

View File

@ -631,7 +631,7 @@ public class RealTimeGetComponent extends SearchComponent
// and would avoid mismatch if documents are being actively indexed, especially during PeerSync
if (doFingerprint) {
IndexFingerprint fingerprint = IndexFingerprint.getFingerprint(req.getCore(), Long.MAX_VALUE);
rb.rsp.add("fingerprint", fingerprint.toObject());
rb.rsp.add("fingerprint", fingerprint);
}
try (UpdateLog.RecentUpdates recentUpdates = ulog.getRecentUpdates()) {
@ -695,7 +695,7 @@ public class RealTimeGetComponent extends SearchComponent
if (doFingerprint) {
long maxVersionForUpdate = Collections.min(versions, PeerSync.absComparator);
IndexFingerprint fingerprint = IndexFingerprint.getFingerprint(req.getCore(), Math.abs(maxVersionForUpdate));
rb.rsp.add("fingerprint", fingerprint.toObject());
rb.rsp.add("fingerprint", fingerprint);
}
List<Object> updates = new ArrayList<>(versions.size());

View File

@ -32,6 +32,7 @@ import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.stream.TupleStream;
import org.apache.solr.client.solrj.io.stream.expr.Explanation;
import org.apache.solr.common.EnumFieldValue;
import org.apache.solr.common.MapSerializable;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.util.Base64;
@ -121,7 +122,7 @@ public abstract class TextResponseWriter {
// to get a handler might be faster (but types must be exact to do that...)
// go in order of most common to least common
if (val==null) {
if (val == null) {
writeNull(name);
} else if (val instanceof String) {
writeStr(name, val.toString(), true);
@ -186,6 +187,9 @@ public abstract class TextResponseWriter {
writeStr(name, val.toString(), true);
} else if (val instanceof WriteableValue) {
((WriteableValue)val).write(name, this);
} else if (val instanceof MapSerializable) {
//todo find a better way to reuse the map more efficiently
writeMap(name, ((MapSerializable) val).toMap(new NamedList().asShallowMap()), false, true);
} else {
// default... for debugging only
writeStr(name, val.getClass().getName() + ':' + val.toString(), true);
@ -206,7 +210,7 @@ public abstract class TextResponseWriter {
// it may need special formatting. same for double.
writeFloat(name, ((Float)val).floatValue());
} else if (val instanceof Double) {
writeDouble(name, ((Double)val).doubleValue());
writeDouble(name, ((Double) val).doubleValue());
} else if (val instanceof Short) {
writeInt(name, val.toString());
} else if (val instanceof Byte) {
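From the caller's side, the new MapSerializable branch above means any such value can be handed straight to the response and is expanded just in time via the shallow-map view of an empty NamedList. A hedged sketch using the hypothetical ExampleStatus from the top of this page:

// Inside a hypothetical SearchComponent.process(ResponseBuilder rb):
rb.rsp.add("exampleStatus", new ExampleStatus(42));
// A JSON writer then emits it like any other map, roughly:
//   "exampleStatus":{"version":42}
// The writer, not the component, decides when toMap(...) runs.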

View File

@ -63,7 +63,7 @@ import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.Pair;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.Config;
import org.apache.solr.core.MapSerializable;
import org.apache.solr.common.MapSerializable;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.request.LocalSolrQueryRequest;
@ -1489,7 +1489,7 @@ public class IndexSchema {
@Override
public Map<String, Object> toMap() {
public Map<String, Object> toMap(Map<String, Object> map) {
return Stream.of(Handler.values())
.filter(it -> name == null || it.nameLower.equals(name))
.map(it -> new Pair<>(it.realName, it.fun.apply(this)))
@ -1507,7 +1507,7 @@ public class IndexSchema {
public Map<String, Object> getNamedPropertyValues(String name, SolrParams params) {
return new SchemaProps(name, params, this).toMap();
return new SchemaProps(name, params, this).toMap(new LinkedHashMap<>());
}

View File

@ -26,7 +26,7 @@ import java.util.Map;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.core.MapSerializable;
import org.apache.solr.common.MapSerializable;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.util.DOMUtil;
@ -148,7 +148,7 @@ public class CacheConfig implements MapSerializable{
}
@Override
public Map<String, Object> toMap() {
public Map<String, Object> toMap(Map<String, Object> map) {
Map result = Collections.unmodifiableMap(args);
return result;
}

View File

@ -26,6 +26,7 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.util.Bits;
import org.apache.solr.common.MapSerializable;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.Hash;
import org.apache.solr.common.util.NamedList;
@ -38,7 +39,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** @lucene.internal */
public class IndexFingerprint {
public class IndexFingerprint implements MapSerializable {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private long maxVersionSpecified;
@ -154,11 +155,8 @@ public class IndexFingerprint {
return cmp;
}
/**
* Create a generic object suitable for serializing with ResponseWriters
*/
public Object toObject() {
Map<String,Object> map = new LinkedHashMap<>();
@Override
public Map<String, Object> toMap(Map<String, Object> map) {
map.put("maxVersionSpecified", maxVersionSpecified);
map.put("maxVersionEncountered", maxVersionEncountered);
map.put("maxInHash", maxInHash);
@ -200,6 +198,6 @@ public class IndexFingerprint {
@Override
public String toString() {
return toObject().toString();
return toMap(new LinkedHashMap<>()).toString();
}
}

View File

@ -36,7 +36,7 @@ import org.apache.lucene.util.Version;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.Utils;
import org.apache.solr.core.DirectoryFactory;
import org.apache.solr.core.MapSerializable;
import org.apache.solr.common.MapSerializable;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrCore;
@ -187,7 +187,7 @@ public class SolrIndexConfig implements MapSerializable {
}
@Override
public Map<String, Object> toMap() {
public Map<String, Object> toMap(Map<String, Object> map) {
Map<String, Object> m = Utils.makeMap("useCompoundFile", effectiveUseCompoundFileSetting,
"maxBufferedDocs", maxBufferedDocs,
"maxMergeDocs", maxMergeDocs,
@ -196,13 +196,13 @@ public class SolrIndexConfig implements MapSerializable {
"writeLockTimeout", writeLockTimeout,
"lockType", lockType,
"infoStreamEnabled", infoStream != InfoStream.NO_OUTPUT);
if(mergeSchedulerInfo != null) m.put("mergeScheduler",mergeSchedulerInfo.toMap());
if(mergeSchedulerInfo != null) m.put("mergeScheduler",mergeSchedulerInfo);
if (mergePolicyInfo != null) {
m.put("mergePolicy", mergePolicyInfo.toMap());
m.put("mergePolicy", mergePolicyInfo);
} else if (mergePolicyFactoryInfo != null) {
m.put("mergePolicyFactory", mergePolicyFactoryInfo.toMap());
m.put("mergePolicyFactory", mergePolicyFactoryInfo);
}
if(mergedSegmentWarmerInfo != null) m.put("mergedSegmentWarmer",mergedSegmentWarmerInfo.toMap());
if(mergedSegmentWarmerInfo != null) m.put("mergedSegmentWarmer",mergedSegmentWarmerInfo);
return m;
}

View File

@ -19,6 +19,7 @@ package org.apache.solr.core;
import javax.xml.xpath.XPathConstants;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.IndexWriterConfig;
@ -147,8 +148,8 @@ public class TestConfig extends SolrTestCaseJ4 {
assertNull("non-null mergedSegmentWarmer", iwc.getMergedSegmentWarmer());
final int numDefaultsMapped = sic.toMap().size();
assertEquals("numDefaultsTested vs. numDefaultsMapped+numNullDefaults ="+sic.toMap().keySet(), numDefaultsTested, numDefaultsMapped+numNullDefaults);
final int numDefaultsMapped = sic.toMap(new LinkedHashMap<>()).size();
assertEquals("numDefaultsTested vs. numDefaultsMapped+numNullDefaults ="+sic.toMap(new LinkedHashMap<>()).keySet(), numDefaultsTested, numDefaultsMapped+numNullDefaults);
}

View File

@ -17,6 +17,7 @@
package org.apache.solr.update;
import java.nio.file.Path;
import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.lucene.index.ConcurrentMergeScheduler;
@ -27,6 +28,7 @@ import org.apache.lucene.index.TieredMergePolicy;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.MapSerializable;
import org.apache.solr.core.DirectoryFactory;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.TestMergePolicyConfig;
@ -152,7 +154,7 @@ public class SolrIndexConfigTest extends SolrTestCaseJ4 {
}
assertNotNull(solrIndexConfig.mergeSchedulerInfo);
Map<String, Object> m = solrIndexConfig.toMap();
Map<String, Object> m = solrIndexConfig.toMap(new LinkedHashMap<>());
int mSizeExpected = 0;
++mSizeExpected; assertTrue(m.get("useCompoundFile") instanceof Boolean);
@ -180,18 +182,18 @@ public class SolrIndexConfigTest extends SolrTestCaseJ4 {
assertFalse(Boolean.valueOf(m.get("infoStreamEnabled").toString()).booleanValue());
}
++mSizeExpected; assertTrue(m.get("mergeScheduler") instanceof Map);
++mSizeExpected; assertTrue(m.get("mergeScheduler") instanceof MapSerializable);
if (solrConfigFileName.equals(solrConfigFileNameTieredMergePolicyFactory) ||
solrConfigFileName.equals(solrConfigFileNameWarmerRandomMergePolicyFactory)) {
assertNull(m.get("mergePolicy"));
++mSizeExpected; assertTrue(m.get("mergePolicyFactory") instanceof Map);
++mSizeExpected; assertTrue(m.get("mergePolicyFactory") instanceof MapSerializable);
} else {
++mSizeExpected; assertTrue(m.get("mergePolicy") instanceof Map);
++mSizeExpected; assertTrue(m.get("mergePolicy") instanceof MapSerializable);
assertNull(m.get("mergePolicyFactory"));
}
if (solrConfigFileName.equals(solrConfigFileNameWarmerRandomMergePolicy) ||
solrConfigFileName.equals(solrConfigFileNameWarmerRandomMergePolicyFactory)) {
++mSizeExpected; assertTrue(m.get("mergedSegmentWarmer") instanceof Map);
++mSizeExpected; assertTrue(m.get("mergedSegmentWarmer") instanceof MapSerializable);
} else {
assertNull(m.get("mergedSegmentWarmer"));
}

View File

@ -14,10 +14,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.core;
package org.apache.solr.common;
import java.util.Map;
/** This is to facilitate just-in-time creation of objects before they are
* written to the response.
*/
public interface MapSerializable {
public Map<String, Object> toMap();
/** Use the passed map to minimize object creation.
* Do not keep a reference to the passed map for later reuse;
* it may be reused by the framework.
*/
Map toMap(Map<String, Object> map);
}
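Implementations in this commit use the argument in two different ways, and both satisfy the interface; the only hard rule is the one stated above: do not retain the passed map. A sketch of both styles, with znodeVersion standing in for whatever state the class exposes:

// (a) Fill and return the map supplied by the writer (e.g. PluginInfo, RequestParams):
@Override
public Map<String, Object> toMap(Map<String, Object> map) {
  map.put("znodeVersion", znodeVersion);
  return map;
}

// (b) Ignore it and build your own (e.g. ConfigOverlay, CacheConfig):
@Override
public Map<String, Object> toMap(Map<String, Object> map) {
  Map<String, Object> result = new LinkedHashMap<>();
  result.put("znodeVersion", znodeVersion);
  return result;
}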

View File

@ -33,6 +33,7 @@ import java.util.Map;
import java.util.Map.Entry;
import org.apache.solr.common.EnumFieldValue;
import org.apache.solr.common.MapSerializable;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrInputDocument;
@ -336,6 +337,12 @@ public class JavaBinCodec {
writeMapEntry((Map.Entry)val);
return true;
}
if (val instanceof MapSerializable) {
//todo find a better way to reuse the map more efficiently
writeMap(((MapSerializable) val).toMap(new NamedList().asShallowMap()));
return true;
}
return false;
}
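A hedged round-trip sketch of what the new branch means for javabin: the object travels as a plain map, so the receiving side gets a Map back rather than the original class. Exception handling is omitted and ExampleStatus is the hypothetical class sketched near the top of this page.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.solr.common.util.JavaBinCodec;

ByteArrayOutputStream out = new ByteArrayOutputStream();
new JavaBinCodec().marshal(new ExampleStatus(42), out);   // written via toMap(...)
Object decoded = new JavaBinCodec().unmarshal(new ByteArrayInputStream(out.toByteArray()));
// decoded is a Map containing {version=42}; the MapSerializable type does not survive the trip.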

View File

@ -441,9 +441,14 @@ public class NamedList<T> implements Cloneable, Serializable, Iterable<Map.Entry
@Override
public void putAll(Map m) {
boolean isEmpty = isEmpty();
for (Object o : m.entrySet()) {
Map.Entry e = (Entry) o;
put(e.getKey() == null ? null : e.getKey().toString(), (T) e.getValue());
if (isEmpty) {// we know that there are no duplicates
add((String) e.getKey(), (T) e.getValue());
} else {
put(e.getKey() == null ? null : e.getKey().toString(), (T) e.getValue());
}
}
}
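This putAll() change is what keeps the new NamedList().asShallowMap() target used by the writers cheap to fill: when the backing list is empty there can be no duplicate keys, so each entry is appended with add() instead of going through put(), which scans the list for an existing key on every call. A small sketch of the fast path:

// The map handed to toMap(...) by the writers is a view over an empty NamedList.
Map<String, Object> target = new NamedList<>().asShallowMap();

Map<String, Object> src = new LinkedHashMap<>();
src.put("a", 1);
src.put("b", 2);

target.putAll(src);   // empty backing list: entries are appended via add(), no duplicate lookup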