diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/index/shard/service/InternalIndexShard.java b/modules/elasticsearch/src/main/java/org/elasticsearch/index/shard/service/InternalIndexShard.java
index 44419fda5b7..7c72725f0dc 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/index/shard/service/InternalIndexShard.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/index/shard/service/InternalIndexShard.java
@@ -526,114 +526,4 @@ public class InternalIndexShard extends AbstractIndexShardComponent implements I
}
}
}
-
- // I wrote all this code, and now there is no need for it since dynamic mappings are autoamtically
- // broadcast to all the cluster when updated, so we won't be in a state when the mappings are not up to
- // date, in any case, lets leave it here for now
-
-// /**
-// * The mapping sniffer reads docs from the index and introduces them into the mapping service. This is
-// * because of dynamic fields and we want to reintroduce them.
-// *
-// * Note, this is done on the shard level, we might have other dynamic fields in other shards, but
-// * this will be taken care off in another component.
-// */
-// private class ShardMappingSniffer implements Runnable {
-// @Override public void run() {
-// engine.refresh(new Engine.Refresh(true));
-//
-// TermEnum termEnum = null;
-// Engine.Searcher searcher = searcher();
-// try {
-// List typeNames = newArrayList();
-// termEnum = searcher.reader().terms(new Term(TypeFieldMapper.NAME, ""));
-// while (true) {
-// Term term = termEnum.term();
-// if (term == null) {
-// break;
-// }
-// if (!term.field().equals(TypeFieldMapper.NAME)) {
-// break;
-// }
-// typeNames.add(term.text());
-// termEnum.next();
-// }
-//
-// logger.debug("Sniffing mapping for [{}]", typeNames);
-//
-// for (final String type : typeNames) {
-// threadPool.execute(new Runnable() {
-// @Override public void run() {
-// Engine.Searcher searcher = searcher();
-// try {
-// Query query = new ConstantScoreQuery(filterCache.cache(new TermFilter(new Term(TypeFieldMapper.NAME, type))));
-// long typeCount = Lucene.count(searcher().searcher(), query, -1);
-//
-// int marker = (int) (typeCount / mappingSnifferDocs);
-// if (marker == 0) {
-// marker = 1;
-// }
-// final int fMarker = marker;
-// searcher.searcher().search(query, new Collector() {
-//
-// private final FieldSelector fieldSelector = new UidAndSourceFieldSelector();
-// private int counter = 0;
-// private IndexReader reader;
-//
-// @Override public void setScorer(Scorer scorer) throws IOException {
-// }
-//
-// @Override public void collect(int doc) throws IOException {
-// if (state == IndexShardState.CLOSED) {
-// throw new IOException("CLOSED");
-// }
-// if (++counter == fMarker) {
-// counter = 0;
-//
-// Document document = reader.document(doc, fieldSelector);
-// Uid uid = Uid.createUid(document.get(UidFieldMapper.NAME));
-// String source = document.get(SourceFieldMapper.NAME);
-//
-// mapperService.type(uid.type()).parse(uid.type(), uid.id(), source);
-// }
-// }
-//
-// @Override public void setNextReader(IndexReader reader, int docBase) throws IOException {
-// this.reader = reader;
-// }
-//
-// @Override public boolean acceptsDocsOutOfOrder() {
-// return true;
-// }
-// });
-// } catch (IOException e) {
-// if (e.getMessage().equals("CLOSED")) {
-// // ignore, we got closed
-// } else {
-// logger.warn("Failed to sniff mapping for type [" + type + "]", e);
-// }
-// } finally {
-// searcher.release();
-// }
-// }
-// });
-// }
-// } catch (IOException e) {
-// if (e.getMessage().equals("CLOSED")) {
-// // ignore, we got closed
-// } else {
-// logger.warn("Failed to sniff mapping", e);
-// }
-// } finally {
-// if (termEnum != null) {
-// try {
-// termEnum.close();
-// } catch (IOException e) {
-// // ignore
-// }
-// }
-// searcher.release();
-// }
-// }
-// }
}
\ No newline at end of file