remove snappy support

relates to #2459
This commit is contained in:
Shay Banon 2012-12-03 12:30:13 +01:00
parent 677e6ce4ef
commit f17ad829ac
19 changed files with 1 additions and 930 deletions

10
pom.xml
View File

@ -219,14 +219,6 @@
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
<version>1.0.4.1</version>
<scope>compile</scope>
<optional>true</optional>
</dependency>
<!-- We don't use this since the publish pom is then messed up -->
<!--
<dependency>
@ -544,7 +536,7 @@
</data>
<data>
<src>${project.build.directory}/lib</src>
<includes>lucene*, log4j*, jna*, snappy-java-*, spatial4j*, jts*</includes>
<includes>lucene*, log4j*, jna*, spatial4j*, jts*</includes>
<type>directory</type>
<mapper>
<type>perm</type>

View File

@ -7,7 +7,6 @@
<include>org.apache.lucene:lucene*</include>
<include>log4j:log4j</include>
<include>net.java.dev.jna:jna</include>
<include>org.xerial.snappy:snappy-java</include>
<include>com.spatial4j:spatial4j</include>
<include>com.vividsolutions:jts</include>
</includes>

View File

@ -27,9 +27,6 @@ import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.compress.lzf.LZFCompressor;
import org.elasticsearch.common.compress.snappy.UnavailableSnappyCompressor;
import org.elasticsearch.common.compress.snappy.xerial.XerialSnappy;
import org.elasticsearch.common.compress.snappy.xerial.XerialSnappyCompressor;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.io.stream.CachedStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
@ -54,19 +51,8 @@ public class CompressorFactory {
static {
List<Compressor> compressorsX = Lists.newArrayList();
compressorsX.add(LZF);
boolean addedSnappy = false;
if (XerialSnappy.available) {
compressorsX.add(new XerialSnappyCompressor());
addedSnappy = true;
} else {
Loggers.getLogger(CompressorFactory.class).debug("failed to load xerial snappy-java", XerialSnappy.failure);
}
if (!addedSnappy) {
compressorsX.add(new UnavailableSnappyCompressor());
}
compressors = compressorsX.toArray(new Compressor[compressorsX.size()]);
MapBuilder<String, Compressor> compressorsByTypeX = MapBuilder.newMapBuilder();
for (Compressor compressor : compressors) {
compressorsByTypeX.put(compressor.type(), compressor);

View File

@ -1,67 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.compress.snappy;
import org.apache.lucene.store.IndexInput;
import org.elasticsearch.common.compress.CompressedIndexInput;
import java.io.IOException;
import java.util.Arrays;
/**
 * Base {@link CompressedIndexInput} for snappy-compressed Lucene files: reads and
 * validates the stream header (magic bytes plus chunk sizing) and allocates the
 * shared decode buffers. Per-chunk decoding is left to concrete subclasses.
 */
public abstract class SnappyCompressedIndexInput extends CompressedIndexInput<SnappyCompressorContext> {

    // Populated from the stream header in readHeader(IndexInput).
    protected int chunkSize;                // uncompressed length of each chunk
    protected int maxCompressedChunkLength; // upper bound on a chunk's compressed length

    // Scratch buffer the raw (possibly compressed) chunk bytes are read into.
    protected byte[] inputBuffer;

    public SnappyCompressedIndexInput(IndexInput in, SnappyCompressorContext context) throws IOException {
        super(in, context);
        // NOTE(review): the buffer sizing below is only meaningful if the superclass
        // constructor calls readHeader(in) (populating chunkSize /
        // maxCompressedChunkLength) before returning — confirm against CompressedIndexInput.
        this.uncompressed = new byte[chunkSize];
        this.uncompressedLength = chunkSize;
        this.inputBuffer = new byte[Math.max(chunkSize, maxCompressedChunkLength)];
    }

    @Override
    protected void readHeader(IndexInput in) throws IOException {
        // Verify the snappy magic bytes before trusting the chunk-size fields.
        byte[] header = new byte[SnappyCompressor.HEADER.length];
        in.readBytes(header, 0, header.length);
        if (!Arrays.equals(header, SnappyCompressor.HEADER)) {
            throw new IOException("wrong snappy compressed header [" + Arrays.toString(header) + "]");
        }
        this.chunkSize = in.readVInt();
        this.maxCompressedChunkLength = in.readVInt();
    }

    @Override
    protected void doClose() throws IOException {
        // nothing to do here
    }

    @Override
    public IndexInput clone() {
        SnappyCompressedIndexInput cloned = (SnappyCompressedIndexInput) super.clone();
        // Give the clone its own scratch buffer so both inputs can be read
        // independently; the buffer contents are deliberately not copied.
        cloned.inputBuffer = new byte[inputBuffer.length];
        return cloned;
    }
}

View File

@ -1,64 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.compress.snappy;
import com.ning.compress.BufferRecycler;
import org.apache.lucene.store.IndexOutput;
import org.elasticsearch.common.compress.CompressedIndexOutput;
import java.io.IOException;
/**
 * Base {@link CompressedIndexOutput} for the snappy format: writes the stream header
 * and manages the recycled write buffers. Per-chunk compression is left to subclasses.
 */
public abstract class SnappyCompressedIndexOutput extends CompressedIndexOutput<SnappyCompressorContext> {

    // Recycler the buffers below are borrowed from; they are returned in doClose().
    protected final BufferRecycler recycler;

    // Scratch buffer the compressed form of a chunk is encoded into.
    protected byte[] compressedBuffer;

    public SnappyCompressedIndexOutput(IndexOutput out, SnappyCompressorContext context) throws IOException {
        super(out, context);
        this.recycler = BufferRecycler.instance();
        this.uncompressed = this.recycler.allocOutputBuffer(context.compressChunkLength());
        this.uncompressedLength = context.compressChunkLength();
        this.compressedBuffer = recycler.allocEncodingBuffer(context.compressMaxCompressedChunkLength());
    }

    @Override
    protected void writeHeader(IndexOutput out) throws IOException {
        // Magic bytes followed by the chunk sizing — the mirror image of
        // SnappyCompressedIndexInput.readHeader.
        out.writeBytes(SnappyCompressor.HEADER, SnappyCompressor.HEADER.length);
        out.writeVInt(context.compressChunkLength());
        out.writeVInt(context.compressMaxCompressedChunkLength());
    }

    @Override
    protected void doClose() throws IOException {
        // Return both borrowed buffers to the recycler; the fields are nulled first
        // so a repeated close cannot release the same buffer twice.
        byte[] buf = uncompressed;
        if (buf != null) {
            uncompressed = null;
            recycler.releaseOutputBuffer(buf);
        }
        buf = compressedBuffer;
        if (buf != null) {
            compressedBuffer = null;
            recycler.releaseEncodeBuffer(buf);
        }
    }
}

View File

@ -1,72 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.compress.snappy;
import com.ning.compress.BufferRecycler;
import org.elasticsearch.common.compress.CompressedStreamInput;
import org.elasticsearch.common.io.stream.StreamInput;
import java.io.IOException;
import java.util.Arrays;
/**
 * Base {@link CompressedStreamInput} for the snappy format: reads and validates the
 * stream header and manages the recycled read buffers. Per-chunk decoding is left
 * to concrete subclasses.
 */
public abstract class SnappyCompressedStreamInput extends CompressedStreamInput<SnappyCompressorContext> {

    // Recycler the buffers below are borrowed from; they are returned in doClose().
    protected final BufferRecycler recycler;

    // Populated from the stream header in readHeader(StreamInput).
    protected int chunkSize;                // uncompressed length of each chunk
    protected int maxCompressedChunkLength; // upper bound on a chunk's compressed length

    // Scratch buffer the raw (possibly compressed) chunk bytes are read into.
    protected byte[] inputBuffer;

    public SnappyCompressedStreamInput(StreamInput in, SnappyCompressorContext context) throws IOException {
        super(in, context);
        this.recycler = BufferRecycler.instance();
        // NOTE(review): assumes the superclass constructor invokes readHeader(in)
        // before this point, so chunkSize/maxCompressedChunkLength are set — confirm.
        this.uncompressed = recycler.allocDecodeBuffer(Math.max(chunkSize, maxCompressedChunkLength));
        this.inputBuffer = recycler.allocInputBuffer(Math.max(chunkSize, maxCompressedChunkLength));
    }

    @Override
    public void readHeader(StreamInput in) throws IOException {
        // Verify the snappy magic bytes before trusting the chunk-size fields.
        byte[] header = new byte[SnappyCompressor.HEADER.length];
        in.readBytes(header, 0, header.length);
        if (!Arrays.equals(header, SnappyCompressor.HEADER)) {
            throw new IOException("wrong snappy compressed header [" + Arrays.toString(header) + "]");
        }
        this.chunkSize = in.readVInt();
        this.maxCompressedChunkLength = in.readVInt();
    }

    @Override
    protected void doClose() throws IOException {
        // Return both borrowed buffers; null the fields first so a repeated close
        // cannot release the same buffer twice.
        byte[] buf = uncompressed;
        if (buf != null) {
            uncompressed = null;
            // BUGFIX: previously released the just-nulled field (always null), so the
            // buffer was never actually returned to the recycler. Release the saved
            // reference instead, matching SnappyCompressedIndexOutput.doClose.
            recycler.releaseDecodeBuffer(buf);
        }
        buf = inputBuffer;
        if (buf != null) {
            inputBuffer = null;
            // BUGFIX: same null-field leak as above — release the saved reference.
            recycler.releaseInputBuffer(buf);
        }
    }
}

View File

@ -1,64 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.compress.snappy;
import com.ning.compress.BufferRecycler;
import org.elasticsearch.common.compress.CompressedStreamOutput;
import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException;
/**
 * Base {@link CompressedStreamOutput} for the snappy format: writes the stream
 * header and manages the recycled write buffers. Per-chunk compression is left
 * to concrete subclasses.
 */
public abstract class SnappyCompressedStreamOutput extends CompressedStreamOutput<SnappyCompressorContext> {

    // Recycler the buffers below are borrowed from; they are returned in doClose().
    protected final BufferRecycler recycler;

    // Scratch buffer the compressed form of a chunk is encoded into.
    protected byte[] compressedBuffer;

    public SnappyCompressedStreamOutput(StreamOutput out, SnappyCompressorContext context) throws IOException {
        super(out, context);
        this.recycler = BufferRecycler.instance();
        this.uncompressed = this.recycler.allocOutputBuffer(context.compressChunkLength());
        this.uncompressedLength = context.compressChunkLength();
        this.compressedBuffer = recycler.allocEncodingBuffer(context.compressMaxCompressedChunkLength());
    }

    @Override
    public void writeHeader(StreamOutput out) throws IOException {
        // Magic bytes followed by the chunk sizing — the mirror image of
        // SnappyCompressedStreamInput.readHeader.
        out.writeBytes(SnappyCompressor.HEADER);
        out.writeVInt(context.compressChunkLength());
        out.writeVInt(context.compressMaxCompressedChunkLength());
    }

    @Override
    protected void doClose() throws IOException {
        // Return both borrowed buffers; the fields are nulled first so a repeated
        // close cannot release the same buffer twice.
        byte[] buf = uncompressed;
        if (buf != null) {
            uncompressed = null;
            recycler.releaseOutputBuffer(buf);
        }
        buf = compressedBuffer;
        if (buf != null) {
            compressedBuffer = null;
            recycler.releaseEncodeBuffer(buf);
        }
    }
}

View File

@ -1,142 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.compress.snappy;
import org.apache.lucene.store.IndexInput;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.Compressor;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.io.stream.BytesStreamInput;
import org.elasticsearch.common.io.stream.CachedStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.jboss.netty.buffer.ChannelBuffer;
import java.io.IOException;
/**
 * Base snappy {@link Compressor}: owns the stream header/format detection and the
 * byte-array compress/uncompress round trips. The actual codec binding (and the
 * max-compressed-length bound) is supplied by subclasses such as the xerial one.
 */
public abstract class SnappyCompressor implements Compressor {

    // Magic bytes written at the start of every snappy-compressed stream/file.
    public static final byte[] HEADER = {'s', 'n', 'a', 'p', 'p', 'y', 0};

    // Current chunk sizing; replaced wholesale by configure(Settings).
    protected SnappyCompressorContext compressorContext;

    // default block size (32k)
    static final int DEFAULT_CHUNK_SIZE = 1 << 15;

    protected SnappyCompressor() {
        this.compressorContext = new SnappyCompressorContext(DEFAULT_CHUNK_SIZE, maxCompressedLength(DEFAULT_CHUNK_SIZE));
    }

    /**
     * Re-reads the chunk size from {@code compress.snappy.chunk_size} (defaulting to
     * the current value) and rebuilds the context with the matching compressed bound.
     */
    @Override
    public void configure(Settings settings) {
        int chunkLength = (int) settings.getAsBytesSize("compress.snappy.chunk_size", new ByteSizeValue(compressorContext.compressChunkLength())).bytes();
        int maxCompressedChunkLength = maxCompressedLength(chunkLength);
        this.compressorContext = new SnappyCompressorContext(chunkLength, maxCompressedChunkLength);
    }

    /** Worst-case compressed size for {@code length} uncompressed bytes (codec-specific). */
    protected abstract int maxCompressedLength(int length);

    @Override
    public boolean isCompressed(byte[] data, int offset, int length) {
        if (length < HEADER.length) {
            return false;
        }
        for (int i = 0; i < HEADER.length; i++) {
            if (data[offset + i] != HEADER[i]) {
                return false;
            }
        }
        return true;
    }

    @Override
    public boolean isCompressed(BytesReference bytes) {
        if (bytes.length() < HEADER.length) {
            return false;
        }
        for (int i = 0; i < HEADER.length; i++) {
            if (bytes.get(i) != HEADER[i]) {
                return false;
            }
        }
        return true;
    }

    @Override
    public boolean isCompressed(ChannelBuffer buffer) {
        if (buffer.readableBytes() < HEADER.length) {
            return false;
        }
        // Peek relative to the reader index without consuming any bytes.
        int offset = buffer.readerIndex();
        for (int i = 0; i < HEADER.length; i++) {
            if (buffer.getByte(offset + i) != HEADER[i]) {
                return false;
            }
        }
        return true;
    }

    @Override
    public boolean isCompressed(IndexInput in) throws IOException {
        long currentPointer = in.getFilePointer();
        // since we have some metadata before the first compressed header, we check on our specific header
        if (in.length() - currentPointer < (HEADER.length)) {
            return false;
        }
        for (int i = 0; i < HEADER.length; i++) {
            if (in.readByte() != HEADER[i]) {
                in.seek(currentPointer);
                return false;
            }
        }
        // restore the file pointer so the caller sees an untouched input
        in.seek(currentPointer);
        return true;
    }

    @Override
    public byte[] compress(byte[] data, int offset, int length) throws IOException {
        // this needs to be the same format as regular streams reading from it!
        CachedStreamOutput.Entry entry = CachedStreamOutput.popEntry();
        try {
            StreamOutput compressed = entry.bytes(this);
            compressed.writeBytes(data, offset, length);
            compressed.close();
            return entry.bytes().bytes().copyBytesArray().toBytes();
        } finally {
            CachedStreamOutput.pushEntry(entry);
        }
    }

    @Override
    public byte[] uncompress(byte[] data, int offset, int length) throws IOException {
        // Decode via the streaming path so any framing is handled uniformly.
        StreamInput compressed = streamInput(new BytesStreamInput(data, offset, length, false));
        CachedStreamOutput.Entry entry = CachedStreamOutput.popEntry();
        try {
            Streams.copy(compressed, entry.bytes());
            return entry.bytes().bytes().copyBytesArray().toBytes();
        } finally {
            CachedStreamOutput.pushEntry(entry);
        }
    }
}

View File

@ -1,43 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.compress.snappy;
import org.elasticsearch.common.compress.CompressorContext;
/**
 * Immutable snappy chunk-sizing settings: the uncompressed chunk length and the
 * worst-case compressed length of such a chunk.
 */
public class SnappyCompressorContext implements CompressorContext {

    private final int chunkLen;
    private final int maxCompressedChunkLen;

    public SnappyCompressorContext(int compressChunkLength, int compressMaxCompressedChunkLength) {
        this.chunkLen = compressChunkLength;
        this.maxCompressedChunkLen = compressMaxCompressedChunkLength;
    }

    /** Uncompressed length of each chunk, in bytes. */
    public int compressChunkLength() {
        return chunkLen;
    }

    /** Worst-case compressed length of a single chunk, in bytes. */
    public int compressMaxCompressedChunkLength() {
        return maxCompressedChunkLen;
    }
}

View File

@ -1,77 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.compress.snappy;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.compress.CompressedIndexInput;
import org.elasticsearch.common.compress.CompressedIndexOutput;
import org.elasticsearch.common.compress.CompressedStreamInput;
import org.elasticsearch.common.compress.CompressedStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException;
/**
 * Stand-in snappy compressor registered when the snappy library cannot be loaded:
 * it still reports the "snappy" type (so the format is recognized), but every
 * operational method fails with an illegal-state exception.
 */
public class UnavailableSnappyCompressor extends SnappyCompressor {

    /** Builds the exception thrown by every operational method. */
    private static ElasticSearchIllegalStateException unavailable() {
        return new ElasticSearchIllegalStateException("snappy unavailable");
    }

    @Override
    public String type() {
        return "snappy";
    }

    @Override
    protected int maxCompressedLength(int length) {
        // no real codec to ask — the identity bound is sufficient for this stub
        return length;
    }

    @Override
    public byte[] uncompress(byte[] data, int offset, int length) throws IOException {
        throw unavailable();
    }

    @Override
    public byte[] compress(byte[] data, int offset, int length) throws IOException {
        throw unavailable();
    }

    @Override
    public CompressedStreamInput streamInput(StreamInput in) throws IOException {
        throw unavailable();
    }

    @Override
    public CompressedStreamOutput streamOutput(StreamOutput out) throws IOException {
        throw unavailable();
    }

    @Override
    public CompressedIndexInput indexInput(IndexInput in) throws IOException {
        throw unavailable();
    }

    @Override
    public CompressedIndexOutput indexOutput(IndexOutput out) throws IOException {
        throw unavailable();
    }
}

View File

@ -1,54 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.compress.snappy.xerial;
import com.google.common.io.NullOutputStream;
import org.xerial.snappy.Snappy;
import java.io.PrintStream;
/**
 * One-time probe of the xerial snappy-java native library: attempts a tiny
 * compress/uncompress round trip at class-load time and records the outcome in
 * the two static fields below.
 */
public class XerialSnappy {

    // true when the probe round trip succeeded, i.e. the native library loaded
    public static final boolean available;
    // the Throwable that made snappy unavailable, or null when available is true
    public static final Throwable failure;

    static {
        Throwable failureX = null;
        boolean availableX;
        // Yuck!, we need to do this since snappy 1.0.4.1 does e.printStackTrace
        // when failing to load the snappy library, and we don't want it displayed...
        PrintStream err = System.err;
        try {
            System.setErr(new PrintStream(new NullOutputStream()));
            byte[] tests = Snappy.compress("test");
            Snappy.uncompressString(tests);
            availableX = true;
        } catch (Throwable e) {
            // deliberately broad: any linkage or load error means snappy is unusable
            availableX = false;
            failureX = e;
        } finally {
            // always restore the real stderr, even when the probe throws
            System.setErr(err);
        }
        available = availableX;
        failure = failureX;
    }
}

View File

@ -1,49 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.compress.snappy.xerial;
import org.apache.lucene.store.IndexInput;
import org.elasticsearch.common.compress.snappy.SnappyCompressedIndexInput;
import org.elasticsearch.common.compress.snappy.SnappyCompressorContext;
import org.xerial.snappy.Snappy;
import java.io.IOException;
/**
 * Snappy-compressed index input backed by the xerial snappy-java codec.
 * Each chunk is prefixed with a compressed-flag byte and a vInt length.
 */
public class XerialSnappyCompressedIndexInput extends SnappyCompressedIndexInput {

    public XerialSnappyCompressedIndexInput(IndexInput in, SnappyCompressorContext context) throws IOException {
        super(in, context);
    }

    @Override
    protected int uncompress(IndexInput in, byte[] out) throws IOException {
        final boolean chunkIsCompressed = in.readByte() != 0;
        final int length = in.readVInt();
        if (chunkIsCompressed) {
            // stage the raw bytes, then snappy-decode them into out
            in.readBytes(inputBuffer, 0, length);
            return Snappy.rawUncompress(inputBuffer, 0, length, out, 0);
        }
        // stored chunk: copy straight through
        in.readBytes(out, 0, length);
        return length;
    }
}

View File

@ -1,51 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.compress.snappy.xerial;
import org.apache.lucene.store.IndexOutput;
import org.elasticsearch.common.compress.snappy.SnappyCompressedIndexOutput;
import org.elasticsearch.common.compress.snappy.SnappyCompressorContext;
import org.xerial.snappy.Snappy;
import java.io.IOException;
/**
 * Snappy-compressed index output backed by the xerial snappy-java codec.
 * Writes each chunk as a flag byte (1 = compressed, 0 = stored), a vInt
 * length, and the chunk bytes.
 */
public class XerialSnappyCompressedIndexOutput extends SnappyCompressedIndexOutput {

    public XerialSnappyCompressedIndexOutput(IndexOutput out, SnappyCompressorContext context) throws IOException {
        super(out, context);
    }

    @Override
    protected void compress(byte[] data, int offset, int len, IndexOutput out) throws IOException {
        final int compressedLen = Snappy.rawCompress(data, offset, len, compressedBuffer, 0);
        // keep the compressed form only when it saves more than 1/8th (12.5%) of the input
        if (compressedLen < (len - (len / 8))) {
            out.writeByte((byte) 1);
            out.writeVInt(compressedLen);
            out.writeBytes(compressedBuffer, 0, compressedLen);
        } else {
            out.writeByte((byte) 0);
            out.writeVInt(len);
            out.writeBytes(data, offset, len);
        }
    }
}

View File

@ -1,54 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.compress.snappy.xerial;
import org.elasticsearch.common.compress.snappy.SnappyCompressedStreamInput;
import org.elasticsearch.common.compress.snappy.SnappyCompressorContext;
import org.elasticsearch.common.io.stream.StreamInput;
import org.xerial.snappy.Snappy;
import java.io.IOException;
/**
 * Snappy-compressed stream input backed by the xerial snappy-java codec.
 * Each chunk is prefixed with a boolean-style flag byte and a vInt length;
 * end of stream is signalled by read() returning -1.
 */
public class XerialSnappyCompressedStreamInput extends SnappyCompressedStreamInput {

    public XerialSnappyCompressedStreamInput(StreamInput in, SnappyCompressorContext context) throws IOException {
        super(in, context);
    }

    @Override
    protected int uncompress(StreamInput in, byte[] out) throws IOException {
        final int marker = in.read();
        // at the "tip" of the stream: report zero uncompressed bytes
        if (marker == -1) {
            return 0;
        }
        final int length = in.readVInt();
        if (marker == 1) {
            // compressed chunk: stage the raw bytes, then snappy-decode into out
            in.readBytes(inputBuffer, 0, length);
            return Snappy.rawUncompress(inputBuffer, 0, length, out, 0);
        }
        // stored chunk: copy straight through
        in.readBytes(out, 0, length);
        return length;
    }
}

View File

@ -1,51 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.compress.snappy.xerial;
import org.elasticsearch.common.compress.snappy.SnappyCompressedStreamOutput;
import org.elasticsearch.common.compress.snappy.SnappyCompressorContext;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.xerial.snappy.Snappy;
import java.io.IOException;
/**
 * Snappy-compressed stream output backed by the xerial snappy-java codec.
 * Writes each chunk as a boolean flag (true = compressed, false = stored),
 * a vInt length, and the chunk bytes.
 */
public class XerialSnappyCompressedStreamOutput extends SnappyCompressedStreamOutput {

    public XerialSnappyCompressedStreamOutput(StreamOutput out, SnappyCompressorContext context) throws IOException {
        super(out, context);
    }

    @Override
    protected void compress(byte[] data, int offset, int len, StreamOutput out) throws IOException {
        final int compressedLen = Snappy.rawCompress(data, offset, len, compressedBuffer, 0);
        // keep the compressed form only when it saves more than 1/8th (12.5%) of the input
        if (compressedLen < (len - (len / 8))) {
            out.writeBoolean(true);
            out.writeVInt(compressedLen);
            out.writeBytes(compressedBuffer, 0, compressedLen);
        } else {
            out.writeBoolean(false);
            out.writeVInt(len);
            out.writeBytes(data, offset, len);
        }
    }
}

View File

@ -1,68 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.compress.snappy.xerial;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.elasticsearch.common.compress.CompressedIndexInput;
import org.elasticsearch.common.compress.CompressedIndexOutput;
import org.elasticsearch.common.compress.CompressedStreamInput;
import org.elasticsearch.common.compress.CompressedStreamOutput;
import org.elasticsearch.common.compress.snappy.SnappyCompressor;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.xerial.snappy.Snappy;
import java.io.IOException;
/**
 * Snappy {@link SnappyCompressor} bound to the xerial snappy-java codec: delegates
 * the size bound to the native library and hands out the matching xerial
 * stream/index wrappers, all sharing this compressor's context.
 */
public class XerialSnappyCompressor extends SnappyCompressor {

    @Override
    public String type() {
        return "snappy";
    }

    /** Worst-case output size, as reported by the native codec. */
    @Override
    protected int maxCompressedLength(int length) {
        return Snappy.maxCompressedLength(length);
    }

    @Override
    public CompressedStreamInput streamInput(StreamInput in) throws IOException {
        return new XerialSnappyCompressedStreamInput(in, compressorContext);
    }

    @Override
    public CompressedStreamOutput streamOutput(StreamOutput out) throws IOException {
        return new XerialSnappyCompressedStreamOutput(out, compressorContext);
    }

    @Override
    public CompressedIndexInput indexInput(IndexInput in) throws IOException {
        return new XerialSnappyCompressedIndexInput(in, compressorContext);
    }

    @Override
    public CompressedIndexOutput indexOutput(IndexOutput out) throws IOException {
        return new XerialSnappyCompressedIndexOutput(out, compressorContext);
    }
}

View File

@ -28,7 +28,6 @@ import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.NIOFSDirectory;
import org.elasticsearch.common.compress.CompressedDirectory;
import org.elasticsearch.common.compress.lzf.LZFCompressor;
import org.elasticsearch.common.compress.snappy.xerial.XerialSnappyCompressor;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.unit.ByteSizeValue;
@ -55,9 +54,6 @@ public class LuceneCompressionBenchmark {
Directory compressedLzfDir = new CompressedDirectory(new NIOFSDirectory(new File(testFile, "compressed_lzf")), new LZFCompressor(), false, "fdt", "tvf");
IndexWriter compressedLzfWriter = new IndexWriter(compressedLzfDir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
Directory compressedSnappyDir = new CompressedDirectory(new NIOFSDirectory(new File(testFile, "compressed_snappy")), new XerialSnappyCompressor(), false, "fdt", "tvf");
IndexWriter compressedSnappyWriter = new IndexWriter(compressedSnappyDir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
System.out.println("feeding data...");
TestData testData = new TestData();
while (testData.next() && testData.getTotalSize() < MAX_SIZE) {
@ -73,23 +69,18 @@ public class LuceneCompressionBenchmark {
}
uncompressedWriter.addDocument(doc);
compressedLzfWriter.addDocument(doc);
compressedSnappyWriter.addDocument(doc);
}
System.out.println("optimizing...");
uncompressedWriter.forceMerge(1);
compressedLzfWriter.forceMerge(1);
compressedSnappyWriter.forceMerge(1);
uncompressedWriter.waitForMerges();
compressedLzfWriter.waitForMerges();
compressedSnappyWriter.waitForMerges();
System.out.println("done");
uncompressedWriter.close();
compressedLzfWriter.close();
compressedSnappyWriter.close();
compressedLzfDir.close();
compressedSnappyDir.close();
uncompressedDir.close();
}

View File

@ -24,8 +24,6 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.compress.lzf.LZFCompressor;
import org.elasticsearch.common.compress.snappy.xerial.XerialSnappy;
import org.elasticsearch.common.compress.snappy.xerial.XerialSnappyCompressor;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilders;
@ -71,24 +69,6 @@ public class SearchSourceCompressTests extends AbstractNodesTests {
verifySource(null);
}
@Test
public void testSourceCompressionXerialSnappy() throws IOException {
if (XerialSnappy.available) {
CompressorFactory.setDefaultCompressor(new XerialSnappyCompressor());
verifySource(true);
verifySource(false);
verifySource(null);
}
}
@Test
public void testAll() throws IOException {
testSourceCompressionLZF();
testSourceCompressionXerialSnappy();
testSourceCompressionLZF();
testSourceCompressionXerialSnappy();
}
private void verifySource(Boolean compress) throws IOException {
try {
client.admin().indices().prepareDelete("test").execute().actionGet();

View File

@ -32,8 +32,6 @@ import org.elasticsearch.common.compress.CompressedIndexInput;
import org.elasticsearch.common.compress.CompressedIndexOutput;
import org.elasticsearch.common.compress.Compressor;
import org.elasticsearch.common.compress.lzf.LZFCompressor;
import org.elasticsearch.common.compress.snappy.xerial.XerialSnappy;
import org.elasticsearch.common.compress.snappy.xerial.XerialSnappyCompressor;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.unit.SizeValue;
import org.testng.annotations.Test;
@ -48,30 +46,11 @@ import static org.hamcrest.Matchers.equalTo;
@Test
public class CompressIndexInputOutputTests {
@Test
public void testXerialSnappy() throws Exception {
if (XerialSnappy.available) {
testCompressor(new XerialSnappyCompressor());
}
}
@Test
public void testLZF() throws Exception {
testCompressor(new LZFCompressor());
}
@Test
public void testSideAffects() throws Exception {
if (XerialSnappy.available) {
testCompressor(new XerialSnappyCompressor());
}
testCompressor(new LZFCompressor());
if (XerialSnappy.available) {
testCompressor(new XerialSnappyCompressor());
}
testCompressor(new LZFCompressor());
}
private void testCompressor(Compressor compressor) throws Exception {
empty(compressor);
simple(compressor);