Merge remote-tracking branch 'dakrone/validate-plugin-checksum'

Lee Hinman 2015-08-14 14:06:37 -06:00
commit cd03e61f4b
4 changed files with 198 additions and 10 deletions

org/elasticsearch/common/http/client/HttpDownloadHelper.java

@@ -21,20 +21,25 @@ package org.elasticsearch.common.http.client;
import com.google.common.base.Charsets;
import com.google.common.base.Strings;
import com.google.common.hash.Hashing;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.Version;
import org.elasticsearch.*;
import org.elasticsearch.common.Base64;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.ByteArray;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.attribute.FileTime;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Callable;
/**
*
@@ -83,6 +88,81 @@ public class HttpDownloadHelper {
return getThread.wasSuccessful();
}
public interface Checksummer {
/** Return the hex string for the given byte array */
String checksum(byte[] filebytes);
/** Human-readable name for the checksum format */
String name();
}
/** Checksummer for SHA1 */
public static Checksummer SHA1_CHECKSUM = new Checksummer() {
@Override
public String checksum(byte[] filebytes) {
return Hashing.sha1().hashBytes(filebytes).toString();
}
@Override
public String name() {
return "SHA1";
}
};
/** Checksummer for MD5 */
public static Checksummer MD5_CHECKSUM = new Checksummer() {
@Override
public String checksum(byte[] filebytes) {
return Hashing.md5().hashBytes(filebytes).toString();
}
@Override
public String name() {
return "MD5";
}
};
/**
* Download the given checksum URL to the destination and check the checksum
* @param checksumURL URL for the checksum file
* @param originalFile original file to calculate checksum of
* @param checksumFile destination to download the checksum file to
* @param progress optional progress reporter for the checksum download
* @param timeout how long to wait for the checksum download before giving up
* @param hashFunc Checksummer used to calculate the checksum of the file
* @return true if the checksum was validated, false if no checksum file was found
* @throws Exception if the checksum does not match
*/
public boolean downloadAndVerifyChecksum(URL checksumURL, Path originalFile, Path checksumFile,
@Nullable DownloadProgress progress,
TimeValue timeout, Checksummer hashFunc) throws Exception {
try {
if (download(checksumURL, checksumFile, progress, timeout)) {
byte[] fileBytes = Files.readAllBytes(originalFile);
List<String> checksumLines = Files.readAllLines(checksumFile);
if (checksumLines.size() != 1) {
throw new ElasticsearchCorruptionException("invalid format for checksum file (" +
hashFunc.name() + "), expected 1 line, got: " + checksumLines.size());
}
String checksumHex = checksumLines.get(0);
String fileHex = hashFunc.checksum(fileBytes);
if (fileHex.equals(checksumHex) == false) {
throw new ElasticsearchCorruptionException("incorrect hash (" + hashFunc.name() +
"), file hash: [" + fileHex + "], expected: [" + checksumHex + "]");
}
return true;
}
} catch (FileNotFoundException | NoSuchFileException e) {
// checksum file doesn't exist
return false;
} catch (IOException e) {
if (ExceptionsHelper.unwrapCause(e) instanceof FileNotFoundException) {
// checksum file didn't exist
return false;
}
throw e;
} finally {
IOUtils.deleteFilesIgnoringExceptions(checksumFile);
}
return false;
}
/**
* Interface implemented for reporting

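Taken together, the pieces above are meant to be used roughly as follows. This is a minimal sketch rather than code from this change: the URL, paths, and timeout are placeholders, the surrounding Elasticsearch classes are assumed to be on the classpath, and the snippet sits inside a method declared to throw Exception.

    HttpDownloadHelper helper = new HttpDownloadHelper();
    URL sha1Url = new URL("https://example.org/my-plugin.zip.sha1");    // placeholder checksum URL
    Path pluginZip = Paths.get("/tmp/my-plugin.zip");                   // the already-downloaded plugin (placeholder path)
    Path sha1Scratch = Files.createTempFile("my-plugin", ".zip.sha1");  // scratch file, deleted by the helper

    // Returns false when the server publishes no checksum file; a hash mismatch
    // throws ElasticsearchCorruptionException rather than returning.
    boolean verified = helper.downloadAndVerifyChecksum(sha1Url, pluginZip, sha1Scratch,
            new HttpDownloadHelper.NullProgress(), TimeValue.timeValueSeconds(30),
            HttpDownloadHelper.SHA1_CHECKSUM);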
org/elasticsearch/plugins/PluginManager.java

@@ -24,11 +24,13 @@ import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterators;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.Build;
import org.elasticsearch.ElasticsearchCorruptionException;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
import org.elasticsearch.bootstrap.JarHell;
import org.elasticsearch.common.cli.Terminal;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.http.client.HttpDownloadHelper;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.unit.TimeValue;
@@ -125,6 +127,7 @@ public class PluginManager {
HttpDownloadHelper downloadHelper = new HttpDownloadHelper();
boolean downloaded = false;
boolean verified = false;
HttpDownloadHelper.DownloadProgress progress;
if (outputMode == OutputMode.SILENT) {
progress = new HttpDownloadHelper.NullProgress();
@@ -145,7 +148,14 @@
try {
downloadHelper.download(pluginUrl, pluginFile, progress, this.timeout);
downloaded = true;
terminal.println("Verifying %s checksums if available ...", pluginUrl.toExternalForm());
Tuple<URL, Path> sha1Info = pluginHandle.newChecksumUrlAndFile(environment, pluginUrl, "sha1");
verified = downloadHelper.downloadAndVerifyChecksum(sha1Info.v1(), pluginFile,
sha1Info.v2(), progress, this.timeout, HttpDownloadHelper.SHA1_CHECKSUM);
Tuple<URL, Path> md5Info = pluginHandle.newChecksumUrlAndFile(environment, pluginUrl, "md5");
verified = verified || downloadHelper.downloadAndVerifyChecksum(md5Info.v1(), pluginFile,
md5Info.v2(), progress, this.timeout, HttpDownloadHelper.MD5_CHECKSUM);
} catch (ElasticsearchTimeoutException e) {
} catch (ElasticsearchTimeoutException | ElasticsearchCorruptionException e) {
throw e;
} catch (Exception e) {
// ignore
@@ -164,8 +174,15 @@
try {
downloadHelper.download(url, pluginFile, progress, this.timeout);
downloaded = true;
terminal.println("Verifying %s checksums if available ...", url.toExternalForm());
Tuple<URL, Path> sha1Info = pluginHandle.newChecksumUrlAndFile(environment, url, "sha1");
verified = downloadHelper.downloadAndVerifyChecksum(sha1Info.v1(), pluginFile,
sha1Info.v2(), progress, this.timeout, HttpDownloadHelper.SHA1_CHECKSUM);
Tuple<URL, Path> md5Info = pluginHandle.newChecksumUrlAndFile(environment, url, "md5");
verified = verified || downloadHelper.downloadAndVerifyChecksum(md5Info.v1(), pluginFile,
md5Info.v2(), progress, this.timeout, HttpDownloadHelper.MD5_CHECKSUM);
break;
} catch (ElasticsearchTimeoutException e) {
} catch (ElasticsearchTimeoutException | ElasticsearchCorruptionException e) {
throw e;
} catch (Exception e) {
terminal.println(VERBOSE, "Failed: %s", ExceptionsHelper.detailedMessage(e));
@@ -178,6 +195,10 @@
IOUtils.deleteFilesIgnoringExceptions(pluginFile);
throw new IOException("failed to download out of all possible locations..., use --verbose to get detailed information");
}
if (verified == false) {
terminal.println("NOTE: Unable to verify checksum for downloaded plugin (unable to find .sha1 or .md5 file to verify)");
}
return pluginFile;
}
@@ -469,6 +490,11 @@ public class PluginManager {
return Files.createTempFile(env.tmpFile(), name, ".zip");
}
Tuple<URL, Path> newChecksumUrlAndFile(Environment env, URL originalUrl, String suffix) throws IOException {
URL newUrl = new URL(originalUrl.toString() + "." + suffix);
return new Tuple<>(newUrl, Files.createTempFile(env.tmpFile(), name, ".zip." + suffix));
}
Path extractedDir(Environment env) {
return env.pluginsFile().resolve(name);
}
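Since newChecksumUrlAndFile only appends the suffix to the original download URL, the locations probed for a given plugin are easy to predict; a small illustration with a hypothetical URL:

    URL originalUrl = new URL("https://example.org/my-plugin.zip");  // hypothetical plugin download URL
    URL sha1Url = new URL(originalUrl.toString() + ".sha1");         // https://example.org/my-plugin.zip.sha1
    URL md5Url  = new URL(originalUrl.toString() + ".md5");          // https://example.org/my-plugin.zip.md5

The .sha1 file is tried first; the .md5 file is only consulted when no .sha1 file was found. A missing checksum file merely triggers the NOTE message above, while a mismatch in either file aborts the install.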

org/elasticsearch/plugins/PluginManagerIT.java

@@ -19,6 +19,7 @@
package org.elasticsearch.plugins;
import com.google.common.base.Charsets;
import com.google.common.hash.Hashing;
import org.apache.http.impl.client.HttpClients;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.Version;
@@ -50,7 +51,10 @@ import org.junit.Test;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSocketFactory;
import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.net.InetSocketAddress;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileVisitResult;
@@ -73,8 +77,7 @@ import static org.elasticsearch.common.io.FileSystemUtilsTests.assertFileContent
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.plugins.PluginInfoTests.writeProperties;
import static org.elasticsearch.test.ESIntegTestCase.Scope;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertDirectoryExists;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileExists;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
import static org.hamcrest.Matchers.*;
import static org.jboss.netty.handler.codec.http.HttpVersion.HTTP_1_1;
@@ -106,6 +109,26 @@ public class PluginManagerIT extends ESIntegTestCase {
System.clearProperty("es.default.path.home");
}
private void writeSha1(Path file, boolean corrupt) throws IOException {
String sha1Hex = Hashing.sha1().hashBytes(Files.readAllBytes(file)).toString();
try (BufferedWriter out = Files.newBufferedWriter(file.resolveSibling(file.getFileName() + ".sha1"), Charsets.UTF_8)) {
out.write(sha1Hex);
if (corrupt) {
out.write("bad");
}
}
}
private void writeMd5(Path file, boolean corrupt) throws IOException {
String md5Hex = Hashing.md5().hashBytes(Files.readAllBytes(file)).toString();
try (BufferedWriter out = Files.newBufferedWriter(file.resolveSibling(file.getFileName() + ".md5"), Charsets.UTF_8)) {
out.write(md5Hex);
if (corrupt) {
out.write("bad");
}
}
}
/** creates a plugin .zip and returns the url for testing */
private String createPlugin(final Path structure, String... properties) throws IOException {
writeProperties(structure, properties);
@@ -120,9 +143,35 @@
}
});
}
if (randomBoolean()) {
writeSha1(zip, false);
} else if (randomBoolean()) {
writeMd5(zip, false);
}
return zip.toUri().toURL().toString();
}
/** creates a plugin .zip and bad checksum file and returns the url for testing */
private String createPluginWithBadChecksum(final Path structure, String... properties) throws IOException {
writeProperties(structure, properties);
Path zip = createTempDir().resolve(structure.getFileName() + ".zip");
try (ZipOutputStream stream = new ZipOutputStream(Files.newOutputStream(zip))) {
Files.walkFileTree(structure, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
stream.putNextEntry(new ZipEntry(structure.relativize(file).toString()));
Files.copy(file, stream);
return FileVisitResult.CONTINUE;
}
});
}
if (randomBoolean()) {
writeSha1(zip, true);
} else {
writeMd5(zip, true);
}
return zip.toUri().toURL().toString();
}
@Test
public void testThatPluginNameMustBeSupplied() throws IOException {
Path pluginDir = createTempDir().resolve("fake-plugin");
@@ -342,15 +391,30 @@
Files.createDirectories(pluginDir.resolve("_site"));
Files.createFile(pluginDir.resolve("_site").resolve("somefile"));
String pluginUrl = createPlugin(pluginDir,
"description", "fake desc",
"version", "1.0",
"site", "true");
assertStatusOk(String.format(Locale.ROOT, "install %s --url %s --verbose", pluginName, pluginUrl));
assertThatPluginIsListed(pluginName);
// We want to check that Plugin Manager moves content to _site
assertFileExists(initialSettings.v2().pluginsFile().resolve(pluginName).resolve("_site"));
}
@Test
public void testInstallPluginWithBadChecksum() throws IOException {
String pluginName = "fake-plugin";
Path pluginDir = createTempDir().resolve(pluginName);
Files.createDirectories(pluginDir.resolve("_site"));
Files.createFile(pluginDir.resolve("_site").resolve("somefile"));
String pluginUrl = createPluginWithBadChecksum(pluginDir,
"description", "fake desc",
"version", "1.0",
"site", "true");
assertStatus(String.format(Locale.ROOT, "install %s --url %s --verbose", pluginName, pluginUrl),
ExitStatus.IO_ERROR);
assertThatPluginIsNotListed(pluginName);
assertFileNotExists(initialSettings.v2().pluginsFile().resolve(pluginName).resolve("_site"));
}
private void singlePluginInstallAndRemove(String pluginDescriptor, String pluginName, String pluginCoordinates) throws IOException {
logger.info("--> trying to download and install [{}]", pluginDescriptor);
@@ -592,4 +656,11 @@
String message = String.format(Locale.ROOT, "Terminal output was: %s", terminal.getTerminalOutput());
assertThat(message, terminal.getTerminalOutput(), hasItem(containsString(pluginName)));
}
private void assertThatPluginIsNotListed(String pluginName) {
terminal.getTerminalOutput().clear();
assertStatusOk("list");
String message = String.format(Locale.ROOT, "Terminal output was: %s", terminal.getTerminalOutput());
assertFalse(message, terminal.getTerminalOutput().contains(pluginName));
}
}
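The writeSha1/writeMd5 helpers above also show everything a plugin author needs to do to publish verifiable artifacts: write the bare hex digest into a sibling file next to the zip. A minimal sketch using the same Guava hashing utilities, with a placeholder path:

    Path zip = Paths.get("/path/to/my-plugin.zip");  // placeholder path to the plugin zip
    byte[] bytes = Files.readAllBytes(zip);
    // One line containing just the lowercase hex digest, which is exactly what
    // downloadAndVerifyChecksum compares against (no filename, no trailing newline needed).
    Files.write(zip.resolveSibling(zip.getFileName() + ".sha1"),
            Hashing.sha1().hashBytes(bytes).toString().getBytes(Charsets.UTF_8));
    Files.write(zip.resolveSibling(zip.getFileName() + ".md5"),
            Hashing.md5().hashBytes(bytes).toString().getBytes(Charsets.UTF_8));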

org/elasticsearch/plugins/PluginManagerUnitTests.java

@@ -22,6 +22,7 @@ package org.elasticsearch.plugins;
import com.google.common.io.Files;
import org.elasticsearch.Build;
import org.elasticsearch.Version;
import org.elasticsearch.common.http.client.HttpDownloadHelper;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
@@ -30,6 +31,7 @@ import org.junit.Test;
import java.io.IOException;
import java.net.URL;
import java.nio.charset.Charset;
import java.nio.file.Path;
import java.util.Iterator;
import java.util.Locale;
@@ -129,4 +131,13 @@ public class PluginManagerUnitTests extends ESTestCase {
URL expected = new URL("https", "github.com", "/" + user + "/" + pluginName + "/" + "archive/master.zip");
assertThat(handle.urls().get(0), is(expected));
}
@Test
public void testDownloadHelperChecksums() throws Exception {
// Sanity check to make sure the checksum functions never change how they checksum things
assertEquals("0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33",
HttpDownloadHelper.SHA1_CHECKSUM.checksum("foo".getBytes(Charset.forName("UTF-8"))));
assertEquals("acbd18db4cc2f85cedef654fccc4a4d8",
HttpDownloadHelper.MD5_CHECKSUM.checksum("foo".getBytes(Charset.forName("UTF-8"))));
}
}