From 1283155ddcb153b90c8788fb38e019d984a8fe1a Mon Sep 17 00:00:00 2001
From: Michael Stack
Date: Mon, 9 Apr 2012 22:27:55 +0000
Subject: [PATCH] HBASE-5748 Enable lib directory in jar file for coprocessor

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1311498 13f79535-47bb-0310-9956-ffa450edef68
---
 .../hbase/coprocessor/CoprocessorHost.java  | 19 ++++
 .../hbase/coprocessor/TestClassLoading.java | 87 +++++++++++++++++++
 2 files changed, 106 insertions(+)

diff --git a/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java b/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
index db60d051008..4f07d5af8d0 100644
--- a/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
+++ b/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
@@ -39,12 +39,16 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.SortedCopyOnWriteSet;
 import org.apache.hadoop.hbase.util.VersionInfo;
 import org.apache.hadoop.hbase.Server;
+import org.apache.hadoop.io.IOUtils;
 
 import java.io.File;
+import java.io.FileOutputStream;
 import java.io.IOException;
 import java.net.URL;
 import java.net.URLClassLoader;
 import java.util.*;
+import java.util.jar.JarEntry;
+import java.util.jar.JarFile;
 
 /**
  * Provides the common setup framework and runtime services for coprocessor
@@ -195,6 +199,21 @@ public abstract class CoprocessorHost<E extends CoprocessorEnvironment> {
     // method which returns URLs for as long as it is available
     List<URL> paths = new ArrayList<URL>();
     paths.add(new File(dst.toString()).getCanonicalFile().toURL());
+
+    JarFile jarFile = new JarFile(dst.toString());
+    Enumeration<JarEntry> entries = jarFile.entries();
+    while (entries.hasMoreElements()) {
+      JarEntry entry = entries.nextElement();
+      if (entry.getName().matches("/lib/[^/]+\\.jar")) {
+        File file = new File(System.getProperty("java.io.tmpdir") +
+            java.io.File.separator +"." + pathPrefix +
+            "." + className + "." + System.currentTimeMillis() + "." + entry.getName().substring(5));
+        IOUtils.copyBytes(jarFile.getInputStream(entry), new FileOutputStream(file), conf, true);
+        file.deleteOnExit();
+        paths.add(file.toURL());
+      }
+    }
+
     StringTokenizer st = new StringTokenizer(cp, File.pathSeparator);
     while (st.hasMoreTokens()) {
       paths.add((new File(st.nextToken())).getCanonicalFile().toURL());
diff --git a/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java b/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
index 368a0e52498..1918b1e7e4a 100644
--- a/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
+++ b/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
@@ -382,6 +382,93 @@ public class TestClassLoading {
     assertFalse("Configuration key 'k4' wasn't configured", found5_k4);
   }
 
+  @Test
+  public void testClassLoadingFromLibDirInJar() throws Exception {
+    FileSystem fs = cluster.getFileSystem();
+
+    File innerJarFile1 = buildCoprocessorJar(cpName1);
+    File innerJarFile2 = buildCoprocessorJar(cpName2);
+    File outerJarFile = new File(TEST_UTIL.getDataTestDir().toString(), "outer.jar");
+
+    byte buffer[] = new byte[BUFFER_SIZE];
+    // Open archive file
+    FileOutputStream stream = new FileOutputStream(outerJarFile);
+    JarOutputStream out = new JarOutputStream(stream, new Manifest());
+
+    for (File jarFile: new File[] { innerJarFile1, innerJarFile2 }) {
+      // Add archive entry
+      JarEntry jarAdd = new JarEntry("/lib/" + jarFile.getName());
+      jarAdd.setTime(jarFile.lastModified());
+      out.putNextEntry(jarAdd);
+
+      // Write file to archive
+      FileInputStream in = new FileInputStream(jarFile);
+      while (true) {
+        int nRead = in.read(buffer, 0, buffer.length);
+        if (nRead <= 0)
+          break;
+        out.write(buffer, 0, nRead);
+      }
+      in.close();
+    }
+    out.close();
+    stream.close();
+    LOG.info("Adding jar file to outer jar file completed");
+
+    // copy the jars into dfs
+    fs.copyFromLocalFile(new Path(outerJarFile.getPath()),
+      new Path(fs.getUri().toString() + Path.SEPARATOR));
+    String jarFileOnHDFS = fs.getUri().toString() + Path.SEPARATOR +
+      outerJarFile.getName();
+    assertTrue("Copy jar file to HDFS failed.",
+      fs.exists(new Path(jarFileOnHDFS)));
+    LOG.info("Copied jar file to HDFS: " + jarFileOnHDFS);
+
+    // create a table that references the coprocessors
+    HTableDescriptor htd = new HTableDescriptor(tableName);
+    htd.addFamily(new HColumnDescriptor("test"));
+    // without configuration values
+    htd.setValue("COPROCESSOR$1", jarFileOnHDFS.toString() + "|" + cpName1 +
+      "|" + Coprocessor.PRIORITY_USER);
+    // with configuration values
+    htd.setValue("COPROCESSOR$2", jarFileOnHDFS.toString() + "|" + cpName2 +
+      "|" + Coprocessor.PRIORITY_USER + "|k1=v1,k2=v2,k3=v3");
+    HBaseAdmin admin = new HBaseAdmin(this.conf);
+    if (admin.tableExists(tableName)) {
+      admin.disableTable(tableName);
+      admin.deleteTable(tableName);
+    }
+    admin.createTable(htd);
+
+    // verify that the coprocessors were loaded
+    boolean found1 = false, found2 = false, found2_k1 = false,
+        found2_k2 = false, found2_k3 = false;
+    MiniHBaseCluster hbase = TEST_UTIL.getHBaseCluster();
+    for (HRegion region:
+        hbase.getRegionServer(0).getOnlineRegionsLocalContext()) {
+      if (region.getRegionNameAsString().startsWith(tableName)) {
+        CoprocessorEnvironment env;
+        env = region.getCoprocessorHost().findCoprocessorEnvironment(cpName1);
+        if (env != null) {
+          found1 = true;
+        }
+        env = region.getCoprocessorHost().findCoprocessorEnvironment(cpName2);
+        if (env != null) {
+          found2 = true;
+          Configuration conf = env.getConfiguration();
+          found2_k1 = conf.get("k1") != null;
+          found2_k2 = conf.get("k2") != null;
+          found2_k3 = conf.get("k3") != null;
+        }
+      }
+    }
+    assertTrue("Class " + cpName1 + " was missing on a region", found1);
+    assertTrue("Class " + cpName2 + " was missing on a region", found2);
+    assertTrue("Configuration key 'k1' was missing on a region", found2_k1);
+    assertTrue("Configuration key 'k2' was missing on a region", found2_k2);
+    assertTrue("Configuration key 'k3' was missing on a region", found2_k3);
+  }
+
   @Test
   public void testRegionServerCoprocessorsReported() throws Exception {
     // HBASE 4070: Improve region server metrics to report loaded coprocessors
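
Note (illustration, not part of the patch): the patched CoprocessorHost only extracts jar entries whose names match "/lib/[^/]+\.jar", which is exactly the layout testClassLoadingFromLibDirInJar builds above. A minimal packaging sketch in the same spirit follows; the class and method names are made up for this example, and it uses try-with-resources instead of the explicit close() calls used in the test.

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;

// Hypothetical helper, not part of HBase: bundles dependency jars under a
// "/lib/" directory inside an outer coprocessor jar, the layout that the
// patched CoprocessorHost extracts to java.io.tmpdir at load time.
public class CoprocessorJarPackager {
  public static void packageWithLibDir(File outerJar, File... innerJars)
      throws Exception {
    try (JarOutputStream out =
        new JarOutputStream(new FileOutputStream(outerJar), new Manifest())) {
      byte[] buffer = new byte[4096];
      for (File innerJar : innerJars) {
        // Entry name must match CoprocessorHost's "/lib/[^/]+\.jar" check,
        // hence the leading "/lib/" prefix (same as in the test above).
        JarEntry entry = new JarEntry("/lib/" + innerJar.getName());
        entry.setTime(innerJar.lastModified());
        out.putNextEntry(entry);
        try (FileInputStream in = new FileInputStream(innerJar)) {
          int n;
          while ((n = in.read(buffer)) > 0) {
            out.write(buffer, 0, n);
          }
        }
        out.closeEntry();
      }
    }
  }
}

Because the loader uses String.matches(), the entry name has to match the whole pattern, so the leading "/lib/" prefix is required; an entry named plain "lib/foo.jar" would be skipped.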