From 48ee32824f5b09d26fe3d71183c70b69b54a718e Mon Sep 17 00:00:00 2001 From: jxiang Date: Thu, 11 Apr 2013 21:19:07 +0000 Subject: [PATCH] HBASE-1936 ClassLoader that loads from hdfs; useful adding filters to classpath without having to restart services git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1467092 13f79535-47bb-0310-9956-ffa450edef68 --- .../hadoop/hbase/protobuf/ProtobufUtil.java | 18 +- .../apache/hadoop/hbase/client/TestGet.java | 55 ++++- .../org/apache/hadoop/hbase/util/Base64.java | 0 .../hadoop/hbase/util/DynamicClassLoader.java | 218 ++++++++++++++++++ .../apache/hadoop/hbase/util/TestBase64.java | 0 .../hbase/util/TestDynamicClassLoader.java | 212 +++++++++++++++++ 6 files changed, 499 insertions(+), 4 deletions(-) rename {hbase-server => hbase-common}/src/main/java/org/apache/hadoop/hbase/util/Base64.java (100%) create mode 100644 hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java rename {hbase-server => hbase-common}/src/test/java/org/apache/hadoop/hbase/util/TestBase64.java (100%) create mode 100644 hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index 199ce294ff4..b58a66d628a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -47,9 +47,11 @@ import com.google.protobuf.ServiceException; import com.google.protobuf.TextFormat; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; @@ -119,6 +121,7 @@ import org.apache.hadoop.hbase.security.access.TablePermission; import org.apache.hadoop.hbase.security.access.UserPermission; import org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.DynamicClassLoader; import org.apache.hadoop.hbase.util.Methods; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.io.Text; @@ -138,7 +141,16 @@ public final class ProtobufUtil { private final static Map> PRIMITIVES = new HashMap>(); + /** + * Dynamic class loader to load filter/comparators + */ + private final static ClassLoader CLASS_LOADER; + static { + ClassLoader parent = ProtobufUtil.class.getClassLoader(); + Configuration conf = HBaseConfiguration.create(); + CLASS_LOADER = new DynamicClassLoader(conf, parent); + PRIMITIVES.put(Boolean.TYPE.getName(), Boolean.TYPE); PRIMITIVES.put(Byte.TYPE.getName(), Byte.TYPE); PRIMITIVES.put(Character.TYPE.getName(), Character.TYPE); @@ -1046,7 +1058,7 @@ public final class ProtobufUtil { byte [] value = proto.getSerializedComparator().toByteArray(); try { Class c = - (Class)(Class.forName(type)); + (Class)Class.forName(type, true, CLASS_LOADER); Method parseFrom = c.getMethod(funcName, byte[].class); if (parseFrom == null) { throw new IOException("Unable to locate function: " + funcName + " in type: " + type); @@ -1070,7 +1082,7 @@ public final class ProtobufUtil { String funcName = "parseFrom"; try { Class c = - (Class)Class.forName(type); + (Class)Class.forName(type, 
true, CLASS_LOADER); Method parseFrom = c.getMethod(funcName, byte[].class); if (parseFrom == null) { throw new IOException("Unable to locate function: " + funcName + " in type: " + type); @@ -1130,7 +1142,7 @@ public final class ProtobufUtil { String type = parameter.getName(); try { Class c = - (Class)Class.forName(type); + (Class)Class.forName(type, true, CLASS_LOADER); Constructor cn = c.getDeclaredConstructor(String.class); return cn.newInstance(desc); diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java index 419d41dfe05..4a79105f81f 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java @@ -19,13 +19,20 @@ package org.apache.hadoop.hbase.client; +import static org.junit.Assert.fail; + +import java.io.File; +import java.io.FileOutputStream; import java.io.IOException; import java.util.Arrays; import java.util.Set; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; +import org.apache.hadoop.hbase.util.Base64; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Assert; import org.junit.Test; @@ -35,6 +42,27 @@ import org.junit.experimental.categories.Category; @Category(SmallTests.class) public class TestGet { private static final byte [] ROW = new byte [] {'r'}; + + private static final String PB_GET = "CgNyb3ciEwoPdGVzdC5Nb2NrRmlsdGVyEgAwATgB"; + + private static final String MOCK_FILTER_JAR = + "UEsDBBQACAgIACqBiEIAAAAAAAAAAAAAAAAJAAQATUVUQS1JTkYv/soAAAMAUEsHCAAAAAACAAAA" + + "AAAAAFBLAwQUAAgICAAqgYhCAAAAAAAAAAAAAAAAFAAAAE1FVEEtSU5GL01BTklGRVNULk1G803M" + + "y0xLLS7RDUstKs7Mz7NSMNQz4OVyLkpNLElN0XWqBAmY6xnEG1gqaPgXJSbnpCo45xcV5BcllgCV" + + "a/Jy8XIBAFBLBwgxyqRbQwAAAEQAAABQSwMECgAACAAAz4CIQgAAAAAAAAAAAAAAAAUAAAB0ZXN0" + + "L1BLAwQUAAgICACPgIhCAAAAAAAAAAAAAAAAFQAAAHRlc3QvTW9ja0ZpbHRlci5jbGFzc41Qy0rD" + + "QBQ9k6RNG6N9aH2uXAhWwUC3FRdRC0J1oxSkq0k6mmjaCUkq6lfpqqLgB/hR4k1aqlQEs7j3zLnn" + + "3Ec+Pl/fATSwoUNhKCUiTqxT6d62/CARkQ6NoS6ja4uH3PWE5fGelKHlOTwW1lWmscZSmxiG/L4/" + + "8JMDBnW73mHQDmVPGFBRNJFDnga0/YE4G/YdEV1wJyBHtS1dHnR45KfvCaklnh8zVNoz+zQZiiGP" + + "YtGKZJ+htt216780BkjFoIeO/UA1BqVrM+xm2n+dQlOM43tXhIkvB7GOZYbmX0Yx1VlHIhZ0ReA/" + + "8pSYdkj3WTWxgBL1PZfDyBU0h64sfS+9d8PvODZJqSL9VEL0wyjq9LIoM8q5nREKzwQUGBTzYxJz" + + "FM0JNjFPuZhOm5gbpE5rhTewyxHKTzN+/Ye/gAqqQPmE/IukWiJOo0ot67Q1XeMFK7NtWNZGydBa" + + "hta/AFBLBwjdsJqTXwEAAF0CAABQSwECFAAUAAgICAAqgYhCAAAAAAIAAAAAAAAACQAEAAAAAAAA" + + "AAAAAAAAAAAATUVUQS1JTkYv/soAAFBLAQIUABQACAgIACqBiEIxyqRbQwAAAEQAAAAUAAAAAAAA" + + "AAAAAAAAAD0AAABNRVRBLUlORi9NQU5JRkVTVC5NRlBLAQIKAAoAAAgAAM+AiEIAAAAAAAAAAAAA" + + "AAAFAAAAAAAAAAAAAAAAAMIAAAB0ZXN0L1BLAQIUABQACAgIAI+AiELdsJqTXwEAAF0CAAAVAAAA" + + "AAAAAAAAAAAAAOUAAAB0ZXN0L01vY2tGaWx0ZXIuY2xhc3NQSwUGAAAAAAQABADzAAAAhwIAAAAA"; + @Test public void testAttributesSerialization() throws IOException { Get get = new Get(Bytes.toBytes("row")); @@ -107,4 +135,29 @@ public class TestGet { Set qualifiers = get.getFamilyMap().get(family); Assert.assertEquals(1, qualifiers.size()); } -} \ No newline at end of file + + @Test + public void testDynamicFilter() throws Exception { + ClientProtos.Get getProto = ClientProtos.Get.parseFrom(Base64.decode(PB_GET)); + try { + ProtobufUtil.toGet(getProto); + fail("Should 
not be able to load the filter class"); + } catch (IOException ioe) { + Assert.assertTrue(ioe.getCause() instanceof ClassNotFoundException); + } + + Configuration conf = HBaseConfiguration.create(); + String localPath = conf.get("hbase.local.dir") + File.separator + + "dynamic" + File.separator + "jars" + File.separator; + File jarFile = new File(localPath, "MockFilter.jar"); + jarFile.deleteOnExit(); + + FileOutputStream fos = new FileOutputStream(jarFile); + fos.write(Base64.decode(MOCK_FILTER_JAR)); + fos.close(); + + Get get = ProtobufUtil.toGet(getProto); + Assert.assertEquals("test.MockFilter", + get.getFilter().getClass().getName()); + } +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/Base64.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java similarity index 100% rename from hbase-server/src/main/java/org/apache/hadoop/hbase/util/Base64.java rename to hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java new file mode 100644 index 00000000000..01132ae8f74 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java @@ -0,0 +1,218 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.util; + +import java.io.File; +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.net.URLClassLoader; +import java.util.HashMap; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; + +/** + * This is a class loader that can load classes dynamically from new + * jar files under a configured folder. It always uses its parent class + * loader to load a class at first. Only if its parent class loader + * can not load a class, we will try to load it using the logic here. + *
<p>
+ * We can't unload a class already loaded. So we will use the existing + * jar files we already know to load any class which can't be loaded + * using the parent class loader. If we still can't load the class from + * the existing jar files, we will check if any new jar file is added, + * if so, we will load the new jar file and try to load the class again. + * If still failed, a class not found exception will be thrown. + *
<p>
+ * Be careful in uploading new jar files and make sure all classes + * are consistent, otherwise, we may not be able to load your + * classes properly. + */ +@InterfaceAudience.Private +public class DynamicClassLoader extends URLClassLoader { + private static final Log LOG = + LogFactory.getLog(DynamicClassLoader.class); + + // Dynamic jars are put under ${hbase.local.dir}/dynamic/jars/ + private static final String DYNAMIC_JARS_DIR = File.separator + + "dynamic" + File.separator + "jars" + File.separator; + + /** + * Parent class loader used to load any class at first. + */ + private final ClassLoader parent; + + private File localDir; + + // FileSystem of the remote path, set only if remoteDir != null + private FileSystem remoteDirFs; + private Path remoteDir; + + // Last modified time of local jars + private HashMap jarModifiedTime; + + /** + * Creates a DynamicClassLoader that can load classes dynamically + * from jar files under a specific folder. + * + * @param conf the configuration for the cluster. + * @param parent the parent ClassLoader to set. + */ + public DynamicClassLoader( + final Configuration conf, final ClassLoader parent) { + super(new URL[]{}, parent); + this.parent = parent; + + jarModifiedTime = new HashMap(); + String localDirPath = conf.get("hbase.local.dir") + DYNAMIC_JARS_DIR; + localDir = new File(localDirPath); + if (!localDir.mkdirs() && !localDir.isDirectory()) { + throw new RuntimeException("Failed to create local dir " + localDir.getPath() + + ", DynamicClassLoader failed to init"); + } + + String remotePath = conf.get("hbase.dynamic.jars.dir"); + if (remotePath == null || remotePath.equals(localDirPath)) { + remoteDir = null; // ignore if it is the same as the local path + } else { + remoteDir = new Path(remotePath); + try { + remoteDirFs = remoteDir.getFileSystem(conf); + } catch (IOException ioe) { + LOG.warn("Failed to identify the fs of dir " + + remoteDir + ", ignored", ioe); + remoteDir = null; + } + } + } + + @Override + public Class loadClass(String name) + throws ClassNotFoundException { + try { + return parent.loadClass(name); + } catch (ClassNotFoundException e) { + if (LOG.isDebugEnabled()) { + LOG.debug("Class " + name + " not found - using dynamical class loader"); + } + + // Check whether the class has already been loaded: + Class clasz = findLoadedClass(name); + if (clasz != null) { + if (LOG.isDebugEnabled()) { + LOG.debug("Class " + name + " already loaded"); + } + } + else { + try { + if (LOG.isDebugEnabled()) { + LOG.debug("Finding class: " + name); + } + clasz = findClass(name); + } catch (ClassNotFoundException cnfe) { + // Load new jar files if any + if (LOG.isDebugEnabled()) { + LOG.debug("Loading new jar files, if any"); + } + loadNewJars(); + + if (LOG.isDebugEnabled()) { + LOG.debug("Finding class again: " + name); + } + clasz = findClass(name); + } + } + return clasz; + } + } + + private synchronized void loadNewJars() { + // Refresh local jar file lists + for (File file: localDir.listFiles()) { + String fileName = file.getName(); + if (jarModifiedTime.containsKey(fileName)) { + continue; + } + if (file.isFile() && fileName.endsWith(".jar")) { + jarModifiedTime.put(fileName, Long.valueOf(file.lastModified())); + try { + URL url = file.toURI().toURL(); + addURL(url); + } catch (MalformedURLException mue) { + // This should not happen, just log it + LOG.warn("Failed to load new jar " + fileName, mue); + } + } + } + + // Check remote files + FileStatus[] statuses = null; + if (remoteDir != null) { + try { + statuses = 
remoteDirFs.listStatus(remoteDir); + } catch (IOException ioe) { + LOG.warn("Failed to check remote dir status " + remoteDir, ioe); + } + } + if (statuses == null || statuses.length == 0) { + return; // no remote files at all + } + + for (FileStatus status: statuses) { + if (status.isDir()) continue; // No recursive lookup + Path path = status.getPath(); + String fileName = path.getName(); + if (!fileName.endsWith(".jar")) { + if (LOG.isDebugEnabled()) { + LOG.debug("Ignored non-jar file " + fileName); + } + continue; // Ignore non-jar files + } + Long cachedLastModificationTime = jarModifiedTime.get(fileName); + if (cachedLastModificationTime != null) { + long lastModified = status.getModificationTime(); + if (lastModified < cachedLastModificationTime.longValue()) { + // There could be some race, for example, someone uploads + // a new one right in the middle the old one is copied to + // local. We can check the size as well. But it is still + // not guaranteed. This should be rare. Most likely, + // we already have the latest one. + // If you are unlucky to hit this race issue, you have + // to touch the remote jar to update its last modified time + continue; + } + } + try { + // Copy it to local + File dst = new File(localDir, fileName); + remoteDirFs.copyToLocalFile(path, new Path(dst.getPath())); + jarModifiedTime.put(fileName, Long.valueOf(dst.lastModified())); + URL url = dst.toURI().toURL(); + addURL(url); + } catch (IOException ioe) { + LOG.warn("Failed to load new jar " + fileName, ioe); + } + } + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestBase64.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBase64.java similarity index 100% rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestBase64.java rename to hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBase64.java diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java new file mode 100644 index 00000000000..f52418532d1 --- /dev/null +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java @@ -0,0 +1,212 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.util; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.FileWriter; +import java.util.ArrayList; +import java.util.List; +import java.util.jar.JarEntry; +import java.util.jar.JarOutputStream; +import java.util.jar.Manifest; + +import javax.tools.JavaCompiler; +import javax.tools.JavaFileObject; +import javax.tools.StandardJavaFileManager; +import javax.tools.ToolProvider; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.HBaseCommonTestingUtility; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.SmallTests; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +/** + * Test TestDynamicClassLoader + */ +@Category(SmallTests.class) +public class TestDynamicClassLoader { + private static final Log LOG = LogFactory.getLog(TestDynamicClassLoader.class); + + private static final Configuration conf = HBaseConfiguration.create(); + + private static final HBaseCommonTestingUtility TEST_UTIL = new HBaseCommonTestingUtility(); + + static { + conf.set("hbase.dynamic.jars.dir", TEST_UTIL.getDataTestDir().toString()); + } + + // generate jar file + private boolean createJarArchive(File archiveFile, File[] tobeJared) { + try { + byte buffer[] = new byte[4096]; + // Open archive file + FileOutputStream stream = new FileOutputStream(archiveFile); + JarOutputStream out = new JarOutputStream(stream, new Manifest()); + + for (int i = 0; i < tobeJared.length; i++) { + if (tobeJared[i] == null || !tobeJared[i].exists() + || tobeJared[i].isDirectory()) { + continue; + } + + // Add archive entry + JarEntry jarAdd = new JarEntry(tobeJared[i].getName()); + jarAdd.setTime(tobeJared[i].lastModified()); + out.putNextEntry(jarAdd); + + // Write file to archive + FileInputStream in = new FileInputStream(tobeJared[i]); + while (true) { + int nRead = in.read(buffer, 0, buffer.length); + if (nRead <= 0) + break; + out.write(buffer, 0, nRead); + } + in.close(); + } + out.close(); + stream.close(); + LOG.info("Adding classes to jar file completed"); + return true; + } catch (Exception ex) { + LOG.error("Error: " + ex.getMessage()); + return false; + } + } + + private File buildJar( + String className, String folder) throws Exception { + String javaCode = "public class " + className + " {}"; + Path srcDir = new Path(TEST_UTIL.getDataTestDir(), "src"); + File srcDirPath = new File(srcDir.toString()); + srcDirPath.mkdirs(); + File sourceCodeFile = new File(srcDir.toString(), className + ".java"); + BufferedWriter bw = new BufferedWriter(new FileWriter(sourceCodeFile)); + bw.write(javaCode); + bw.close(); + + // compile it by JavaCompiler + JavaCompiler compiler = ToolProvider.getSystemJavaCompiler(); + ArrayList srcFileNames = new ArrayList(); + srcFileNames.add(sourceCodeFile.toString()); + StandardJavaFileManager fm = compiler.getStandardFileManager(null, null, + null); + Iterable cu = + fm.getJavaFileObjects(sourceCodeFile); + List options = new ArrayList(); + options.add("-classpath"); + // only add hbase classes to classpath. This is a little bit tricky: assume + // the classpath is {hbaseSrc}/target/classes. 
+ String currentDir = new File(".").getAbsolutePath(); + String classpath = + currentDir + File.separator + "target"+ File.separator + "classes" + + System.getProperty("path.separator") + System.getProperty("java.class.path"); + options.add(classpath); + LOG.debug("Setting classpath to: "+classpath); + + JavaCompiler.CompilationTask task = compiler.getTask(null, fm, null, + options, null, cu); + assertTrue("Compile file " + sourceCodeFile + " failed.", task.call()); + + // build a jar file by the classes files + String jarFileName = className + ".jar"; + File jarFile = new File(folder, jarFileName); + if (!createJarArchive(jarFile, + new File[]{new File(srcDir.toString(), className + ".class")})){ + assertTrue("Build jar file failed.", false); + } + return jarFile; + } + + @Test + public void testLoadClassFromLocalPath() throws Exception { + ClassLoader parent = TestDynamicClassLoader.class.getClassLoader(); + DynamicClassLoader classLoader = new DynamicClassLoader(conf, parent); + + String className = "TestLoadClassFromLocalPath"; + try { + classLoader.loadClass(className); + fail("Should not be able to load class " + className); + } catch (ClassNotFoundException cnfe) { + // expected, move on + } + + try { + buildJar(className, localDirPath()); + classLoader.loadClass(className); + } catch (ClassNotFoundException cnfe) { + LOG.error("Should be able to load class " + className, cnfe); + fail(cnfe.getMessage()); + } finally { + deleteClass(className); + } + } + + @Test + public void testLoadClassFromAnotherPath() throws Exception { + ClassLoader parent = TestDynamicClassLoader.class.getClassLoader(); + DynamicClassLoader classLoader = new DynamicClassLoader(conf, parent); + + String className = "TestLoadClassFromAnotherPath"; + try { + classLoader.loadClass(className); + fail("Should not be able to load class " + className); + } catch (ClassNotFoundException cnfe) { + // expected, move on + } + + try { + buildJar(className, TEST_UTIL.getDataTestDir().toString()); + classLoader.loadClass(className); + } catch (ClassNotFoundException cnfe) { + LOG.error("Should be able to load class " + className, cnfe); + fail(cnfe.getMessage()); + } finally { + deleteClass(className); + } + } + + private String localDirPath() { + return conf.get("hbase.local.dir") + File.separator + + "dynamic" + File.separator + "jars" + File.separator; + } + + private void deleteClass(String className) throws Exception { + String jarFileName = className + ".jar"; + File file = new File(TEST_UTIL.getDataTestDir().toString(), jarFileName); + file.deleteOnExit(); + + file = new File(conf.get("hbase.dynamic.jars.dir"), jarFileName); + file.deleteOnExit(); + + file = new File(localDirPath(), jarFileName); + file.deleteOnExit(); + } +}
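
Usage note (not part of the patch): the tests above exercise the new loader indirectly, through ProtobufUtil and the embedded MockFilter jar. Below is a minimal sketch of using DynamicClassLoader directly, based only on the constructor and configuration keys introduced in this patch. The class name "MyFilter", the class DynamicLoaderSketch, and the path "hdfs:///hbase/lib" are hypothetical placeholders; the jar containing the class is assumed to already sit in the configured jar directory.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.util.DynamicClassLoader;

public class DynamicLoaderSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Optional remote jar directory (e.g. on HDFS). If unset, or equal to the
    // local ${hbase.local.dir}/dynamic/jars/ path, only local jars are scanned.
    conf.set("hbase.dynamic.jars.dir", "hdfs:///hbase/lib"); // placeholder path

    ClassLoader parent = DynamicLoaderSketch.class.getClassLoader();
    DynamicClassLoader loader = new DynamicClassLoader(conf, parent);

    // The parent loader is consulted first; on ClassNotFoundException the
    // loader picks up any new jars (local first, then the remote dir, copied
    // to the local dir) and retries findClass().
    Class<?> clazz = Class.forName("MyFilter", true, loader); // placeholder class
    System.out.println("Loaded " + clazz.getName());
  }
}

This mirrors what ProtobufUtil now does for filter and comparator classes: Class.forName(type, true, CLASS_LOADER) with a DynamicClassLoader whose parent is ProtobufUtil's own class loader, so standard classpath classes still resolve exactly as before and only unresolved names trigger the jar scan.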