HBASE-14334 Move Memcached block cache into its own optional module.

Elliott Clark 2015-08-28 16:13:36 -07:00
parent 08eabb89f6
commit 7b08f4c8be
7 changed files with 409 additions and 7 deletions

@@ -185,6 +185,11 @@
       <artifactId>hbase-rest</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-external-blockcache</artifactId>
+      <version>${project.version}</version>
+    </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-testing-util</artifactId>

@@ -46,6 +46,7 @@
         <include>org.apache.hbase:hbase-server</include>
         <include>org.apache.hbase:hbase-shell</include>
         <include>org.apache.hbase:hbase-thrift</include>
+        <include>org.apache.hbase:hbase-external-blockcache</include>
       </includes>
       <!-- Binaries for the dependencies also go in the hbase-jars directory -->
       <binaries>

@@ -0,0 +1,382 @@
<?xml version="1.0"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<!--
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-->
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>hbase</artifactId>
<groupId>org.apache.hbase</groupId>
<version>2.0.0-SNAPSHOT</version>
<relativePath>..</relativePath>
</parent>
<artifactId>hbase-external-blockcache</artifactId>
<name>Apache HBase - External Block Cache</name>
<description>
    HBase module that provides an out-of-process block cache.
    Memcached is currently the reference implementation for the external block cache.
    External block caches let HBase take advantage of more capable caches that can outlive
    the HBase regionserver process and are not necessarily tied to the lifetime of a single
    machine. However, external block caches add extra operational overhead.
</description>
<build>
<resources>
<resource>
<directory>src/main/resources/</directory>
<includes>
<include>hbase-default.xml</include>
</includes>
</resource>
</resources>
<testResources>
<testResource>
<directory>src/test/resources/META-INF/</directory>
<targetPath>META-INF/</targetPath>
<includes>
<include>NOTICE</include>
</includes>
<filtering>true</filtering>
</testResource>
</testResources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-site-plugin</artifactId>
<configuration>
<skip>true</skip>
</configuration>
</plugin>
<plugin>
<!--Make it so assembly:single does nothing in here-->
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<skipAssembly>true</skipAssembly>
</configuration>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<executions>
<!-- Add the generated sources -->
<execution>
<id>versionInfo-source</id>
<phase>generate-sources</phase>
<goals>
<goal>add-source</goal>
</goals>
<configuration>
<sources>
<source>${project.build.directory}/generated-sources/java</source>
</sources>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<properties>
<property>
<name>listener</name>
<value>org.apache.hadoop.hbase.ResourceCheckerJUnitListener</value>
</property>
</properties>
</configuration>
<!-- Always skip the second part executions, since we only run
simple unit tests in this module -->
<executions>
<execution>
<id>secondPartTestsExecution</id>
<phase>test</phase>
<goals>
<goal>test</goal>
</goals>
<configuration>
<skip>true</skip>
</configuration>
</execution>
</executions>
</plugin>
<!-- Make a jar and put the sources in the jar -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<configuration>
<excludeResources>true</excludeResources>
<includes>
<include>src/main/java</include>
<include>${project.build.outputDirectory}/META-INF</include>
</includes>
</configuration>
</plugin>
</plugins>
<pluginManagement>
<plugins>
<!--This plugin's configuration is used to store Eclipse m2e settings
only. It has no influence on the Maven build itself. -->
<plugin>
<groupId>org.eclipse.m2e</groupId>
<artifactId>lifecycle-mapping</artifactId>
<version>1.0.0</version>
<configuration>
<lifecycleMappingMetadata>
<pluginExecutions>
<pluginExecution>
<pluginExecutionFilter>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<versionRange>[${maven.antrun.version}]</versionRange>
<goals>
<goal>run</goal>
</goals>
</pluginExecutionFilter>
<action>
<execute/>
</action>
</pluginExecution>
<pluginExecution>
<pluginExecutionFilter>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<versionRange>[2.8,)</versionRange>
<goals>
<goal>build-classpath</goal>
</goals>
</pluginExecutionFilter>
<action>
<ignore></ignore>
</action>
</pluginExecution>
<pluginExecution>
<pluginExecutionFilter>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<versionRange>[3.2,)</versionRange>
<goals>
<goal>compile</goal>
</goals>
</pluginExecutionFilter>
<action>
<ignore></ignore>
</action>
</pluginExecution>
</pluginExecutions>
</lifecycleMappingMetadata>
</configuration>
</plugin>
</plugins>
</pluginManagement>
</build>
<dependencies>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-server</artifactId>
</dependency>
<dependency>
<groupId>net.spy</groupId>
<artifactId>spymemcached</artifactId>
<optional>true</optional>
</dependency>
</dependencies>
<profiles>
    <!-- Needs to match the apache-release profile defined in the Apache parent pom -->
<profile>
<id>apache-release</id>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
<executions>
<execution>
<id>license-javadocs</id>
<phase>prepare-package</phase>
<goals>
<goal>copy-resources</goal>
</goals>
<configuration>
<outputDirectory>${project.build.directory}/apidocs</outputDirectory>
<resources>
<resource>
<directory>src/main/javadoc/META-INF/</directory>
<targetPath>META-INF/</targetPath>
<includes>
<include>NOTICE</include>
</includes>
<filtering>true</filtering>
</resource>
</resources>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<!-- Skip the tests in this module -->
<profile>
<id>skipCommonTests</id>
<activation>
<property>
<name>skipCommonTests</name>
</property>
</activation>
<properties>
<surefire.skipFirstPart>true</surefire.skipFirstPart>
</properties>
</profile>
    <!-- profile against Hadoop 1.1.x: activated with -Dhadoop.profile=1.1. It has to use the same
         activation property as the parent Hadoop 1.1.x profile so that both are enabled at
         the same time. -->
<profile>
<id>hadoop-1.1</id>
<activation>
<property>
<!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
<!--h1--><name>hadoop.profile</name><value>1.1</value>
</property>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-core</artifactId>
</dependency>
</dependencies>
</profile>
<!-- profile against Hadoop 1.0.x:
mvn -Dhadoop.profile=1.0
-->
<profile>
<id>hadoop-1.0</id>
<activation>
<property>
<name>hadoop.profile</name>
<value>1.0</value>
</property>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-core</artifactId>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</dependency>
</dependencies>
</profile>
    <!--
      profile for building against Hadoop 2.x. This is the default; it is active whenever
      hadoop.profile is not set.
    -->
<profile>
<id>hadoop-2.0</id>
<activation>
<property>
<!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
<!--h2--><name>!hadoop.profile</name>
</property>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>create-mrapp-generated-classpath</id>
<phase>generate-test-resources</phase>
<goals>
<goal>build-classpath</goal>
</goals>
<configuration>
                  <!-- Needed for the unit tests: generates the classpath that must be
                       present in the environment of the launch container in the
                       mini MR/YARN cluster.
                  -->
<outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<!--
profile for building against Hadoop 3.0.x. Activate using:
mvn -Dhadoop.profile=3.0
-->
<profile>
<id>hadoop-3.0</id>
<activation>
<property>
<name>hadoop.profile</name>
<value>3.0</value>
</property>
</activation>
<properties>
<hadoop.version>3.0-SNAPSHOT</hadoop.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>create-mrapp-generated-classpath</id>
<phase>generate-test-resources</phase>
<goals>
<goal>build-classpath</goal>
</goals>
<configuration>
                  <!-- Needed for the unit tests: generates the classpath that must be
                       present in the environment of the launch container in the
                       mini MR/YARN cluster.
                  -->
<outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>
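
For orientation, the module described in the pom above only takes effect when the external block cache is switched on in the regionserver configuration. Below is a minimal sketch, not part of the commit, of how a deployment might enable it programmatically; the key names hbase.blockcache.use.external and hbase.blockcache.external.class are the ones CacheConfig is assumed to read (verify them against your HBase version), and the class name is the Memcached implementation this commit moves into the new module.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class EnableExternalBlockCacheSketch {
      public static void main(String[] args) {
        // Start from the usual HBase configuration (hbase-site.xml on the classpath).
        Configuration conf = HBaseConfiguration.create();

        // Assumed configuration keys -- confirm against the CacheConfig constants
        // in your HBase version before relying on them.
        conf.setBoolean("hbase.blockcache.use.external", true);
        conf.set("hbase.blockcache.external.class",
            "org.apache.hadoop.hbase.io.hfile.MemcachedBlockCache");

        System.out.println("External block cache requested: "
            + conf.getBoolean("hbase.blockcache.use.external", false));
      }
    }

In practice these values would normally live in hbase-site.xml rather than in code; the sketch only shows which knobs the new module hangs off.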

@@ -535,11 +535,7 @@
       <groupId>io.netty</groupId>
       <artifactId>netty-all</artifactId>
     </dependency>
-    <dependency>
-      <groupId>net.spy</groupId>
-      <artifactId>spymemcached</artifactId>
-      <optional>true</optional>
-    </dependency>
     <!-- tracing Dependencies -->
     <dependency>
       <groupId>org.apache.htrace</groupId>

@@ -139,9 +139,16 @@ public class CacheConfig {
    * This is used for config.
    */
   private static enum ExternalBlockCaches {
-    memcached(MemcachedBlockCache.class);
+    memcached("org.apache.hadoop.hbase.io.hfile.MemcachedBlockCache");
     // TODO(eclark): Consider more. Redis, etc.
     Class<? extends BlockCache> clazz;
+    ExternalBlockCaches(String clazzName) {
+      try {
+        clazz = (Class<? extends BlockCache>) Class.forName(clazzName);
+      } catch (ClassNotFoundException cnef) {
+        clazz = null;
+      }
+    }
     ExternalBlockCaches(Class<? extends BlockCache> clazz) {
       this.clazz = clazz;
     }
@@ -572,7 +579,12 @@ public class CacheConfig {
     try {
       klass = ExternalBlockCaches.valueOf(c.get(EXTERNAL_BLOCKCACHE_CLASS_KEY, "memcache")).clazz;
     } catch (IllegalArgumentException exception) {
-      klass = c.getClass(EXTERNAL_BLOCKCACHE_CLASS_KEY, MemcachedBlockCache.class);
+      try {
+        klass = c.getClass(EXTERNAL_BLOCKCACHE_CLASS_KEY, Class.forName(
+            "org.apache.hadoop.hbase.io.hfile.MemcachedBlockCache"));
+      } catch (ClassNotFoundException e) {
+        return null;
+      }
     }
     // Now try and create an instance of the block cache.
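
The point of this hunk is that CacheConfig now resolves the Memcached implementation by name instead of referencing the class directly, so hbase-server compiles and runs even when the optional hbase-external-blockcache jar is absent. Below is a minimal sketch of that pattern, assuming the hbase.blockcache.external.class key and using a hypothetical loadExternalBlockCacheClass helper in place of the real CacheConfig logic:

    import org.apache.hadoop.conf.Configuration;

    public final class ReflectiveBlockCacheLookupSketch {

      // Hypothetical helper mirroring the shape of the patched CacheConfig code:
      // look the class up by name and fall back to null when the optional module
      // is not on the classpath.
      static Class<?> loadExternalBlockCacheClass(Configuration conf) {
        String name = conf.get("hbase.blockcache.external.class",
            "org.apache.hadoop.hbase.io.hfile.MemcachedBlockCache");
        try {
          return Class.forName(name);
        } catch (ClassNotFoundException e) {
          // hbase-external-blockcache (or another implementation) is missing;
          // callers treat null as "no external cache available".
          return null;
        }
      }

      public static void main(String[] args) {
        Class<?> clazz = loadExternalBlockCacheClass(new Configuration());
        System.out.println(clazz == null
            ? "No external block cache implementation on the classpath"
            : "Resolved " + clazz.getName());
      }
    }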

@@ -67,6 +67,7 @@
     <module>hbase-annotations</module>
     <module>hbase-rest</module>
     <module>hbase-checkstyle</module>
+    <module>hbase-external-blockcache</module>
     <module>hbase-shaded</module>
     <module>hbase-spark</module>
   </modules>
@@ -1434,6 +1435,11 @@
       <groupId>org.apache.hbase</groupId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-external-blockcache</artifactId>
+      <version>${project.version}</version>
+    </dependency>
     <dependency>
       <artifactId>hbase-it</artifactId>
       <groupId>org.apache.hbase</groupId>