From 761626a5ad89c37ee710662e43cc4f74ae334fca Mon Sep 17 00:00:00 2001
From: Viraj Jasani
Date: Sun, 28 Jul 2019 07:31:45 +0530
Subject: [PATCH] HBASE-22743 : ClientUtils for Demo Client classes (#413)

---
 .../hadoop/hbase/rest/RESTDemoClient.java     |  32 +----
 .../hadoop/hbase/thrift/DemoClient.java       |  89 ++-----
 .../hadoop/hbase/thrift/HttpDoAsClient.java   |  51 ++------
 .../hadoop/hbase/thrift2/DemoClient.java      |  33 +----
 .../apache/hadoop/hbase/util/ClientUtils.java | 120 ++++++++++++++++++
 5 files changed, 151 insertions(+), 174 deletions(-)
 create mode 100644 hbase-examples/src/main/java/org/apache/hadoop/hbase/util/ClientUtils.java

diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/rest/RESTDemoClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/rest/RESTDemoClient.java
index 19fae470ce0..bf3aafa0f5f 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/rest/RESTDemoClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/rest/RESTDemoClient.java
@@ -19,12 +19,8 @@
 package org.apache.hadoop.hbase.rest;
 
 import java.security.PrivilegedExceptionAction;
-import java.util.HashMap;
-import java.util.Map;
 
 import javax.security.auth.Subject;
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.Configuration;
 import javax.security.auth.login.LoginContext;
 
 import org.apache.hadoop.hbase.Cell;
@@ -37,6 +33,7 @@ import org.apache.hadoop.hbase.rest.client.Client;
 import org.apache.hadoop.hbase.rest.client.Cluster;
 import org.apache.hadoop.hbase.rest.client.RemoteHTable;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ClientUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
@@ -112,32 +109,7 @@ public class RESTDemoClient {
       return new Subject();
     }
-    /*
-     * To authenticate the demo client, kinit should be invoked ahead. Here we try to get the
-     * Kerberos credential from the ticket cache.
-     */
-    LoginContext context = new LoginContext("", new Subject(), null, new Configuration() {
-      @Override
-      public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
-        Map<String, String> options = new HashMap<>();
-        options.put("useKeyTab", "false");
-        options.put("storeKey", "false");
-        options.put("doNotPrompt", "true");
-        options.put("useTicketCache", "true");
-        options.put("renewTGT", "true");
-        options.put("refreshKrb5Config", "true");
-        options.put("isInitiator", "true");
-        String ticketCache = System.getenv("KRB5CCNAME");
-        if (ticketCache != null) {
-          options.put("ticketCache", ticketCache);
-        }
-        options.put("debug", "true");
-
-        return new AppConfigurationEntry[] {
-            new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
-                AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, options) };
-      }
-    });
+    LoginContext context = ClientUtils.getLoginContext();
     context.login();
     return context.getSubject();
   }
 }
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
index dba27191852..c72a1c48490 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
@@ -20,20 +20,13 @@ package org.apache.hadoop.hbase.thrift;
 
 import java.io.UnsupportedEncodingException;
 import java.nio.ByteBuffer;
-import java.nio.charset.CharacterCodingException;
-import java.nio.charset.Charset;
-import java.nio.charset.CharsetDecoder;
 import java.security.PrivilegedExceptionAction;
 import java.text.NumberFormat;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.SortedMap;
-import java.util.TreeMap;
 import javax.security.auth.Subject;
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.Configuration;
 import javax.security.auth.login.LoginContext;
 import javax.security.sasl.Sasl;
 import org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
@@ -42,6 +35,8 @@ import org.apache.hadoop.hbase.thrift.generated.Hbase;
 import org.apache.hadoop.hbase.thrift.generated.Mutation;
 import org.apache.hadoop.hbase.thrift.generated.TCell;
 import org.apache.hadoop.hbase.thrift.generated.TRowResult;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ClientUtils;
 import org.apache.thrift.protocol.TBinaryProtocol;
 import org.apache.thrift.protocol.TProtocol;
 import org.apache.thrift.transport.TSaslClientTransport;
@@ -57,7 +52,6 @@ public class DemoClient {
 
   static protected int port;
   static protected String host;
-  CharsetDecoder decoder = null;
 
   private static boolean secure = false;
   private static String serverPrincipal = "hbase";
@@ -98,16 +92,6 @@
   }
 
   DemoClient() {
-    decoder = Charset.forName("UTF-8").newDecoder();
-  }
-
-  // Helper to translate byte[]'s to UTF8 strings
-  private String utf8(byte[] buf) {
-    try {
-      return decoder.decode(ByteBuffer.wrap(buf)).toString();
-    } catch (CharacterCodingException e) {
-      return "[INVALID UTF-8]";
-    }
   }
 
   // Helper to translate strings to UTF8 bytes
@@ -148,15 +132,15 @@
     System.out.println("scanning tables...");
 
     for (ByteBuffer name : client.getTableNames()) {
-      System.out.println(" found: " + utf8(name.array()));
+      System.out.println(" found: " + ClientUtils.utf8(name.array()));
 
-      if (utf8(name.array()).equals(utf8(t))) {
+      if (ClientUtils.utf8(name.array()).equals(ClientUtils.utf8(t))) {
         if (client.isTableEnabled(name)) {
-          System.out.println(" disabling table: " + utf8(name.array()));
+          System.out.println(" disabling table: " + ClientUtils.utf8(name.array()));
           client.disableTable(name);
         }
 
-        System.out.println(" deleting table: " + utf8(name.array()));
+        System.out.println(" deleting table: " + ClientUtils.utf8(name.array()));
         client.deleteTable(name);
       }
     }
@@ -174,7 +158,7 @@
     col.timeToLive = Integer.MAX_VALUE;
     columns.add(col);
 
-    System.out.println("creating table: " + utf8(t));
+    System.out.println("creating table: " + ClientUtils.utf8(t));
 
     try {
       client.createTable(ByteBuffer.wrap(t), columns);
@@ -182,11 +166,12 @@
       System.out.println("WARN: " + ae.message);
     }
 
-    System.out.println("column families in " + utf8(t) + ": ");
+    System.out.println("column families in " + ClientUtils.utf8(t) + ": ");
 
     Map<ByteBuffer, ColumnDescriptor> columnMap = client.getColumnDescriptors(ByteBuffer.wrap(t));
     for (ColumnDescriptor col2 : columnMap.values()) {
-      System.out.println(" column: " + utf8(col2.name.array()) + ", maxVer: " + col2.maxVersions);
+      System.out.println(" column: " + ClientUtils.utf8(col2.name.array()) + ", maxVer: " +
+          col2.maxVersions);
     }
 
     Map<ByteBuffer, ByteBuffer> dummyAttributes = null;
@@ -360,31 +345,15 @@
     StringBuilder rowStr = new StringBuilder();
     for (TCell cell : versions) {
-      rowStr.append(utf8(cell.value.array()));
+      rowStr.append(ClientUtils.utf8(cell.value.array()));
       rowStr.append("; ");
     }
 
-    System.out.println("row: " + utf8(row.array()) + ", values: " + rowStr);
+    System.out.println("row: " + ClientUtils.utf8(row.array()) + ", values: " + rowStr);
   }
 
   private void printRow(TRowResult rowResult) {
-    // copy values into a TreeMap to get them in sorted order
-    TreeMap<String, TCell> sorted = new TreeMap<>();
-
-    for (Map.Entry<ByteBuffer, TCell> column : rowResult.columns.entrySet()) {
-      sorted.put(utf8(column.getKey().array()), column.getValue());
-    }
-
-    StringBuilder rowStr = new StringBuilder();
-
-    for (SortedMap.Entry<String, TCell> entry : sorted.entrySet()) {
-      rowStr.append(entry.getKey());
-      rowStr.append(" => ");
-      rowStr.append(utf8(entry.getValue().value.array()));
-      rowStr.append("; ");
-    }
-
-    System.out.println("row: " + utf8(rowResult.row.array()) + ", cols: " + rowStr);
+    ClientUtils.printRow(rowResult);
   }
 
   private void printRow(List<TRowResult> rows) {
@@ -398,37 +367,7 @@
       return new Subject();
     }
-    /*
-     * To authenticate the DemoClient, kinit should be invoked ahead.
-     * Here we try to get the Kerberos credential from the ticket cache.
-     */
-    LoginContext context = new LoginContext("", new Subject(), null,
-        new Configuration() {
-          @Override
-          public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
-            Map<String, String> options = new HashMap<>();
-            options.put("useKeyTab", "false");
-            options.put("storeKey", "false");
-            options.put("doNotPrompt", "true");
-            options.put("useTicketCache", "true");
-            options.put("renewTGT", "true");
-            options.put("refreshKrb5Config", "true");
-            options.put("isInitiator", "true");
-            String ticketCache = System.getenv("KRB5CCNAME");
-
-            if (ticketCache != null) {
-              options.put("ticketCache", ticketCache);
-            }
-
-            options.put("debug", "true");
-
-            return new AppConfigurationEntry[]{
-                new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
-                    AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
-                    options)};
-          }
-        });
-
+    LoginContext context = ClientUtils.getLoginContext();
     context.login();
     return context.getSubject();
   }
 }
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
index 59890ce9def..81c5d9a70db 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
@@ -21,9 +21,6 @@ package org.apache.hadoop.hbase.thrift;
 import java.io.File;
 import java.io.UnsupportedEncodingException;
 import java.nio.ByteBuffer;
-import java.nio.charset.CharacterCodingException;
-import java.nio.charset.Charset;
-import java.nio.charset.CharsetDecoder;
 import java.security.Principal;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
@@ -33,8 +30,6 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.SortedMap;
-import java.util.TreeMap;
 import javax.security.auth.Subject;
 import javax.security.auth.kerberos.KerberosPrincipal;
 import javax.security.auth.login.AppConfigurationEntry;
@@ -46,6 +41,7 @@ import org.apache.hadoop.hbase.thrift.generated.Hbase;
 import org.apache.hadoop.hbase.thrift.generated.TCell;
 import org.apache.hadoop.hbase.thrift.generated.TRowResult;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ClientUtils;
 import org.apache.thrift.protocol.TBinaryProtocol;
 import org.apache.thrift.protocol.TProtocol;
 import org.apache.thrift.transport.THttpClient;
@@ -67,7 +63,6 @@ public class HttpDoAsClient {
 
   static protected int port;
   static protected String host;
-  CharsetDecoder decoder = null;
   private static boolean secure = false;
   static protected String doAsUser = null;
   static protected String principal = null;
@@ -110,16 +105,6 @@
   }
 
   HttpDoAsClient() {
-    decoder = Charset.forName("UTF-8").newDecoder();
-  }
-
-  // Helper to translate byte[]'s to UTF8 strings
-  private String utf8(byte[] buf) {
-    try {
-      return decoder.decode(ByteBuffer.wrap(buf)).toString();
-    } catch (CharacterCodingException e) {
-      return "[INVALID UTF-8]";
-    }
   }
 
   // Helper to translate strings to UTF8 bytes
@@ -149,13 +134,13 @@
 
     // System.out.println("scanning tables...");
     for (ByteBuffer name : refresh(client, httpClient).getTableNames()) {
-      System.out.println(" found: " + utf8(name.array()));
-      if (utf8(name.array()).equals(utf8(t))) {
+      System.out.println(" found: " + ClientUtils.utf8(name.array()));
+      if (ClientUtils.utf8(name.array()).equals(ClientUtils.utf8(t))) {
        if (refresh(client, httpClient).isTableEnabled(name)) {
-          System.out.println(" disabling table: " + utf8(name.array()));
+          System.out.println(" disabling table: " + ClientUtils.utf8(name.array()));
          refresh(client, httpClient).disableTable(name);
        }
-        System.out.println(" deleting table: " + utf8(name.array()));
+        System.out.println(" deleting table: " + ClientUtils.utf8(name.array()));
        refresh(client, httpClient).deleteTable(name);
       }
     }
@@ -175,7 +160,7 @@
     col.timeToLive = Integer.MAX_VALUE;
     columns.add(col);
 
-    System.out.println("creating table: " + utf8(t));
+    System.out.println("creating table: " + ClientUtils.utf8(t));
 
     try {
       refresh(client, httpClient).createTable(ByteBuffer.wrap(t), columns);
@@ -183,11 +168,12 @@
       System.out.println("WARN: " + ae.message);
     }
 
-    System.out.println("column families in " + utf8(t) + ": ");
+    System.out.println("column families in " + ClientUtils.utf8(t) + ": ");
 
     Map<ByteBuffer, ColumnDescriptor> columnMap = refresh(client, httpClient)
        .getColumnDescriptors(ByteBuffer.wrap(t));
     for (ColumnDescriptor col2 : columnMap.values()) {
-      System.out.println(" column: " + utf8(col2.name.array()) + ", maxVer: " + col2.maxVersions);
+      System.out.println(" column: " + ClientUtils.utf8(col2.name.array()) + ", maxVer: " +
+          col2.maxVersions);
     }
     transport.close();
@@ -239,27 +225,14 @@
   private void printVersions(ByteBuffer row, List<TCell> versions) {
     StringBuilder rowStr = new StringBuilder();
     for (TCell cell : versions) {
-      rowStr.append(utf8(cell.value.array()));
+      rowStr.append(ClientUtils.utf8(cell.value.array()));
       rowStr.append("; ");
     }
-    System.out.println("row: " + utf8(row.array()) + ", values: " + rowStr);
+    System.out.println("row: " + ClientUtils.utf8(row.array()) + ", values: " + rowStr);
   }
 
   private void printRow(TRowResult rowResult) {
-    // copy values into a TreeMap to get them in sorted order
-    TreeMap<String, TCell> sorted = new TreeMap<>();
-    for (Map.Entry<ByteBuffer, TCell> column : rowResult.columns.entrySet()) {
-      sorted.put(utf8(column.getKey().array()), column.getValue());
-    }
-
-    StringBuilder rowStr = new StringBuilder();
-    for (SortedMap.Entry<String, TCell> entry : sorted.entrySet()) {
-      rowStr.append(entry.getKey());
-      rowStr.append(" => ");
-      rowStr.append(utf8(entry.getValue().value.array()));
-      rowStr.append("; ");
-    }
-    System.out.println("row: " + utf8(rowResult.row.array()) + ", cols: " + rowStr);
+    ClientUtils.printRow(rowResult);
   }
 
   static Subject getSubject() throws Exception {
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java
index a7201439416..fffe682e619 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java
@@ -25,8 +25,6 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import javax.security.auth.Subject;
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.Configuration;
 import javax.security.auth.login.LoginContext;
 import javax.security.sasl.Sasl;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -35,6 +33,8 @@ import org.apache.hadoop.hbase.thrift2.generated.TGet;
 import org.apache.hadoop.hbase.thrift2.generated.THBaseService;
 import org.apache.hadoop.hbase.thrift2.generated.TPut;
 import org.apache.hadoop.hbase.thrift2.generated.TResult;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ClientUtils;
 import org.apache.thrift.protocol.TBinaryProtocol;
 import org.apache.thrift.protocol.TProtocol;
 import org.apache.thrift.transport.TFramedTransport;
@@ -152,34 +152,7 @@
       return new Subject();
     }
-    /*
-     * To authenticate the DemoClient, kinit should be invoked ahead.
-     * Here we try to get the Kerberos credential from the ticket cache.
-     */
-    LoginContext context = new LoginContext("", new Subject(), null,
-        new Configuration() {
-          @Override
-          public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
-            Map<String, String> options = new HashMap<>();
-            options.put("useKeyTab", "false");
-            options.put("storeKey", "false");
-            options.put("doNotPrompt", "true");
-            options.put("useTicketCache", "true");
-            options.put("renewTGT", "true");
-            options.put("refreshKrb5Config", "true");
-            options.put("isInitiator", "true");
-            String ticketCache = System.getenv("KRB5CCNAME");
-            if (ticketCache != null) {
-              options.put("ticketCache", ticketCache);
-            }
-            options.put("debug", "true");
-
-            return new AppConfigurationEntry[]{
-                new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
-                    AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
-                    options)};
-          }
-        });
+    LoginContext context = ClientUtils.getLoginContext();
     context.login();
     return context.getSubject();
   }
 }
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/util/ClientUtils.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/util/ClientUtils.java
new file mode 100644
index 00000000000..66fad57469c
--- /dev/null
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/util/ClientUtils.java
@@ -0,0 +1,120 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.hadoop.hbase.util;
+
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import javax.security.auth.Subject;
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.Configuration;
+import javax.security.auth.login.LoginContext;
+import javax.security.auth.login.LoginException;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.hbase.thrift.generated.TCell;
+import org.apache.hadoop.hbase.thrift.generated.TRowResult;
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * Common Utility class for clients
+ */
+@InterfaceAudience.Private
+public final class ClientUtils {
+
+  private ClientUtils() {
+    // Empty block
+  }
+
+  /**
+   * To authenticate the demo client, kinit should be invoked ahead. Here we try to get the
+   * Kerberos credential from the ticket cache
+   *
+   * @return LoginContext Object
+   * @throws LoginException Exception thrown if unable to get LoginContext
+   */
+  public static LoginContext getLoginContext() throws LoginException {
+
+    return new LoginContext(StringUtils.EMPTY, new Subject(), null, new Configuration() {
+      @Override
+      public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
+        Map<String, String> options = new HashMap<>();
+        options.put("useKeyTab", "false");
+        options.put("storeKey", "false");
+        options.put("doNotPrompt", "true");
+        options.put("useTicketCache", "true");
+        options.put("renewTGT", "true");
+        options.put("refreshKrb5Config", "true");
+        options.put("isInitiator", "true");
+        String ticketCache = System.getenv("KRB5CCNAME");
+        if (ticketCache != null) {
+          options.put("ticketCache", ticketCache);
+        }
+        options.put("debug", "true");
+
+        return new AppConfigurationEntry[]{new AppConfigurationEntry(
+            "com.sun.security.auth.module.Krb5LoginModule",
+            AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, options)};
+      }
+    });
+
+  }
+
+  /**
+   * copy values into a TreeMap to get them in sorted order and print it
+   *
+   * @param rowResult Holds row name and then a map of columns to cells
+   */
+  public static void printRow(final TRowResult rowResult) {
+
+    TreeMap<String, TCell> sorted = new TreeMap<>();
+    for (Map.Entry<ByteBuffer, TCell> column : rowResult.columns.entrySet()) {
+      sorted.put(utf8(column.getKey().array()), column.getValue());
+    }
+
+    StringBuilder rowStr = new StringBuilder();
+    for (SortedMap.Entry<String, TCell> entry : sorted.entrySet()) {
+      rowStr.append(entry.getKey());
+      rowStr.append(" => ");
+      rowStr.append(utf8(entry.getValue().value.array()));
+      rowStr.append("; ");
+    }
+    System.out.println("row: " + utf8(rowResult.row.array()) + ", cols: " + rowStr);
+
+  }
+
+  /**
+   * Helper to translate byte[]'s to UTF8 strings
+   *
+   * @param buf byte array buffer
+   * @return UTF8 decoded string value
+   */
+  public static String utf8(final byte[] buf) {
+    try {
+      return Bytes.toString(buf);
+    } catch (IllegalArgumentException e) {
+      return "[INVALID UTF-8]";
+    }
+  }
+
+}
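
Usage note (not part of the patch): a minimal sketch of how a demo client is expected to call the new ClientUtils helpers after this change, mirroring the getSubject() and printRow() call sites above. The class name ClientUtilsUsageSketch is hypothetical, and kinit is assumed to have populated the Kerberos ticket cache before getLoginContext() is invoked.

import javax.security.auth.Subject;
import javax.security.auth.login.LoginContext;

import org.apache.hadoop.hbase.thrift.generated.TRowResult;
import org.apache.hadoop.hbase.util.ClientUtils;

public class ClientUtilsUsageSketch {

  // Same pattern the refactored demo clients use in getSubject():
  // build a LoginContext from the ticket cache, log in, and return the Subject.
  static Subject getSubject() throws Exception {
    LoginContext context = ClientUtils.getLoginContext();
    context.login();
    return context.getSubject();
  }

  // byte[]-to-String conversion and row printing now go through the shared helpers.
  static void dump(byte[] tableName, TRowResult result) {
    System.out.println("table: " + ClientUtils.utf8(tableName));
    ClientUtils.printRow(result);
  }
}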