HADOOP-2589 Change class/package names from shell to hql

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk/src/contrib/hbase@612016 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2008-01-15 05:14:34 +00:00
parent 7a8ebbcefb
commit ff4aac71c3
45 changed files with 179 additions and 209 deletions

View File

@@ -179,6 +179,8 @@ Trunk (unreleased changes)
 HADOOP-2548 Make TableMap and TableReduce generic
             (Frederik Hedberg via Stack)
 HADOOP-2557 Shell count function (Edward Yoon via Stack)
+HADOOP-2589 Change class/package names from shell to hql
+            (Edward Yoon via Stack)
 
 Release 0.15.1
 Branch 0.15

View File

@@ -66,12 +66,12 @@
 <target name="javacc" if="javacc.home">
   <echo message="javacc.home: ${javacc.home}"/>
-  <property name="hbaseshell.src.dir"
-    value="${src.dir}/org/apache/hadoop/hbase/shell" />
-  <mkdir dir="${hbaseshell.src.dir}/generated" />
+  <property name="hql.src.dir"
+    value="${src.dir}/org/apache/hadoop/hbase/hql" />
+  <mkdir dir="${hql.src.dir}/generated" />
   <javacc
-    target="${hbaseshell.src.dir}/HBaseShell.jj"
-    outputdirectory="${hbaseshell.src.dir}/generated"
+    target="${hql.src.dir}/HQLParser.jj"
+    outputdirectory="${hql.src.dir}/generated"
     javacchome="${javacc.home}"
   />
 </target>

View File

@@ -229,7 +229,7 @@
 </property>
 <property>
   <name>hbaseshell.formatter</name>
-  <value>org.apache.hadoop.hbase.shell.formatter.AsciiTableFormatter</value>
+  <value>org.apache.hadoop.hbase.hql.formatter.AsciiTableFormatter</value>
   <description>TableFormatter to use outputting HQL result sets.
   </description>
 </property>
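For orientation: this property names the TableFormatter implementation the shell should use, and the TableFormatterFactory hunk further down loads whatever class is configured via reflection. A minimal sketch of that pattern, assuming (as the factory's imports suggest) a constructor that takes a Writer; the class and method names below are illustrative, not the project's exact API:

import java.io.Writer;
import java.lang.reflect.Constructor;
import org.apache.hadoop.conf.Configuration;

// Illustrative sketch only: instantiate the formatter class named by
// "hbaseshell.formatter", roughly the way a TableFormatterFactory might.
final class FormatterSketch {
  static Object newFormatter(Configuration conf, Writer out) throws Exception {
    String className = conf.get("hbaseshell.formatter",
        "org.apache.hadoop.hbase.hql.formatter.AsciiTableFormatter");
    Class<?> clazz = Class.forName(className);
    Constructor<?> ctor = clazz.getConstructor(Writer.class); // assumes a Writer-arg constructor
    return ctor.newInstance(out);
  }
}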

View File

@@ -25,16 +25,13 @@ import java.io.Writer;
 import jline.ConsoleReader;
-import org.apache.hadoop.hbase.shell.Command;
-import org.apache.hadoop.hbase.shell.HelpCommand;
-import org.apache.hadoop.hbase.shell.ReturnMsg;
-import org.apache.hadoop.hbase.shell.ShellSecurityManager;
-import org.apache.hadoop.hbase.shell.TableFormatter;
-import org.apache.hadoop.hbase.shell.TableFormatterFactory;
-import org.apache.hadoop.hbase.shell.formatter.HtmlTableFormatter;
-import org.apache.hadoop.hbase.shell.generated.ParseException;
-import org.apache.hadoop.hbase.shell.generated.Parser;
-import org.apache.hadoop.hbase.shell.generated.TokenMgrError;
+import org.apache.hadoop.hbase.hql.HQLClient;
+import org.apache.hadoop.hbase.hql.HelpCommand;
+import org.apache.hadoop.hbase.hql.ReturnMsg;
+import org.apache.hadoop.hbase.hql.HQLSecurityManager;
+import org.apache.hadoop.hbase.hql.TableFormatter;
+import org.apache.hadoop.hbase.hql.TableFormatterFactory;
+import org.apache.hadoop.hbase.hql.formatter.HtmlTableFormatter;
 
 /**
  * An hbase shell.
@@ -90,7 +87,7 @@ public class Shell {
 HBaseConfiguration conf = new HBaseConfiguration();
 ConsoleReader reader = new ConsoleReader();
-System.setSecurityManager(new ShellSecurityManager());
+System.setSecurityManager(new HQLSecurityManager());
 reader.setBellEnabled(conf.getBoolean("hbaseshell.jline.bell.enabled",
   DEFAULT_BELL_ENABLED));
 Writer out = new OutputStreamWriter(System.out, "UTF-8");
@@ -112,22 +109,9 @@ public class Shell {
 if (isEndOfCommand(extendedLine)) {
   queryStr.append(" " + extendedLine);
   long start = System.currentTimeMillis();
-  Parser parser = new Parser(queryStr.toString(), out, tableFormater);
-  ReturnMsg rs = null;
-  try {
-    Command cmd = parser.terminatedCommand();
-    if (cmd != null) {
-      rs = cmd.execute(conf);
-    }
-  } catch (ParseException pe) {
-    String[] msg = pe.getMessage().split("[\n]");
-    System.out.println("Syntax error : Type 'help;' for usage.\nMessage : "
-        + msg[0]);
-  } catch (TokenMgrError te) {
-    String[] msg = te.getMessage().split("[\n]");
-    System.out.println("Lexical error : Type 'help;' for usage.\nMessage : "
-        + msg[0]);
-  }
+  HQLClient hql = new HQLClient(conf, MASTER_ADDRESS, out, tableFormater);
+  ReturnMsg rs = hql.executeQuery(queryStr.toString());
   long end = System.currentTimeMillis();
   if (rs != null && rs.getType() > -1)

View File

@@ -5,11 +5,11 @@ import javax.servlet.http.*;
 import javax.servlet.jsp.*;
 import java.util.*;
 import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.shell.TableFormatter;
-import org.apache.hadoop.hbase.shell.ReturnMsg;
-import org.apache.hadoop.hbase.shell.generated.Parser;
-import org.apache.hadoop.hbase.shell.Command;
-import org.apache.hadoop.hbase.shell.formatter.HtmlTableFormatter;
+import org.apache.hadoop.hbase.hql.TableFormatter;
+import org.apache.hadoop.hbase.hql.ReturnMsg;
+import org.apache.hadoop.hbase.hql.generated.HQLParser;
+import org.apache.hadoop.hbase.hql.Command;
+import org.apache.hadoop.hbase.hql.formatter.HtmlTableFormatter;
 
 public final class hql_jsp extends org.apache.jasper.runtime.HttpJspBase
     implements org.apache.jasper.runtime.JspSourceDependent {
@@ -60,7 +60,7 @@ public final class hql_jsp extends org.apache.jasper.runtime.HttpJspBase
 out.write("\n <hr/>\n ");
-Parser parser = new Parser(query, out, new HtmlTableFormatter(out));
+HQLParser parser = new HQLParser(query, out, new HtmlTableFormatter(out));
 Command cmd = parser.terminatedCommand();
 if (cmd.getCommandType() != Command.CommandType.SELECT) {

View File

@@ -14,10 +14,10 @@ import org.apache.hadoop.hbase.HServerInfo;
 import org.apache.hadoop.hbase.HServerAddress;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.shell.ShowCommand;
-import org.apache.hadoop.hbase.shell.TableFormatter;
-import org.apache.hadoop.hbase.shell.ReturnMsg;
-import org.apache.hadoop.hbase.shell.formatter.HtmlTableFormatter;
+import org.apache.hadoop.hbase.hql.ShowCommand;
+import org.apache.hadoop.hbase.hql.TableFormatter;
+import org.apache.hadoop.hbase.hql.ReturnMsg;
+import org.apache.hadoop.hbase.hql.formatter.HtmlTableFormatter;
 import org.apache.hadoop.hbase.HTableDescriptor;
 
 public final class master_jsp extends org.apache.jasper.runtime.HttpJspBase

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import java.io.IOException;
 import java.io.Writer;

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import java.io.IOException;
 import java.io.Writer;

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import java.io.IOException;
 import java.io.Writer;

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import org.apache.hadoop.hbase.HBaseConfiguration;

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 /**
  * Parser uses command factories to create command.

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import java.io.Writer;
 import java.util.HashMap;

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import java.io.IOException;
 import java.io.Writer;

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import java.io.IOException;
 import java.io.Writer;

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import java.io.IOException;
 import java.io.Writer;

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import java.io.IOException;
 import java.io.Writer;

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import java.io.IOException;
 import java.io.Writer;

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import java.io.Writer;

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 public class ExitException extends SecurityException {
   private static final long serialVersionUID = -8085525076856622991L;

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import java.io.Writer;
 import java.util.List;

View File

@@ -0,0 +1,62 @@
/**
* Copyright 2007 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.hql;
import java.io.Writer;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.hql.generated.HQLParser;
import org.apache.hadoop.hbase.hql.generated.ParseException;
import org.apache.hadoop.hbase.hql.generated.TokenMgrError;
public class HQLClient {
  public static String MASTER_ADDRESS = null;
  static HBaseConfiguration conf;
  static TableFormatter tableFormater;
  static Writer out;

  public HQLClient(HBaseConfiguration config, String master, Writer output,
      TableFormatter formatter) {
    conf = config;
    out = output;
    tableFormater = formatter;
    MASTER_ADDRESS = master;
  }

  public ReturnMsg executeQuery(String query) {
    HQLParser parser = new HQLParser(query, out, tableFormater);
    ReturnMsg rs = null;
    try {
      Command cmd = parser.terminatedCommand();
      if (cmd != null) {
        rs = cmd.execute(conf);
      }
    } catch (ParseException pe) {
      String[] msg = pe.getMessage().split("[\n]");
      rs = new ReturnMsg(-9, "Syntax error : Type 'help;' for usage.\nMessage : " + msg[0]);
    } catch (TokenMgrError te) {
      String[] msg = te.getMessage().split("[\n]");
      rs = new ReturnMsg(-9, "Lexical error : Type 'help;' for usage.\nMessage : " + msg[0]);
    }
    return rs;
  }
}
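Taken together with the Shell.java hunk above, this class gives the shell one entry point per terminated statement. A minimal usage sketch follows; the standalone class, the example statement, and the null master address are illustrative assumptions, not part of the patch:

import java.io.OutputStreamWriter;
import java.io.Writer;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.hql.HQLClient;
import org.apache.hadoop.hbase.hql.ReturnMsg;
import org.apache.hadoop.hbase.hql.TableFormatter;
import org.apache.hadoop.hbase.hql.TableFormatterFactory;

public class HQLClientSketch {
  public static void main(String[] args) throws Exception {
    // Mirrors what the reworked Shell loop does for each terminated statement.
    HBaseConfiguration conf = new HBaseConfiguration();
    Writer out = new OutputStreamWriter(System.out, "UTF-8");
    TableFormatter formatter = new TableFormatterFactory(out, conf).get();
    // A null master address is an assumption for this sketch; Shell passes its MASTER_ADDRESS field.
    HQLClient hql = new HQLClient(conf, null, out, formatter);
    ReturnMsg rs = hql.executeQuery("show tables;");
    // A null or negative-typed ReturnMsg (e.g. the -9 used above for parse errors) signals failure.
    if (rs != null && rs.getType() > -1) {
      out.flush();
    }
  }
}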

View File

@@ -3,8 +3,8 @@ options {
   IGNORE_CASE = true;
 }
-PARSER_BEGIN(Parser)
-package org.apache.hadoop.hbase.shell.generated;
+PARSER_BEGIN(HQLParser)
+package org.apache.hadoop.hbase.hql.generated;
 
 /**
  * Copyright 2007 The Apache Software Foundation
@@ -36,18 +36,17 @@ import java.io.Writer;
 import java.net.URLEncoder;
 import java.io.UnsupportedEncodingException;
-import org.apache.hadoop.hbase.shell.*;
+import org.apache.hadoop.hbase.hql.*;
 
 /**
  * Parsing command line.
  */
-public class Parser {
+public class HQLParser {
   private String QueryString;
   private TableFormatter formatter;
   private Writer out;
-  private String secondR;
 
-  public Parser(final String query, final Writer o, final TableFormatter f) {
+  public HQLParser(final String query, final Writer o, final TableFormatter f) {
     this((Reader)(new StringReader(query)));
     this.QueryString = query;
     this.formatter = f;
@@ -59,7 +58,7 @@ public class Parser {
   }
 }
-PARSER_END(Parser)
+PARSER_END(HQLParser)
 
 SKIP :
 {

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import java.io.IOException;
 import java.security.Permission;
@@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.Shell;
  *
  * @see ExitException
  */
-public class ShellSecurityManager extends SecurityManager {
+public class HQLSecurityManager extends SecurityManager {
   /**
    * Override SecurityManager#checkExit. This throws an ExitException(status)

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import java.io.IOException;
 import java.io.OutputStreamWriter;

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import java.io.IOException;
 import java.io.Writer;

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import java.io.File;
 import java.io.IOException;

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import org.apache.hadoop.hbase.HBaseConfiguration;

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import java.io.Writer;
 import java.util.Map;

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import java.io.IOException;
 import java.io.OutputStreamWriter;
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.Shell;
 import org.apache.hadoop.hbase.filter.RowFilterInterface;
 import org.apache.hadoop.hbase.filter.StopRowFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchRowFilter;
-import org.apache.hadoop.hbase.shell.generated.Parser;
+import org.apache.hadoop.hbase.hql.generated.HQLParser;
 import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.io.Text;
@@ -375,7 +375,7 @@ public class SelectCommand extends BasicCommand {
 HBaseConfiguration c = new HBaseConfiguration();
 // For debugging
 TableFormatterFactory tff = new TableFormatterFactory(out, c);
-Parser parser = new Parser("select * from 'x' where row='x';", out, tff.get());
+HQLParser parser = new HQLParser("select * from 'x' where row='x';", out, tff.get());
 Command cmd = parser.terminatedCommand();
 ReturnMsg rm = cmd.execute(c);

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import java.io.IOException;
 import java.io.Writer;

View File

@@ -17,12 +17,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import java.io.IOException;
 import java.io.Writer;
 
-import org.apache.hadoop.hbase.shell.formatter.AsciiTableFormatter;
+import org.apache.hadoop.hbase.hql.formatter.AsciiTableFormatter;
 
 /**
  * Interface implemented by table formatters outputting select results.

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import java.io.Writer;
 import java.lang.reflect.Constructor;
@@ -25,7 +25,7 @@ import java.lang.reflect.Constructor;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.shell.formatter.AsciiTableFormatter;
+import org.apache.hadoop.hbase.hql.formatter.AsciiTableFormatter;
 
 /**
  * Table formatter. Specify formatter by setting "hbaseshell.formatter" property

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import java.io.IOException;
 import java.io.Writer;

View File

@@ -15,12 +15,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell.formatter;
+package org.apache.hadoop.hbase.hql.formatter;
 
 import java.io.IOException;
 import java.io.Writer;
 
-import org.apache.hadoop.hbase.shell.TableFormatter;
+import org.apache.hadoop.hbase.hql.TableFormatter;
 
 /**

View File

@@ -15,14 +15,14 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell.formatter;
+package org.apache.hadoop.hbase.hql.formatter;
 
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.UnsupportedEncodingException;
 import java.io.Writer;
 
-import org.apache.hadoop.hbase.shell.TableFormatter;
+import org.apache.hadoop.hbase.hql.TableFormatter;
 import org.znerd.xmlenc.LineBreak;
 import org.znerd.xmlenc.XMLOutputter;
 import org.znerd.xmlenc.XMLEncoder;

View File

@@ -1,5 +1,5 @@
-/* Generated By:JavaCC: Do not edit this line. Parser.java */
-package org.apache.hadoop.hbase.shell.generated;
+/* Generated By:JavaCC: Do not edit this line. HQLParser.java */
+package org.apache.hadoop.hbase.hql.generated;
 /**
  * Copyright 2007 The Apache Software Foundation
@@ -31,18 +31,17 @@ import java.io.Writer;
 import java.net.URLEncoder;
 import java.io.UnsupportedEncodingException;
-import org.apache.hadoop.hbase.shell.*;
+import org.apache.hadoop.hbase.hql.*;
 
 /**
  * Parsing command line.
  */
-public class Parser implements ParserConstants {
+public class HQLParser implements HQLParserConstants {
   private String QueryString;
   private TableFormatter formatter;
   private Writer out;
-  private String secondR;
 
-  public Parser(final String query, final Writer o, final TableFormatter f) {
+  public HQLParser(final String query, final Writer o, final TableFormatter f) {
     this((Reader)(new StringReader(query)));
     this.QueryString = query;
     this.formatter = f;
@@ -1081,6 +1080,11 @@ public class Parser implements ParserConstants {
     finally { jj_save(0, xla); }
   }
+  final private boolean jj_3R_11() {
+    if (jj_scan_token(ID)) return true;
+    return false;
+  }
+
   final private boolean jj_3R_10() {
     Token xsp;
     xsp = jj_scanpos;
@@ -1107,12 +1111,7 @@ public class Parser implements ParserConstants {
     return false;
   }
-  final private boolean jj_3R_11() {
-    if (jj_scan_token(ID)) return true;
-    return false;
-  }
-  public ParserTokenManager token_source;
+  public HQLParserTokenManager token_source;
   SimpleCharStream jj_input_stream;
   public Token token, jj_nt;
   private int jj_ntk;
@@ -1143,12 +1142,12 @@
   private boolean jj_rescan = false;
   private int jj_gc = 0;
 
-  public Parser(java.io.InputStream stream) {
+  public HQLParser(java.io.InputStream stream) {
     this(stream, null);
   }
-  public Parser(java.io.InputStream stream, String encoding) {
+  public HQLParser(java.io.InputStream stream, String encoding) {
     try { jj_input_stream = new SimpleCharStream(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); }
-    token_source = new ParserTokenManager(jj_input_stream);
+    token_source = new HQLParserTokenManager(jj_input_stream);
     token = new Token();
     jj_ntk = -1;
     jj_gen = 0;
@@ -1169,9 +1168,9 @@
     for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls();
   }
 
-  public Parser(java.io.Reader stream) {
+  public HQLParser(java.io.Reader stream) {
     jj_input_stream = new SimpleCharStream(stream, 1, 1);
-    token_source = new ParserTokenManager(jj_input_stream);
+    token_source = new HQLParserTokenManager(jj_input_stream);
     token = new Token();
     jj_ntk = -1;
     jj_gen = 0;
@@ -1189,7 +1188,7 @@
     for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls();
   }
 
-  public Parser(ParserTokenManager tm) {
+  public HQLParser(HQLParserTokenManager tm) {
     token_source = tm;
     token = new Token();
     jj_ntk = -1;
@@ -1198,7 +1197,7 @@
     for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls();
   }
 
-  public void ReInit(ParserTokenManager tm) {
+  public void ReInit(HQLParserTokenManager tm) {
     token_source = tm;
     token = new Token();
     jj_ntk = -1;

View File

@@ -1,7 +1,7 @@
-/* Generated By:JavaCC: Do not edit this line. ParserConstants.java */
-package org.apache.hadoop.hbase.shell.generated;
+/* Generated By:JavaCC: Do not edit this line. HQLParserConstants.java */
+package org.apache.hadoop.hbase.hql.generated;
 
-public interface ParserConstants {
+public interface HQLParserConstants {
   int EOF = 0;
   int HELP = 5;

View File

@@ -1,5 +1,5 @@
-/* Generated By:JavaCC: Do not edit this line. ParserTokenManager.java */
-package org.apache.hadoop.hbase.shell.generated;
+/* Generated By:JavaCC: Do not edit this line. HQLParserTokenManager.java */
+package org.apache.hadoop.hbase.hql.generated;
 /**
  * Copyright 2007 The Apache Software Foundation
  *
@@ -29,9 +29,9 @@ import java.io.Reader;
 import java.io.Writer;
 import java.net.URLEncoder;
 import java.io.UnsupportedEncodingException;
-import org.apache.hadoop.hbase.shell.*;
+import org.apache.hadoop.hbase.hql.*;
 
-public class ParserTokenManager implements ParserConstants
+public class HQLParserTokenManager implements HQLParserConstants
 {
   public java.io.PrintStream debugStream = System.out;
   public void setDebugStream(java.io.PrintStream ds) { debugStream = ds; }
@@ -1480,12 +1480,12 @@ protected SimpleCharStream input_stream;
   private final int[] jjrounds = new int[32];
   private final int[] jjstateSet = new int[64];
   protected char curChar;
-  public ParserTokenManager(SimpleCharStream stream){
+  public HQLParserTokenManager(SimpleCharStream stream){
     if (SimpleCharStream.staticFlag)
       throw new Error("ERROR: Cannot use a static CharStream class with a non-static lexical analyzer.");
     input_stream = stream;
   }
-  public ParserTokenManager(SimpleCharStream stream, int lexState){
+  public HQLParserTokenManager(SimpleCharStream stream, int lexState){
     this(stream);
     SwitchTo(lexState);
   }

View File

@@ -1,24 +1,5 @@
 /* Generated By:JavaCC: Do not edit this line. ParseException.java Version 3.0 */
-/**
- * Copyright 2007 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.shell.generated;
+package org.apache.hadoop.hbase.hql.generated;
 
 /**
  * This exception is thrown when parse errors are encountered.

View File

@@ -1,24 +1,5 @@
 /* Generated By:JavaCC: Do not edit this line. SimpleCharStream.java Version 4.0 */
-/**
- * Copyright 2007 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.shell.generated;
+package org.apache.hadoop.hbase.hql.generated;
 
 /**
  * An implementation of interface CharStream, where the stream is assumed to

View File

@@ -1,24 +1,5 @@
 /* Generated By:JavaCC: Do not edit this line. Token.java Version 3.0 */
-/**
- * Copyright 2007 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.shell.generated;
+package org.apache.hadoop.hbase.hql.generated;
 
 /**
  * Describes the input token stream.

View File

@@ -1,24 +1,5 @@
 /* Generated By:JavaCC: Do not edit this line. TokenMgrError.java Version 3.0 */
-/**
- * Copyright 2007 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.shell.generated;
+package org.apache.hadoop.hbase.hql.generated;
 
 public class TokenMgrError extends Error
 {

View File

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.shell;
+package org.apache.hadoop.hbase.hql;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
@@ -32,21 +32,21 @@ import org.apache.hadoop.hbase.HBaseAdmin;
 import org.apache.hadoop.hbase.HBaseClusterTestCase;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.shell.generated.ParseException;
-import org.apache.hadoop.hbase.shell.generated.Parser;
+import org.apache.hadoop.hbase.hql.generated.ParseException;
+import org.apache.hadoop.hbase.hql.generated.HQLParser;
 import org.apache.hadoop.io.Text;
 
 /**
- * Tests for Hbase shell
+ * Tests for HQL
  */
-public class TestHBaseShell extends HBaseClusterTestCase {
+public class TestHQL extends HBaseClusterTestCase {
   protected final Log LOG = LogFactory.getLog(this.getClass().getName());
   private ByteArrayOutputStream baos;
   private HBaseAdmin admin;
 
   /** constructor */
-  public TestHBaseShell() {
+  public TestHQL() {
     super(1 /*One region server only*/);
   }
@@ -161,7 +161,7 @@ public class TestHBaseShell extends HBaseClusterTestCase {
   LOG.info("Running command: " + cmdStr);
   Writer out = new OutputStreamWriter(System.out, "UTF-8");
   TableFormatterFactory tff = new TableFormatterFactory(out, this.conf);
-  Parser parser = new Parser(cmdStr, out, tff.get());
+  HQLParser parser = new HQLParser(cmdStr, out, tff.get());
   Command cmd = parser.terminatedCommand();
   ReturnMsg rm = cmd.execute(this.conf);
   dumpStdout();

View File

@@ -1,11 +1,11 @@
 <%@ page contentType="text/html;charset=UTF-8"
   import="java.util.*"
   import="org.apache.hadoop.hbase.HBaseConfiguration"
-  import="org.apache.hadoop.hbase.shell.TableFormatter"
-  import="org.apache.hadoop.hbase.shell.ReturnMsg"
-  import="org.apache.hadoop.hbase.shell.generated.Parser"
-  import="org.apache.hadoop.hbase.shell.Command"
-  import="org.apache.hadoop.hbase.shell.formatter.HtmlTableFormatter"
+  import="org.apache.hadoop.hbase.hql.TableFormatter"
+  import="org.apache.hadoop.hbase.hql.ReturnMsg"
+  import="org.apache.hadoop.hbase.hql.generated.Parser"
+  import="org.apache.hadoop.hbase.hql.Command"
+  import="org.apache.hadoop.hbase.hql.formatter.HtmlTableFormatter"
 %><?xml version="1.0" encoding="UTF-8" ?>
 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
   "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
@@ -41,7 +41,7 @@
 %>
 <hr/>
 <%
-  Parser parser = new Parser(query, out, new HtmlTableFormatter(out));
+  HQLParser parser = new HQLParser(query, out, new HtmlTableFormatter(out));
   Command cmd = parser.terminatedCommand();
   if (cmd.getCommandType() != Command.CommandType.SELECT) {
 %>

View File

@@ -10,10 +10,10 @@
   import="org.apache.hadoop.hbase.HServerAddress"
   import="org.apache.hadoop.hbase.HRegionInfo"
   import="org.apache.hadoop.hbase.HBaseConfiguration"
-  import="org.apache.hadoop.hbase.shell.ShowCommand"
-  import="org.apache.hadoop.hbase.shell.TableFormatter"
-  import="org.apache.hadoop.hbase.shell.ReturnMsg"
-  import="org.apache.hadoop.hbase.shell.formatter.HtmlTableFormatter"
+  import="org.apache.hadoop.hbase.hql.ShowCommand"
+  import="org.apache.hadoop.hbase.hql.TableFormatter"
+  import="org.apache.hadoop.hbase.hql.ReturnMsg"
+  import="org.apache.hadoop.hbase.hql.formatter.HtmlTableFormatter"
   import="org.apache.hadoop.hbase.HTableDescriptor" %><%
   HMaster master = (HMaster)getServletContext().getAttribute(HMaster.MASTER);
   HBaseConfiguration conf = new HBaseConfiguration();